lang
stringclasses 1
value | license
stringclasses 13
values | stderr
stringlengths 0
350
| commit
stringlengths 40
40
| returncode
int64 0
128
| repos
stringlengths 7
45.1k
| new_contents
stringlengths 0
1.87M
| new_file
stringlengths 6
292
| old_contents
stringlengths 0
1.87M
| message
stringlengths 6
9.26k
| old_file
stringlengths 6
292
| subject
stringlengths 0
4.45k
|
|---|---|---|---|---|---|---|---|---|---|---|---|
Java
|
agpl-3.0
|
7767482ab21581c001e9d01689785b1db1c69145
| 0
|
printedheart/opennars,printedheart/opennars,printedheart/opennars,printedheart/opennars,printedheart/opennars,printedheart/opennars
|
package nars.inference;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.TreeSet;
import nars.core.EventEmitter.Observer;
import nars.core.Events.ConceptBeliefRemove;
import nars.core.Events.TaskDerive;
import nars.core.Memory;
import nars.core.Parameters;
import nars.entity.Concept;
import nars.entity.Sentence;
import nars.entity.Stamp;
import nars.entity.Task;
import nars.entity.TruthValue;
import nars.inference.GraphExecutive.ParticlePlan;
import nars.io.Symbols;
import nars.io.Texts;
import nars.io.buffer.PriorityBuffer;
import nars.language.Conjunction;
import nars.language.Implication;
import nars.language.Interval;
import nars.language.Term;
import static nars.language.Terms.equalSubTermsInRespectToImageAndProduct;
import nars.operator.Operation;
import nars.operator.Operator;
/**
* Operation execution and planning support.
* Strengthens and accelerates goal-reaching activity
*/
public class Executive implements Observer {
/** planner that searches the implication graph for operation sequences */
public final GraphExecutive graph;
/** the memory this executive serves; all times and parameters come from here */
public final Memory memory;
///** memory for faster execution of &/ statements (experiment) */
//public final Deque<TaskConceptContent> next = new ArrayDeque<>();
/** bounded, sorted buffer of candidate executions; capacity is numActiveTasks */
PriorityBuffer<TaskExecution> tasks;
/** removals staged here and applied in updateTasks(), avoiding concurrent modification of 'tasks' */
private Set<TaskExecution> tasksToRemove = new HashSet();
public int shortTermMemorySize=10; //how many events its able to track for the temporal feedback system
//100 should be enough for all practical examples for now, we may make it adaptive later,
//which means adjusting according to the longest (&/,a1...an) =/> .. statement
/** recent input/operation events, oldest first; trimmed to shortTermMemorySize in inductionOnSucceedingEvents */
public ArrayList<Task> lastEvents=new ArrayList<Task>();
/** number of tasks that are active in the sorted priority buffer for execution */
int numActiveTasks = 1;
/** max number of tasks that a plan can generate. chooses the N best */
int maxPlannedTasks = 1;
/** global plan search parameters */
float searchDepth = 64;
int particles = 32;
/** inline search parameters (used when inlining sub-plans inside a sequence) */
float inlineSearchDepth = 8;
int inlineParticles = 16;
/** rate limit: at most this many execution cycles per memory duration */
float maxExecutionsPerDuration = 1f;
/** how much to multiply all cause relevancies per cycle */
double causeRelevancyFactor = 0.999;
/** how much to add value to each cause involved in a successful plan */
//TODO move this to a parameter class visible to both Executive and GraphExecutive
public static double relevancyOfSuccessfulPlan = 0.10;
/** time of last execution */
long lastExecution = -1;
/** motivation set on an executing task to prevent other tasks from interrupting it, unless they are relatively urgent.
 * a larger value means it is more difficult for a new task to interrupt one which has
 * already begun executing.
 * NOTE(review): currently only referenced from commented-out code below — confirm intended use.
 */
float motivationToFinishCurrentExecution = 1.5f;
/**
 * Builds the executive: wires the plan graph, creates the bounded priority
 * buffer ordered by desire, then priority, then durability, and subscribes
 * to task-derivation and belief-removal events.
 *
 * @param mem the memory this executive serves
 */
public Executive(Memory mem) {
this.memory = mem;
this.graph = new GraphExecutive(mem,this);
// NOTE(review): comparator returns ascending order; which end PriorityBuffer
// treats as "first" is defined in PriorityBuffer — confirm there.
this.tasks = new PriorityBuffer<TaskExecution>(new Comparator<TaskExecution>() {
@Override
public final int compare(final TaskExecution a, final TaskExecution b) {
float ap = a.getDesire();
float bp = b.getDesire();
if (bp != ap) {
return Float.compare(ap, bp);
} else {
// desire tied: fall back to priority, then durability
float ad = a.getPriority();
float bd = b.getPriority();
if (ad!=bd)
return Float.compare(ad, bd);
else {
float add = a.getDurability();
float bdd = b.getDurability();
return Float.compare(add, bdd);
}
}
}
}, numActiveTasks) {
// when the buffer overflows its capacity, the rejected execution is removed entirely
@Override protected void reject(final TaskExecution t) {
removeTask(t);
}
};
memory.event.set(this, true, TaskDerive.class, ConceptBeliefRemove.class);
}
/** derived forward-implication tasks tracked for the temporal prediction feedback loop */
HashSet<Task> current_tasks=new HashSet<Task>();
/**
 * Event sink: collects derived forward-implication tasks (a =/> b) for
 * temporalPredictionsAdapt(), and drops them again when the hosting concept
 * removes the corresponding belief.
 *
 * @param event event class; TaskDerive or ConceptBeliefRemove
 * @param args  event payload; for TaskDerive the task is args[0],
 *              for ConceptBeliefRemove the task is args[2]
 */
@Override
public void event(Class event, Object[] args) {
    if (event == TaskDerive.class) {
        Task derivedTask = (Task) args[0];
        // only track predictive statements with forward temporal order
        if (derivedTask.sentence.content instanceof Implication
                && ((Implication) derivedTask.sentence.content).getTemporalOrder() == TemporalRules.ORDER_FORWARD) {
            // Set.add is a no-op for duplicates; the original contains() pre-check was redundant
            current_tasks.add(derivedTask);
        }
    }
    else if (event == ConceptBeliefRemove.class) {
        Task removedTask = (Task) args[2]; //task is 3rd argument
        // Set.remove is a no-op when absent; the original contains() pre-check was redundant
        current_tasks.remove(removedTask);
    }
}
/**
 * One queued execution candidate: wraps a Task (possibly rewritten so that
 * plannable sub-goals inside a sequence are inlined) together with its
 * progress through the sequence and its scheduling state.
 */
public class TaskExecution {
/** may be null for input tasks */
public final Concept c;
/** the (possibly inlined/cloned) task being executed */
public final Task t;
/** index of the next term to execute within the sequence conjunction */
public int sequence;
/** absolute memory time before which the next step must not run; -1 = no delay */
public long delayUntil = -1;
/** multiplier applied to desire; 0 marks the execution as unviable */
private float motivationFactor = 1;
/**
 * Wraps a task for execution. When the particle planner is enabled,
 * sequence subjects of forward/concurrent implications (and bare
 * sequence conjunctions) are passed through inlineConjunction first.
 */
public TaskExecution(final Concept concept, Task t) {
this.c = concept;
//Check if task is
if(Parameters.TEMPORAL_PARTICLE_PLANNER) {
Term term = t.getContent();
if (term instanceof Implication) {
Implication it = (Implication)term;
if ((it.getTemporalOrder() == TemporalRules.ORDER_FORWARD) || (it.getTemporalOrder() == TemporalRules.ORDER_CONCURRENT)) {
if (it.getSubject() instanceof Conjunction) {
t = inlineConjunction(t, (Conjunction)it.getSubject());
}
}
}
else if (term instanceof Conjunction) {
t = inlineConjunction(t, (Conjunction)term);
}
}
this.t = t;
}
/**
 * Replaces non-executable terms inside a SEQUENCE conjunction with the best
 * available sub-plan from the implication graph. If any element is neither
 * executable nor plannable, motivation is zeroed so the task is dropped.
 * Trailing intervals are stripped. Returns the original task unchanged
 * unless a rewrite occurred, in which case a clone with the new content
 * is returned.
 */
protected Task inlineConjunction(Task t, final Conjunction c) {
ArrayDeque<Term> inlined = new ArrayDeque();
boolean modified = false;
if (c.operator() == Symbols.NativeOperator.SEQUENCE) {
Term prev = null;
for (Term e : c.term) {
if (!isPlanTerm(e)) {
if (graph.isPlannable(e)) {
TreeSet<ParticlePlan> plans = graph.particlePlan(e, inlineSearchDepth, inlineParticles);
if (plans.size() > 0) {
//use the first
ParticlePlan pp = plans.first();
//if terms precede this one, remove a common prefix
//scan from the end of the sequence backward until a term matches the previous, and splice it there
//TODO more rigorous prefix compraison. compare sublist prefix
List<Term> seq = pp.sequence;
// if (prev!=null) {
// int previousTermIndex = pp.sequence.lastIndexOf(prev);
//
// if (previousTermIndex!=-1) {
// if (previousTermIndex == seq.size()-1)
// seq = Collections.EMPTY_LIST;
// else {
// seq = seq.subList(previousTermIndex+1, seq.size());
// }
// }
// }
//System.out.println("inline: " + pp.sequence + " -> " + seq);
inlined.addAll(seq);
//System.err.println("Inline " + e + " in " + t.getContent() + " = " + pp.sequence);
modified = true;
}
else {
//no plan available, this wont be able to execute
setMotivationFactor(0);
}
}
else {
//this won't be able to execute here
setMotivationFactor(0);
}
}
else {
//executable term, add
inlined.add(e);
}
prev = e;
}
}
//remove suffix intervals
if (inlined.size() > 0) {
while (inlined.peekLast() instanceof Interval) {
inlined.removeLast();
modified = true;
}
}
// an empty rewrite means nothing executable remains
if (inlined.isEmpty())
setMotivationFactor(0);
if (modified) {
Conjunction nc = c.cloneReplacingTerms(inlined.toArray(new Term[inlined.size()]));
t = t.clone(t.sentence.clone(nc) );
}
return t;
}
/** equality is delegated to the wrapped task (consistent with hashCode) */
@Override public boolean equals(final Object obj) {
if (obj instanceof TaskExecution) {
return ((TaskExecution)obj).t.equals(t);
}
return false;
}
/** effective desire: the task's desire expectation scaled by motivation */
public final float getDesire() {
return t.getDesire().getExpectation() * motivationFactor;
}
public final float getPriority() { return t.getPriority(); }
public final float getDurability() { return t.getDurability(); }
//public final float getMotivation() { return getDesire() * getPriority() * motivationFactor; }
public final void setMotivationFactor(final float f) { this.motivationFactor = f; }
@Override public int hashCode() { return t.hashCode(); }
@Override
public String toString() {
return "!" + Texts.n2Slow(getDesire()) + "." + sequence + "! " + t.toString();
}
}
/**
 * Finds a queued execution whose task shares the given parent task.
 *
 * @param parent the parent task to match against
 * @return the matching queued execution, or null if none is queued
 */
protected TaskExecution getExecution(final Task parent) {
    for (final TaskExecution candidate : tasks) {
        final Task candidateParent = candidate.t.parentTask;
        if ((candidateParent != null) && candidateParent.equals(parent)) {
            return candidate;
        }
    }
    return null;
}
/**
 * Offers a task for execution. A new task is rejected if an execution for
 * the same parent goal is already queued with higher desire, or has already
 * begun stepping through its sequence. On acceptance, the previous
 * expectation bookkeeping is settled and a TaskExecution event is emitted.
 *
 * @param c the concept hosting the task (may be null for input tasks)
 * @param t the candidate task
 * @return true if the task entered the execution buffer
 */
public boolean addTask(final Concept c, final Task t) {
TaskExecution existingExecutable = getExecution(t.parentTask);
boolean valid = true;
if (existingExecutable!=null) {
//TODO compare motivation (desire * priority) instead?
//if the new task for the existin goal has a lower priority, ignore it
if (existingExecutable.getDesire() > t.getDesire().getExpectation()) {
//System.out.println("ignored lower priority task: " + t + " for parent " + t.parentTask);
valid = false;
}
//do not allow interrupting a lower priority, but already executing task
//TODO allow interruption if priority difference is above some threshold
if (existingExecutable.sequence > 0) {
//System.out.println("ignored late task: " + t + " for parent " + t.parentTask);
valid = false;
}
}
if (valid) {
// settle the previous plan's expectation: if it finished (ended) but its
// predicted event never occurred, record the negative outcome now
if(!occured && this.expected_task!=null && ended) {
expected_task.expect(false); //ok this one didnt get his expectation
}
occured=false; //only bad to not happened not interrupted ones
ended=false;
final TaskExecution te = new TaskExecution(c, t);
if (tasks.add(te)) {
//added successfully
memory.emit(TaskExecution.class, te);
return true;
}
}
//t.end();
return false;
}
/**
 * Stages an execution for removal on the next updateTasks() pass,
 * ending the underlying task the first time it is staged.
 */
protected void removeTask(final TaskExecution t) {
    final boolean firstTimeStaged = tasksToRemove.add(t);
    if (!firstTimeStaged) {
        return;
    }
    // if (memory.getRecorder().isActive())
    //     memory.getRecorder().output("Executive", "Task Remove: " + t.toString());
    t.t.end();
}
/**
 * Applies staged removals and drops executions whose effective desire has
 * fallen to zero, rebuilding the priority buffer so its ordering stays
 * consistent with the current desire values.
 */
protected void updateTasks() {
    // typed snapshot (the original used a raw ArrayList, losing type safety)
    List<TaskExecution> kept = new ArrayList<>(tasks);
    kept.removeAll(tasksToRemove);
    tasks.clear();
    for (TaskExecution x : kept) {
        if (x.getDesire() > 0) { // && (x.getPriority() > 0)) {
            tasks.add(x);
            //this is incompatible with the other usages of motivationFactor, so do not use this:
            //    if ((x.delayUntil!=-1) && (x.delayUntil <= memory.getTime())) {
            //        //restore motivation so task can resume processing
            //        x.motivationFactor = 1.0f;
            //    }
        }
    }
    tasksToRemove.clear();
}
// public void manageExecution() {
//
// if (next.isEmpty()) {
// return;
// }
//
// TaskConceptContent n = next.pollFirst();
//
//
// if (n.task==null) {
// //we have to wait
// return;
// }
//
// if (!(n.content instanceof Operation)) {
// throw new RuntimeException("manageExecution: Term content is not Operation: " + n.content);
// }
//
// System.out.println("manageExecution: " + n.task);
//
// //ok it is time for action:
// execute((Operation)n.content, n.concept, n.task, true);
// }
/**
 * Invokes the operator bound to the given operation, attaching the
 * originating task to the operation so feedback can be routed back to it.
 */
protected void execute(final Operation op, final Task task) {
Operator oper = op.getOperator();
//if (NAR.DEBUG)
//System.out.println("exe: " + task.getExplanation().trim());
op.setTask(task);
oper.call(op, memory);
//task.end(true);
}
/**
 * When the particle planner is enabled and the concept's desire meets the
 * decision threshold, attempts to plan toward the task's content via the
 * implication graph.
 */
public void decisionPlanning(final NAL nal, final Task t, final Concept concept) {
    if (!Parameters.TEMPORAL_PARTICLE_PLANNER) {
        return;
    }
    if (!isDesired(concept)) {
        return;
    }
    final Term goal = t.getContent();
    if (graph.isPlannable(goal)) {
        graph.plan(nal, concept, t, goal, particles, searchDepth, '!', maxPlannedTasks);
    }
}
/**
 * Entry point for all potentially executable tasks: queues the task when
 * its concept is sufficiently desired and its content is an operation or a
 * sequence conjunction.
 */
public void decisionMaking(final Task t, final Concept concept) {
    if (!isDesired(concept)) {
        //t.end();
        return;
    }
    final Term content = concept.term;
    if ((content instanceof Operation) || isSequenceConjunction(content)) {
        addTask(concept, t);
    }
}
/** whether a concept's desire expectation meets or exceeds the decision threshold */
public boolean isDesired(final Concept c) {
    final float expectation = c.getDesire().getExpectation();
    return expectation >= memory.param.decisionThreshold.get();
}
/** called during each memory cycle */
public void cycle() {
long now = memory.time();
//only execute something no less than every duration time
if (now - lastExecution < (memory.param.duration.get()/maxExecutionsPerDuration) )
return;
lastExecution = now;
// decay all cause relevancies, then settle the task buffer and sensors
graph.implication.multiplyRelevancy(causeRelevancyFactor);
updateTasks();
updateSensors();
if (tasks.isEmpty())
return;
/*if (NAR.DEBUG)*/ {
//TODO make a print function
if (tasks.size() > 1) {
System.out.println("Tasks @ " + memory.time());
for (TaskExecution tcc : tasks)
System.out.println(" " + tcc.toString());
}
else {
System.out.println("Task @ " + memory.time() + ": " + tasks.get(0));
}
}
// dispatch only the top execution this cycle, by content type
TaskExecution topExecution = tasks.getFirst();
Task top = topExecution.t;
Term term = top.getContent();
if (term instanceof Operation) {
execute((Operation)term, top); //directly execute
removeTask(topExecution);
return;
}
else if (term instanceof Conjunction) {
Conjunction c = (Conjunction)term;
if (c.operator() == Symbols.NativeOperator.SEQUENCE) {
executeConjunctionSequence(topExecution, c);
return;
}
}
else if (term instanceof Implication) {
Implication it = (Implication)term;
if ((it.getTemporalOrder() == TemporalRules.ORDER_FORWARD) || (it.getTemporalOrder() == TemporalRules.ORDER_CONCURRENT)) {
if (it.getSubject() instanceof Conjunction) {
Conjunction c = (Conjunction)it.getSubject();
if (c.operator() == Symbols.NativeOperator.SEQUENCE) {
executeConjunctionSequence(topExecution, c);
return;
}
}
else if (it.getSubject() instanceof Operation) {
execute((Operation)it.getSubject(), top); //directly execute
removeTask(topExecution);
return;
}
}
// NOTE(review): this throw is also reached when the implication's temporal
// order is neither forward nor concurrent, or when the conjunction subject
// is not a SEQUENCE — confirm those cases are really meant to be fatal
throw new RuntimeException("Unrecognized executable term: " + it.getSubject() + "[" + it.getSubject().getClass() + "] from " + top);
}
else {
//throw new RuntimeException("Unknown Task type: "+ top);
}
// //Example prediction
// if (memory.getCurrentBelief()!=null) {
// Term currentTerm = memory.getCurrentBelief().content;
// if (implication.containsVertex(currentTerm)) {
// particlePredict(currentTerm, 12, particles);
// }
// }
}
/** true for plan-only terms that carry no statement content: intervals and operations */
public static boolean isPlanTerm(final Term t) {
    if (t instanceof Interval) {
        return true;
    }
    return t instanceof Operation;
}
/** a term is directly executable when it is an operation or a sequence conjunction */
public static boolean isExecutableTerm(final Term t) {
    if (t instanceof Operation) {
        return true;
    }
    return isSequenceConjunction(t);
    //task.sentence.content instanceof Operation || (task.sentence.content instanceof Conjunction && task.sentence.content.getTemporalOrder()==TemporalRules.ORDER_FORWARD)))
}
/** whether the term is a conjunction using the SEQUENCE (&/) operator */
public static boolean isSequenceConjunction(final Term c) {
    if (!(c instanceof Conjunction)) {
        return false;
    }
    final Conjunction conjunction = (Conjunction) c;
    return conjunction.operator() == Symbols.NativeOperator.SEQUENCE;
    //return (cc.getTemporalOrder()==TemporalRules.ORDER_FORWARD) || (cc.getTemporalOrder()==TemporalRules.ORDER_CONCURRENT);
}
/** task whose predicted outcome is awaited after its plan completed */
public Task expected_task=null;
/** predicate event that the completed plan's implication predicts will occur */
public Term expected_event=null;
/** whether the most recent sequence execution ran to completion */
boolean ended=false;
/**
 * Advances one step of a (&/, ...) sequence: runs the current operation or
 * schedules an interval wait, then either records completion (remembering
 * the implication's predicate as the expected event) or stores the new
 * sequence position.
 */
private void executeConjunctionSequence(final TaskExecution task, final Conjunction c) {
    int s = task.sequence;
    Term currentTerm = c.term[s];
    long now = memory.time();
    if (task.delayUntil > now) {
        //not ready to execute next term
        return;
    }
    if (currentTerm instanceof Operation) {
        // (removed an unused local: the original looked up
        //  memory.concept(currentTerm) here and never used it)
        execute((Operation)currentTerm, task.t);
        task.delayUntil = now + memory.param.duration.get();
        s++;
    }
    else if (currentTerm instanceof Interval) {
        Interval ui = (Interval)currentTerm;
        task.delayUntil = memory.time() + Interval.magnitudeToTime(ui.magnitude, memory.param.duration);
        s++;
    }
    else {
        System.err.println("Non-executable term in sequence: " + currentTerm + " in " + c + " from task " + task.t);
        removeTask(task);
    }
    if (s == c.term.length) {
        ended=true;
        //completed task
        task.t.end(true);
        // remember the implication's predicate so later input events can confirm it
        if(task.t.sentence.content instanceof Implication) {
            expected_task=task.t;
            expected_event=((Implication)task.t.sentence.content).getPredicate();
        }
        removeTask(task);
        task.sequence=0;
    }
    else {
        ended=false;
        //still incomplete
        task.sequence = s;
        // task.setMotivationFactor(motivationToFinishCurrentExecution);
    }
}
//check all predictive statements, match them with last events
/**
 * Matches each tracked predictive implication (a =/> b, or (&/ a1..an) =/> b)
 * against the recent event history and revises its truth toward 1.0 when the
 * predicted consequence followed, toward 0.0 when a different event followed.
 */
public void temporalPredictionsAdapt() {
for(Task c : current_tasks) { //a =/> b or (&/ a1...an) =/> b
Term[] args=new Term[1];
// NOTE(review): assumes every tracked task's content is an Implication
// (enforced by event()); a ClassCastException here would indicate that
// invariant was broken elsewhere
Implication imp=(Implication) c.getContent();
args[0]=imp.getSubject();
if(imp.getSubject() instanceof Conjunction) {
Conjunction conj=(Conjunction) imp.getSubject();
if(conj.temporalOrder==TemporalRules.ORDER_FORWARD) {
args=conj.term; //in case of &/ this are the terms
}
}
int i=0;
boolean matched=true;
int off=0; // count of Interval terms skipped so far; aligns args with lastEvents
int expected_time=0;
for(i=0;i<args.length;i++) {
//just matching order for now, todo taking temporal time into account
//ok lets match the sequences:
if(args[i] instanceof Interval) {
off++;
continue;
}
// NOTE(review): assumes lastEvents holds at least (i-off+1) entries;
// an IndexOutOfBoundsException is possible early on — confirm callers
// guarantee enough history before this runs
if(!args[i].equals(lastEvents.get(i-off).sentence.content)) {
matched=false;
break;
}
}
//ok it matched, is the consequence also right?
if(matched) {
// NOTE(review): the consequence is read at index args.length without
// subtracting 'off' (unlike the loop above) — verify this offset is intended
if(imp.getPredicate().equals(lastEvents.get(args.length).sentence.content)) { //it matched and same consequence, so positive evidence
c.sentence.truth=TruthFunctions.revision(c.sentence.truth, new TruthValue(1.0f,Parameters.DEFAULT_JUDGMENT_CONFIDENCE));
} else { //it matched and other consequence, so negative evidence
c.sentence.truth=TruthFunctions.revision(c.sentence.truth, new TruthValue(0.0f,Parameters.DEFAULT_JUDGMENT_CONFIDENCE));
} //todo use derived task with revision instead
}
}
}
/** the most recent event accepted by inductionOnSucceedingEvents (short-term memory head) */
public Task stmLast=null;
/** whether the expected event of the last completed plan has been observed */
boolean occured=false;
/**
 * Performs temporal induction between the previous and the new event,
 * confirms a pending plan expectation when the predicted event arrives,
 * and appends the event to the bounded short-term event history.
 *
 * @param newEvent a non-eternal input (or system-triggered) event task
 * @param nal      the inference context used for induction
 * @return false if the event is ineligible or duplicates the previous one
 */
public boolean inductionOnSucceedingEvents(final Task newEvent, NAL nal) {
if (newEvent == null || newEvent.sentence.stamp.getOccurrenceTime()==Stamp.ETERNAL || !isInputOrTriggeredOperation(newEvent,nal.mem))
return false;
if (stmLast!=null) {
// skip events whose content matches the previous one (modulo image/product form)
if(equalSubTermsInRespectToImageAndProduct(newEvent.sentence.content,stmLast.sentence.content)) {
return false;
}
nal.setTheNewStamp(newEvent.sentence.stamp, stmLast.sentence.stamp, memory.time());
nal.setCurrentTask(newEvent);
Sentence currentBelief = stmLast.sentence;
nal.setCurrentBelief(currentBelief);
if(newEvent.getPriority()>Parameters.TEMPORAL_INDUCTION_MIN_PRIORITY) {
TemporalRules.temporalInduction(newEvent.sentence, currentBelief, nal);
}
}
//for this heuristic, only use input events & task effects of operations
if(newEvent.getPriority()>Parameters.TEMPORAL_INDUCTION_MIN_PRIORITY) {
// confirm a completed plan's prediction when its expected event shows up
if(Parameters.TEMPORAL_PARTICLE_PLANNER && this.expected_event!=null && this.expected_task!=null) {
if(newEvent.sentence.content.equals(this.expected_event)) {
this.expected_task.expect(true);
occured=true;
} //else {
// this.expected_task.expect(false);
// }
// this.expected_event=null;
// this.expected_task=null; //done i think//todo, refine, it could come in a specific time, also +4 on end of a (&/ plan has to be used
}
stmLast=newEvent;
lastEvents.add(newEvent);
temporalPredictionsAdapt();
// trim the history to the configured short-term memory size
while(lastEvents.size()>shortTermMemorySize) {
lastEvents.remove(0);
}
}
return true;
}
//is input or by the system triggered operation
public boolean isInputOrTriggeredOperation(final Task newEvent, Memory mem) {
    if (newEvent.isInput()) {
        return true;
    }
    if (Parameters.INTERNAL_EXPERIENCE_FULL) {
        return true;
    }
    if (newEvent.getCause() != null) {
        return true;
    }
    /*Term newcontent=newEvent.sentence.content;
    if(newcontent instanceof Operation) {
        Term pred=((Operation)newcontent).getPredicate();
        if(pred.equals(mem.getOperator("^want")) || pred.equals(mem.getOperator("^believe"))) {
            return false;
        }
    }*/
    return false;
}
/*
public boolean isActionable(final Task newEvent, Memory mem) {
if(!((newEvent.isInput()))) {
return false;
}
Term newcontent=newEvent.sentence.content;
if(newcontent instanceof Operation) {
Term pred=((Operation)newcontent).getPredicate();
if(pred.equals(mem.getOperator("^want")) || pred.equals(mem.getOperator("^believe"))) {
return false;
}
}
return true;
}*/
// public static class TaskConceptContent {
//
// public final Task task;
// public final Concept concept;
// public final Term content;
//
// public static TaskConceptContent NULL = new TaskConceptContent();
//
// /** null placeholder */
// protected TaskConceptContent() {
// this.task = null;
// this.concept = null;
// this.content = null;
// }
//
// public TaskConceptContent(Task task, Concept concept, Term content) {
// this.task = task;
// this.concept = concept;
// this.content = content;
// }
//
// }
/** publishes plan-graph size and execution-queue length to the logic sensors */
protected void updateSensors() {
memory.logic.PLAN_GRAPH_EDGE.commit(graph.implication.edgeSet().size());
memory.logic.PLAN_GRAPH_VERTEX.commit(graph.implication.vertexSet().size());
memory.logic.PLAN_TASK_EXECUTABLE.commit(tasks.size());
}
}
|
nars_java/nars/inference/Executive.java
|
package nars.inference;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.TreeSet;
import nars.core.EventEmitter.Observer;
import nars.core.Events.ConceptBeliefRemove;
import nars.core.Events.TaskDerive;
import nars.core.Memory;
import nars.core.Parameters;
import nars.entity.Concept;
import nars.entity.Sentence;
import nars.entity.Stamp;
import nars.entity.Task;
import nars.entity.TruthValue;
import nars.inference.GraphExecutive.ParticlePlan;
import nars.io.Symbols;
import nars.io.Texts;
import nars.io.buffer.PriorityBuffer;
import nars.language.Conjunction;
import nars.language.Implication;
import nars.language.Interval;
import nars.language.Term;
import static nars.language.Terms.equalSubTermsInRespectToImageAndProduct;
import nars.operator.Operation;
import nars.operator.Operator;
/**
* Operation execution and planning support.
* Strengthens and accelerates goal-reaching activity
*/
public class Executive implements Observer {
public final GraphExecutive graph;
public final Memory memory;
///** memory for faster execution of &/ statements (experiment) */
//public final Deque<TaskConceptContent> next = new ArrayDeque<>();
PriorityBuffer<TaskExecution> tasks;
private Set<TaskExecution> tasksToRemove = new HashSet();
public int shortTermMemorySize=10; //how many events its able to track for the temporal feedback system
//100 should be enough for all practical examples for now, we may make it adaptive later,
//which means adjusting according to the longest (&/,a1...an) =/> .. statement
public ArrayList<Task> lastEvents=new ArrayList<Task>();
/** number of tasks that are active in the sorted priority buffer for execution */
int numActiveTasks = 1;
/** max number of tasks that a plan can generate. chooses the N best */
int maxPlannedTasks = 1;
/** global plan search parameters */
float searchDepth = 64;
int particles = 32;
/** inline search parameters */
float inlineSearchDepth = 8;
int inlineParticles = 16;
float maxExecutionsPerDuration = 1f;
/** how much to multiply all cause relevancies per cycle */
double causeRelevancyFactor = 0.999;
/** how much to add value to each cause involved in a successful plan */
//TODO move this to a parameter class visible to both Executive and GraphExecutive
public static double relevancyOfSuccessfulPlan = 0.10;
/** time of last execution */
long lastExecution = -1;
/** motivation set on an executing task to prevent other tasks from interrupting it, unless they are relatively urgent.
* a larger value means it is more difficult for a new task to interrupt one which has
* already begun executing.
*/
float motivationToFinishCurrentExecution = 1.5f;
public Executive(Memory mem) {
this.memory = mem;
this.graph = new GraphExecutive(mem,this);
this.tasks = new PriorityBuffer<TaskExecution>(new Comparator<TaskExecution>() {
@Override
public final int compare(final TaskExecution a, final TaskExecution b) {
float ap = a.getDesire();
float bp = b.getDesire();
if (bp != ap) {
return Float.compare(ap, bp);
} else {
float ad = a.getPriority();
float bd = b.getPriority();
if (ad!=bd)
return Float.compare(ad, bd);
else {
float add = a.getDurability();
float bdd = b.getDurability();
return Float.compare(add, bdd);
}
}
}
}, numActiveTasks) {
@Override protected void reject(final TaskExecution t) {
removeTask(t);
}
};
memory.event.set(this, true, TaskDerive.class, ConceptBeliefRemove.class);
}
HashSet<Task> current_tasks=new HashSet<Task>();
@Override
public void event(Class event, Object[] args) {
if (event == TaskDerive.class) {
Task derivedTask=(Task) args[0];
if(derivedTask.sentence.content instanceof Implication &&
((Implication) derivedTask.sentence.content).getTemporalOrder()==TemporalRules.ORDER_FORWARD) {
if(!current_tasks.contains(derivedTask)) {
current_tasks.add(derivedTask);
}
}
}
else if (event == ConceptBeliefRemove.class) {
Task removedTask=(Task) args[2]; //task is 3nd
if(current_tasks.contains(removedTask)) {
current_tasks.remove(removedTask);
}
}
}
public class TaskExecution {
/** may be null for input tasks */
public final Concept c;
public final Task t;
public int sequence;
public long delayUntil = -1;
private float motivationFactor = 1;
public TaskExecution(final Concept concept, Task t) {
this.c = concept;
//Check if task is
if(Parameters.TEMPORAL_PARTICLE_PLANNER) {
Term term = t.getContent();
if (term instanceof Implication) {
Implication it = (Implication)term;
if ((it.getTemporalOrder() == TemporalRules.ORDER_FORWARD) || (it.getTemporalOrder() == TemporalRules.ORDER_CONCURRENT)) {
if (it.getSubject() instanceof Conjunction) {
t = inlineConjunction(t, (Conjunction)it.getSubject());
}
}
}
else if (term instanceof Conjunction) {
t = inlineConjunction(t, (Conjunction)term);
}
}
this.t = t;
}
protected Task inlineConjunction(Task t, final Conjunction c) {
ArrayDeque<Term> inlined = new ArrayDeque();
boolean modified = false;
if (c.operator() == Symbols.NativeOperator.SEQUENCE) {
Term prev = null;
for (Term e : c.term) {
if (!isPlanTerm(e)) {
if (graph.isPlannable(e)) {
TreeSet<ParticlePlan> plans = graph.particlePlan(e, inlineSearchDepth, inlineParticles);
if (plans.size() > 0) {
//use the first
ParticlePlan pp = plans.first();
//if terms precede this one, remove a common prefix
//scan from the end of the sequence backward until a term matches the previous, and splice it there
//TODO more rigorous prefix compraison. compare sublist prefix
List<Term> seq = pp.sequence;
// if (prev!=null) {
// int previousTermIndex = pp.sequence.lastIndexOf(prev);
//
// if (previousTermIndex!=-1) {
// if (previousTermIndex == seq.size()-1)
// seq = Collections.EMPTY_LIST;
// else {
// seq = seq.subList(previousTermIndex+1, seq.size());
// }
// }
// }
//System.out.println("inline: " + pp.sequence + " -> " + seq);
inlined.addAll(seq);
//System.err.println("Inline " + e + " in " + t.getContent() + " = " + pp.sequence);
modified = true;
}
else {
//no plan available, this wont be able to execute
setMotivationFactor(0);
}
}
else {
//this won't be able to execute here
setMotivationFactor(0);
}
}
else {
//executable term, add
inlined.add(e);
}
prev = e;
}
}
//remove suffix intervals
if (inlined.size() > 0) {
while (inlined.peekLast() instanceof Interval) {
inlined.removeLast();
modified = true;
}
}
if (inlined.isEmpty())
setMotivationFactor(0);
if (modified) {
Conjunction nc = c.cloneReplacingTerms(inlined.toArray(new Term[inlined.size()]));
t = t.clone(t.sentence.clone(nc) );
}
return t;
}
@Override public boolean equals(final Object obj) {
if (obj instanceof TaskExecution) {
return ((TaskExecution)obj).t.equals(t);
}
return false;
}
public final float getDesire() {
return t.getDesire().getExpectation() * motivationFactor;
}
public final float getPriority() { return t.getPriority(); }
public final float getDurability() { return t.getDurability(); }
//public final float getMotivation() { return getDesire() * getPriority() * motivationFactor; }
public final void setMotivationFactor(final float f) { this.motivationFactor = f; }
@Override public int hashCode() { return t.hashCode(); }
@Override
public String toString() {
return "!" + Texts.n2Slow(getDesire()) + "." + sequence + "! " + t.toString();
}
}
protected TaskExecution getExecution(final Task parent) {
for (final TaskExecution t : tasks) {
if (t.t.parentTask!=null)
if (t.t.parentTask.equals(parent))
return t;
}
return null;
}
public boolean addTask(final Concept c, final Task t) {
TaskExecution existingExecutable = getExecution(t.parentTask);
boolean valid = true;
if (existingExecutable!=null) {
//TODO compare motivation (desire * priority) instead?
//if the new task for the existin goal has a lower priority, ignore it
if (existingExecutable.getDesire() > t.getDesire().getExpectation()) {
//System.out.println("ignored lower priority task: " + t + " for parent " + t.parentTask);
valid = false;
}
//do not allow interrupting a lower priority, but already executing task
//TODO allow interruption if priority difference is above some threshold
if (existingExecutable.sequence > 0) {
//System.out.println("ignored late task: " + t + " for parent " + t.parentTask);
valid = false;
}
}
if (valid) {
if(!occured && this.expected_task!=null && ended) {
expected_task.expect(false); //ok this one didnt get his expectation
}
occured=false; //only bad to not happened not interrupted ones
ended=false;
final TaskExecution te = new TaskExecution(c, t);
if (tasks.add(te)) {
//added successfully
memory.emit(TaskExecution.class, te);
return true;
}
}
//t.end();
return false;
}
protected void removeTask(final TaskExecution t) {
if (tasksToRemove.add(t)) {
// if (memory.getRecorder().isActive())
// memory.getRecorder().output("Executive", "Task Remove: " + t.toString());
t.t.end();
}
}
protected void updateTasks() {
List<TaskExecution> t = new ArrayList(tasks);
t.removeAll(tasksToRemove);
tasks.clear();
for (TaskExecution x : t) {
if (x.getDesire() > 0) { // && (x.getPriority() > 0)) {
tasks.add(x);
//this is incompatible with the other usages of motivationFactor, so do not use this:
// if ((x.delayUntil!=-1) && (x.delayUntil <= memory.getTime())) {
// //restore motivation so task can resume processing
// x.motivationFactor = 1.0f;
// }
}
}
tasksToRemove.clear();
}
// public void manageExecution() {
//
// if (next.isEmpty()) {
// return;
// }
//
// TaskConceptContent n = next.pollFirst();
//
//
// if (n.task==null) {
// //we have to wait
// return;
// }
//
// if (!(n.content instanceof Operation)) {
// throw new RuntimeException("manageExecution: Term content is not Operation: " + n.content);
// }
//
// System.out.println("manageExecution: " + n.task);
//
// //ok it is time for action:
// execute((Operation)n.content, n.concept, n.task, true);
// }
protected void execute(final Operation op, final Task task) {
Operator oper = op.getOperator();
//if (NAR.DEBUG)
//System.out.println("exe: " + task.getExplanation().trim());
op.setTask(task);
oper.call(op, memory);
//task.end(true);
}
public void decisionPlanning(final NAL nal, final Task t, final Concept concept) {
if (Parameters.TEMPORAL_PARTICLE_PLANNER) {
if (!isDesired(concept)) return;
boolean plannable = graph.isPlannable(t.getContent());
if (plannable) {
graph.plan(nal, concept, t, t.getContent(), particles, searchDepth, '!', maxPlannedTasks);
}
}
}
/** Entry point for all potentially executable tasks */
public void decisionMaking(final Task t, final Concept concept) {
if (isDesired(concept)) {
Term content = concept.term;
if (content instanceof Operation) {
addTask(concept, t);
}
else if (isSequenceConjunction(content)) {
addTask(concept, t);
}
}
else {
//t.end();
}
}
/** whether a concept's desire exceeds decision threshold */
public boolean isDesired(final Concept c) {
return (c.getDesire().getExpectation() >= memory.param.decisionThreshold.get());
}
/**
 * Called during each memory cycle: throttles execution frequency, refreshes the
 * executable-task buffer and sensors, then dispatches the highest-priority task.
 * Dispatch handles three shapes: a bare Operation (executed directly), a
 * SEQUENCE Conjunction (stepped via executeConjunctionSequence), and a
 * forward/concurrent Implication whose subject is one of the former two.
 */
public void cycle() {
    long now = memory.time();

    // rate limit: only execute at most maxExecutionsPerDuration times per duration
    if (now - lastExecution < (memory.param.duration.get()/maxExecutionsPerDuration) )
        return;

    lastExecution = now;

    // decay relevancy of causal edges in the plan graph each execution cycle
    graph.implication.multiplyRelevancy(causeRelevancyFactor);

    updateTasks();
    updateSensors();

    if (tasks.isEmpty())
        return;

    // NOTE(review): the DEBUG guard is commented out, so this diagnostic
    // printing runs unconditionally every execution cycle — confirm intended.
    /*if (NAR.DEBUG)*/ {
        //TODO make a print function
        if (tasks.size() > 1) {
            System.out.println("Tasks @ " + memory.time());
            for (TaskExecution tcc : tasks)
                System.out.println("  " + tcc.toString());
        }
        else {
            System.out.println("Task @ " + memory.time() + ": " + tasks.get(0));
        }
    }

    // dispatch only the single highest-priority executable task per cycle
    TaskExecution topExecution = tasks.getFirst();
    Task top = topExecution.t;
    Term term = top.getContent();
    if (term instanceof Operation) {
        execute((Operation)term, top); //directly execute
        removeTask(topExecution);
        return;
    }
    else if (term instanceof Conjunction) {
        Conjunction c = (Conjunction)term;
        if (c.operator() == Symbols.NativeOperator.SEQUENCE) {
            executeConjunctionSequence(topExecution, c);
            return;
        }
    }
    else if (term instanceof Implication) {
        Implication it = (Implication)term;
        if ((it.getTemporalOrder() == TemporalRules.ORDER_FORWARD) || (it.getTemporalOrder() == TemporalRules.ORDER_CONCURRENT)) {
            if (it.getSubject() instanceof Conjunction) {
                Conjunction c = (Conjunction)it.getSubject();
                if (c.operator() == Symbols.NativeOperator.SEQUENCE) {
                    executeConjunctionSequence(topExecution, c);
                    return;
                }
            }
            else if (it.getSubject() instanceof Operation) {
                execute((Operation)it.getSubject(), top); //directly execute
                removeTask(topExecution);
                return;
            }
        }
        // NOTE(review): any Implication that falls through the guards above
        // (e.g. non-forward temporal order) reaches this throw — confirm that
        // non-forward implications can never be enqueued as executable tasks.
        throw new RuntimeException("Unrecognized executable term: " + it.getSubject() + "[" + it.getSubject().getClass() + "] from " + top);
    }
    else {
        //throw new RuntimeException("Unknown Task type: "+ top);
    }

//        //Example prediction
//        if (memory.getCurrentBelief()!=null) {
//            Term currentTerm = memory.getCurrentBelief().content;
//            if (implication.containsVertex(currentTerm)) {
//                particlePredict(currentTerm, 12, particles);
//            }
//        }
}
/** A term can appear as a plan element iff it is a timing Interval or an Operation. */
public static boolean isPlanTerm(final Term t) {
    if (t instanceof Interval)
        return true;
    return t instanceof Operation;
}
/** A term is directly executable iff it is an Operation or a SEQUENCE conjunction. */
public static boolean isExecutableTerm(final Term t) {
    if (t instanceof Operation)
        return true;
    return isSequenceConjunction(t);
}
/** True iff the term is a Conjunction whose operator is the temporal SEQUENCE (&amp;/). */
public static boolean isSequenceConjunction(final Term c) {
    if (!(c instanceof Conjunction))
        return false;
    return ((Conjunction) c).operator() == Symbols.NativeOperator.SEQUENCE;
}
// Task whose executed sequence implies an outcome we are still waiting to observe.
public Task expected_task=null;
// The predicted outcome (the implication's predicate) of expected_task.
public Term expected_event=null;
// Whether the most recent executeConjunctionSequence call completed its sequence.
boolean ended=false;
/**
 * Advances execution of a SEQUENCE conjunction (&/ a1 ... an) by one step.
 * Operations are executed and given one duration of settling time; Intervals
 * only schedule a delay. When the last step completes, the task is ended,
 * its predicted outcome (for Implication tasks) is remembered for feedback,
 * and the task is removed from the execution buffer.
 *
 * @param task the execution record tracking progress (sequence index, delay)
 * @param c    the sequence conjunction being executed
 */
private void executeConjunctionSequence(final TaskExecution task, final Conjunction c) {
    int s = task.sequence;
    Term currentTerm = c.term[s];

    long now = memory.time();
    if (task.delayUntil > now) {
        // a previously scheduled delay has not yet elapsed; try again next cycle
        return;
    }

    if (currentTerm instanceof Operation) {
        execute((Operation)currentTerm, task.t);
        // give the operation one duration's time to take effect before the next step
        task.delayUntil = now + memory.param.duration.get();
        s++;
    }
    else if (currentTerm instanceof Interval) {
        Interval ui = (Interval)currentTerm;
        task.delayUntil = memory.time() + Interval.magnitudeToTime(ui.magnitude, memory.param.duration);
        s++;
    }
    else {
        System.err.println("Non-executable term in sequence: " + currentTerm + " in " + c + " from task " + task.t);
        removeTask(task);
        ended = false;
        // fix: stop here instead of falling through and mutating a task that
        // was just removed from the execution buffer
        return;
    }

    if (s == c.term.length) {
        // completed the whole sequence
        ended = true;
        task.t.end(true);

        if (task.t.sentence.content instanceof Implication) {
            // remember the predicted outcome so later events can confirm it
            expected_task = task.t;
            expected_event = ((Implication)task.t.sentence.content).getPredicate();
        }

        removeTask(task);
        task.sequence = 0;
    }
    else {
        // still incomplete: persist progress for the next cycle
        ended = false;
        task.sequence = s;
        //task.setMotivationFactor(motivationToFinishCurrentExecution);
    }
}
/**
 * Checks every tracked predictive statement (a =/> b, or (&/ a1..an) =/> b)
 * against the recent event history (lastEvents): when a statement's antecedent
 * sequence matches the observed events, its truth is revised upward if the
 * consequent also occurred, and downward otherwise.
 */
public void temporalPredictionsAdapt() {
    for(Task c : current_tasks) { //a =/> b or (&/ a1...an) =/> b
        // default: treat the whole subject as a single antecedent step
        Term[] args=new Term[1];
        Implication imp=(Implication) c.getContent();
        args[0]=imp.getSubject();

        if(imp.getSubject() instanceof Conjunction) {
            Conjunction conj=(Conjunction) imp.getSubject();
            if(conj.temporalOrder==TemporalRules.ORDER_FORWARD) {
                args=conj.term; //in case of &/ this are the terms
            }
        }

        int i=0;
        boolean matched=true;
        int off=0;  // counts skipped Interval terms so event indices stay aligned
        for(i=0;i<args.length;i++) {
            //just matching order for now, todo taking temporal time into account
            //ok lets match the sequences:
            if(args[i] instanceof Interval) {
                off++;
                continue;
            }
            // NOTE(review): args[i] is a Term while lastEvents appears to hold
            // Task elements (see inductionOnSucceedingEvents) — confirm the
            // element type, otherwise equals() here could never match.
            if(!args[i].equals(lastEvents.get(i-off))) {
                matched=false;
                break;
            }
        }

        //ok it matched, is the consequence also right?
        if(matched) {
            // NOTE(review): the consequent is read at index args.length without
            // subtracting the interval offset or checking lastEvents' size —
            // verify this cannot go out of bounds for short histories.
            if(imp.getPredicate().equals(lastEvents.get(args.length))) { //it matched and same consequence, so positive evidence
                c.sentence.truth=TruthFunctions.revision(c.sentence.truth, new TruthValue(1.0f,Parameters.DEFAULT_JUDGMENT_CONFIDENCE));
            } else { //it matched and other consequence, so negative evidence
                c.sentence.truth=TruthFunctions.revision(c.sentence.truth, new TruthValue(0.0f,Parameters.DEFAULT_JUDGMENT_CONFIDENCE));
            } //todo use derived task with revision instead
        }
    }
}
// Most recent event task seen by inductionOnSucceedingEvents (short-term memory head).
public Task stmLast=null;
// Whether the last expected event was actually observed (feedback flag).
boolean occured=false;
/**
 * Temporal induction over successive events: relates the incoming event to the
 * previously seen event (stmLast) via TemporalRules.temporalInduction, confirms
 * any pending expected outcome of an executed plan, and appends the event to
 * the bounded short-term event memory consumed by temporalPredictionsAdapt().
 *
 * @param newEvent candidate event task
 * @param nal inference context used for stamp/task/belief bookkeeping
 * @return false if the event is rejected (null, eternal, not input/triggered,
 *         or a structural duplicate of the previous event); true otherwise
 */
public boolean inductionOnSucceedingEvents(final Task newEvent, NAL nal) {
    // only temporally anchored input (or operation-caused) events participate
    if (newEvent == null || newEvent.sentence.stamp.getOccurrenceTime()==Stamp.ETERNAL || !isInputOrTriggeredOperation(newEvent,nal.mem))
        return false;

    if (stmLast!=null) {
        // skip events structurally identical to the previous one
        if(equalSubTermsInRespectToImageAndProduct(newEvent.sentence.content,stmLast.sentence.content)) {
            return false;
        }

        nal.setTheNewStamp(newEvent.sentence.stamp, stmLast.sentence.stamp, memory.time());
        nal.setCurrentTask(newEvent);

        Sentence currentBelief = stmLast.sentence;
        nal.setCurrentBelief(currentBelief);

        // only sufficiently prioritized events trigger temporal induction
        if(newEvent.getPriority()>Parameters.TEMPORAL_INDUCTION_MIN_PRIORITY) {
            TemporalRules.temporalInduction(newEvent.sentence, currentBelief, nal);
        }
    }

    //for this heuristic, only use input events & task effects of operations
    if(newEvent.getPriority()>Parameters.TEMPORAL_INDUCTION_MIN_PRIORITY) {
        // confirm the predicted outcome of a previously executed plan, if any
        if(Parameters.TEMPORAL_PARTICLE_PLANNER && this.expected_event!=null && this.expected_task!=null) {
            if(newEvent.sentence.content.equals(this.expected_event)) {
                this.expected_task.expect(true);
                occured=true;
            } //else {
           //     this.expected_task.expect(false);
           // }
           // this.expected_event=null;
           // this.expected_task=null; //done i think//todo, refine, it could come in a specific time, also +4 on end of a (&/ plan has to be used
        }

        // append to the bounded short-term event memory and re-evaluate predictions
        stmLast=newEvent;
        lastEvents.add(newEvent);
        temporalPredictionsAdapt();
        while(lastEvents.size()>shortTermMemorySize) {
            lastEvents.remove(0);
        }
    }

    return true;
}
/**
 * Accepts an event for temporal processing when it is an input event (or full
 * internal-experience mode is enabled) or when it was caused by an operation
 * triggered by the system itself.
 */
public boolean isInputOrTriggeredOperation(final Task newEvent, Memory mem) {
    final boolean inputLike = newEvent.isInput() || Parameters.INTERNAL_EXPERIENCE_FULL;
    final boolean operationTriggered = newEvent.getCause() != null;
    return inputLike || operationTriggered;
}
/*
public boolean isActionable(final Task newEvent, Memory mem) {
if(!((newEvent.isInput()))) {
return false;
}
Term newcontent=newEvent.sentence.content;
if(newcontent instanceof Operation) {
Term pred=((Operation)newcontent).getPredicate();
if(pred.equals(mem.getOperator("^want")) || pred.equals(mem.getOperator("^believe"))) {
return false;
}
}
return true;
}*/
// public static class TaskConceptContent {
//
// public final Task task;
// public final Concept concept;
// public final Term content;
//
// public static TaskConceptContent NULL = new TaskConceptContent();
//
// /** null placeholder */
// protected TaskConceptContent() {
// this.task = null;
// this.concept = null;
// this.content = null;
// }
//
// public TaskConceptContent(Task task, Concept concept, Term content) {
// this.task = task;
// this.concept = concept;
// this.content = content;
// }
//
// }
/** Publishes plan-graph size and executable-task-count metrics to the logic sensors. */
protected void updateSensors() {
    memory.logic.PLAN_GRAPH_EDGE.commit(graph.implication.edgeSet().size());
    memory.logic.PLAN_GRAPH_VERTEX.commit(graph.implication.vertexSet().size());
    memory.logic.PLAN_TASK_EXECUTABLE.commit(tasks.size());
}
}
|
commit. a fix of match (so not only negative adjustment). TODO: Temporal
|
nars_java/nars/inference/Executive.java
|
commit. a fix of match (so not only negative adjustment). TODO: Temporal
|
|
Java
|
agpl-3.0
|
53f509f62cb4072a341eedda2ae83fe256c1556f
| 0
|
duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test
|
3a088684-2e61-11e5-9284-b827eb9e62be
|
hello.java
|
3a02e3a0-2e61-11e5-9284-b827eb9e62be
|
3a088684-2e61-11e5-9284-b827eb9e62be
|
hello.java
|
3a088684-2e61-11e5-9284-b827eb9e62be
|
|
Java
|
lgpl-2.1
|
2d99054226ae728cffe7d407dfff1b2b800089cc
| 0
|
0x0000-dot-ru/jcommune,NCNecros/jcommune,illerax/jcommune,shevarnadze/jcommune,a-nigredo/jcommune,shevarnadze/jcommune,NCNecros/jcommune,Relvl/jcommune,Vitalij-Voronkoff/jcommune,0x0000-dot-ru/jcommune,a-nigredo/jcommune,SurfVaporizer/jcommune,oatkachenko/jcommune,Noctrunal/jcommune,jtalks-org/jcommune,vps2/jcommune,mihnayan/jcommune,SurfVaporizer/jcommune,Noctrunal/jcommune,shevarnadze/jcommune,vps2/jcommune,despc/jcommune,Z00M/jcommune,despc/jcommune,Vitalij-Voronkoff/jcommune,SurfVaporizer/jcommune,oatkachenko/jcommune,CocoJumbo/jcommune,Relvl/jcommune,mihnayan/jcommune,vps2/jcommune,illerax/jcommune,NCNecros/jcommune,despc/jcommune,Z00M/jcommune,jtalks-org/jcommune,CocoJumbo/jcommune,illerax/jcommune,Vitalij-Voronkoff/jcommune,CocoJumbo/jcommune,Relvl/jcommune,oatkachenko/jcommune,jtalks-org/jcommune,Noctrunal/jcommune,mihnayan/jcommune,Z00M/jcommune,a-nigredo/jcommune
|
/**
* Copyright (C) 2011 JTalks.org Team
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
package org.jtalks.jcommune.service.transactional;
import org.jtalks.common.model.entity.Component;
import org.jtalks.jcommune.model.dao.ExternalLinkDao;
import org.jtalks.jcommune.model.entity.ExternalLink;
import org.jtalks.jcommune.service.ExternalLinkService;
import org.springframework.security.access.prepost.PreAuthorize;
import java.util.List;
/**
 * Transactional service for managing forum-wide external links. All methods
 * delegate to {@link ExternalLinkDao}; write operations require component
 * ADMIN permission, enforced declaratively via {@code @PreAuthorize}.
 *
 * @author Alexandre Teterin
 *         Date: 03.02.13
 */
public class TransactionalExternalLinkService extends AbstractTransactionalEntityService<ExternalLink, ExternalLinkDao>
        implements ExternalLinkService {

    /**
     * Subclass may use this constructor to store entity DAO or parent
     * entity DAO if necessary
     *
     * @param dao subclass-provided dao object
     */
    public TransactionalExternalLinkService(ExternalLinkDao dao) {
        super(dao);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public List<ExternalLink> getLinks() {
        return getDao().getAll();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    @PreAuthorize("hasPermission(#forumComponent.id, 'COMPONENT', 'GeneralPermission.ADMIN')")
    public void saveLink(ExternalLink link, Component forumComponent) {
        getDao().saveOrUpdate(link);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    @PreAuthorize("hasPermission(#forumComponent.id, 'COMPONENT', 'GeneralPermission.ADMIN')")
    public boolean deleteLink(long id, Component forumComponent) {
        return getDao().delete(id);
    }
}
|
jcommune-service/src/main/java/org/jtalks/jcommune/service/transactional/TransactionalExternalLinkService.java
|
/**
* Copyright (C) 2011 JTalks.org Team
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
package org.jtalks.jcommune.service.transactional;
import org.jtalks.common.model.entity.Component;
import org.jtalks.jcommune.model.dao.ExternalLinkDao;
import org.jtalks.jcommune.model.entity.ExternalLink;
import org.jtalks.jcommune.service.ExternalLinkService;
import org.springframework.security.access.prepost.PreAuthorize;
import java.util.List;
/**
* @author Alexandre Teterin
* Date: 03.02.13
*/
public class TransactionalExternalLinkService extends AbstractTransactionalEntityService<ExternalLink, ExternalLinkDao>
implements ExternalLinkService {
/**
* Subclass may use this constructor to store entity DAO or parent
* entity DAO if necessary
*
* @param dao subclass-provided dao object
*/
public TransactionalExternalLinkService(ExternalLinkDao dao) {
super(dao);
}
@Override
public List<ExternalLink> getLinks() {
return getDao().getAll();
}
@Override
@PreAuthorize("hasPermission(#forumComponent.id, 'COMPONENT', 'GeneralPermission.ADMIN')")
public void saveLink(ExternalLink link, Component forumComponent) {
getDao().saveOrUpdate(link);
}
@Override
@PreAuthorize("hasPermission(#forumComponent.id, 'COMPONENT', 'GeneralPermission.ADMIN')")
public boolean deleteLink(long id, Component forumComponent) {
return getDao().delete(id);
}
}
|
#JC-1251 Added service javadoc.
|
jcommune-service/src/main/java/org/jtalks/jcommune/service/transactional/TransactionalExternalLinkService.java
|
#JC-1251 Added service javadoc.
|
|
Java
|
lgpl-2.1
|
8e6007983fd1e43f1051f4fb1711f10d0d1b4a88
| 0
|
zwobit/exist,wolfgangmm/exist,ljo/exist,dizzzz/exist,ambs/exist,jessealama/exist,hungerburg/exist,jensopetersen/exist,wolfgangmm/exist,hungerburg/exist,olvidalo/exist,RemiKoutcherawy/exist,joewiz/exist,opax/exist,patczar/exist,lcahlander/exist,jessealama/exist,wshager/exist,eXist-db/exist,windauer/exist,dizzzz/exist,wolfgangmm/exist,jensopetersen/exist,adamretter/exist,MjAbuz/exist,adamretter/exist,dizzzz/exist,ambs/exist,windauer/exist,windauer/exist,opax/exist,RemiKoutcherawy/exist,olvidalo/exist,shabanovd/exist,RemiKoutcherawy/exist,jensopetersen/exist,ambs/exist,eXist-db/exist,wolfgangmm/exist,joewiz/exist,wshager/exist,joewiz/exist,hungerburg/exist,ljo/exist,ljo/exist,joewiz/exist,lcahlander/exist,hungerburg/exist,MjAbuz/exist,hungerburg/exist,dizzzz/exist,zwobit/exist,olvidalo/exist,MjAbuz/exist,ambs/exist,ambs/exist,zwobit/exist,opax/exist,shabanovd/exist,adamretter/exist,patczar/exist,adamretter/exist,RemiKoutcherawy/exist,lcahlander/exist,shabanovd/exist,joewiz/exist,wshager/exist,lcahlander/exist,wshager/exist,jensopetersen/exist,jensopetersen/exist,wshager/exist,joewiz/exist,jessealama/exist,olvidalo/exist,patczar/exist,windauer/exist,ljo/exist,wshager/exist,windauer/exist,jessealama/exist,kohsah/exist,patczar/exist,RemiKoutcherawy/exist,zwobit/exist,kohsah/exist,jessealama/exist,MjAbuz/exist,lcahlander/exist,olvidalo/exist,adamretter/exist,ljo/exist,wolfgangmm/exist,MjAbuz/exist,shabanovd/exist,lcahlander/exist,shabanovd/exist,eXist-db/exist,eXist-db/exist,ljo/exist,zwobit/exist,eXist-db/exist,wolfgangmm/exist,RemiKoutcherawy/exist,patczar/exist,jessealama/exist,MjAbuz/exist,ambs/exist,patczar/exist,kohsah/exist,jensopetersen/exist,shabanovd/exist,opax/exist,zwobit/exist,adamretter/exist,kohsah/exist,opax/exist,kohsah/exist,windauer/exist,dizzzz/exist,kohsah/exist,eXist-db/exist,dizzzz/exist
|
/*
* eXist Open Source Native XML Database
* Copyright (C) 2001, Wolfgang M. Meier (meier@ifs.tu-darmstadt.de)
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*
* $Id$
*/
package org.exist;
import org.exist.cluster.ClusterComunication;
import org.exist.cluster.ClusterException;
import org.exist.storage.BrokerPool;
import org.exist.util.Configuration;
import org.exist.util.SingleInstanceConfiguration;
import org.exist.validation.XmlLibraryChecker;
import org.exist.xmldb.DatabaseImpl;
import org.exist.xmldb.ShutdownListener;
import org.mortbay.http.HttpContext;
import org.mortbay.http.HttpListener;
import org.mortbay.jetty.Server;
import org.xmldb.api.DatabaseManager;
import org.xmldb.api.base.Database;
import java.util.Timer;
import java.util.TimerTask;
import org.mortbay.util.MultiException;
/**
 * This class provides a main method to start Jetty with eXist. It registers shutdown
 * handlers to cleanly shut down the database and the webserver.
 * If database is NATIVE-CLUSTER, Clustercomunication is configured and started.
 *
 * @author wolf
 */
public class JettyStart {

    /** Command-line entry point; args[0] is the Jetty configuration file path. */
    public static void main(String[] args) {
        JettyStart start = new JettyStart();
        start.run(args);
    }

    public JettyStart() {
        // Additional checks XML libs @@@@
        XmlLibraryChecker.check();
    }

    /**
     * Configures the database instance from the optional second argument,
     * registers the XMLDB driver, then starts Jetty from args[0] and (unless
     * disabled via -Dexist.register-shutdown-hook=false) installs a JVM
     * shutdown hook for a clean stop.
     */
    public void run(String[] args) {
        if (args.length == 0) {
            System.out.println("No configuration file specified!");
            return;
        }

        // opt-out switch for the JVM shutdown hook (enabled by default)
        String shutdownHookOption = System.getProperty("exist.register-shutdown-hook", "true");
        boolean registerShutdownHook = shutdownHookOption.equals("true");

        // configure database
        System.out.println("Configuring eXist from " + SingleInstanceConfiguration.getPath());
        try {
            // we register our own shutdown hook
            BrokerPool.setRegisterShutdownHook(false);

            // configure the database instance
            SingleInstanceConfiguration config;
            if (args.length == 2)
                config = new SingleInstanceConfiguration(args[1]);
            else
                config = new SingleInstanceConfiguration();
            BrokerPool.configure(1, 5, config);

            // register the XMLDB driver
            Database xmldb = new DatabaseImpl();
            xmldb.setProperty("create-database", "false");
            DatabaseManager.registerDatabase(xmldb);

            configureCluster(config);
        } catch (Exception e) {
            System.err.println("configuration error: " + e.getMessage());
            e.printStackTrace();
            return;
        }

        // start Jetty
        final Server server;
        int port = 8080;  // default; replaced below by the first configured listener's port
        try {
            server = new Server(args[0]);

            BrokerPool.getInstance().registerShutdownListener(new ShutdownListenerImpl(server));
            server.start();

            HttpListener[] listeners = server.getListeners();
            if (listeners.length > 0)
                port = listeners[0].getPort();

            HttpContext[] contexts = server.getContexts();

            System.out.println("-----------------------------------------------------");
            System.out.println("Server has started on port " + port + ". Configured contexts:");
            for (int i = 0; i < contexts.length; i++) {
                System.out.println("http://localhost:" + port + contexts[i].getContextPath());
            }
            System.out.println("-----------------------------------------------------");

            if (registerShutdownHook) {
                // register a shutdown hook for the server
                Thread hook = new Thread() {
                    public void run() {
                        setName("Shutdown");
                        BrokerPool.stopAll(true);
                        try {
                            server.stop();
                        } catch (InterruptedException e) {
                            // ignored: the JVM is already shutting down
                        }
                        try {
                            Thread.sleep(1000);
                        } catch (Exception e) {
                            e.printStackTrace();
                        }
                    }
                };
                Runtime.getRuntime().addShutdownHook(hook);
            }
        } catch (MultiException e) {
            // Jetty wraps startup failures; surface a port-in-use BindException readably
            boolean hasBindException=false;
            for(Object t : e.getExceptions()){
                if(t instanceof java.net.BindException){
                    hasBindException=true;
                    System.out.println("----------------------------------------------------------");
                    System.out.println("ERROR: Could not start jetty, port "
                            + port + " is already in use. ");
                    System.out.println(t.toString());
                    System.out.println("----------------------------------------------------------");
                }
            }
            // If it is another error, print stacktrace
            if(!hasBindException){
                e.printStackTrace();
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /** Stops all database instances without exiting the JVM. */
    public void shutdown() {
        BrokerPool.stopAll(false);
    }

    /**
     * This class gets called after the database received a shutdown request.
     *
     * @author wolf
     */
    private static class ShutdownListenerImpl implements ShutdownListener {

        private Server server;

        public ShutdownListenerImpl(Server server) {
            this.server = server;
        }

        public void shutdown(String dbname, int remainingInstances) {
            System.err.println("Database shutdown: stopping server in 1sec ...");
            if (remainingInstances == 0) {
                // give the webserver a 1s chance to complete open requests
                Timer timer = new Timer();
                timer.schedule(new TimerTask() {
                    public void run() {
                        try {
                            // stop the server
                            server.stop();
                            ClusterComunication cluster = ClusterComunication.getInstance();
                            if(cluster!=null){
                                cluster.stop();
                            }
                        } catch (InterruptedException e) {
                            e.printStackTrace();
                        }
                        System.exit(0);
                    }
                }, 1000);
            }
        }
    }

    /** Starts cluster communication iff the configured database is NATIVE_CLUSTER. */
    private void configureCluster(Configuration c) throws ClusterException {
        String database = (String)c.getProperty("database");
        if(! database.equalsIgnoreCase("NATIVE_CLUSTER"))
            return;
        ClusterComunication.configure(c);
    }
}
|
src/org/exist/JettyStart.java
|
/*
* eXist Open Source Native XML Database
* Copyright (C) 2001, Wolfgang M. Meier (meier@ifs.tu-darmstadt.de)
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*
* $Id$
*/
package org.exist;
import org.exist.cluster.ClusterComunication;
import org.exist.cluster.ClusterException;
import org.exist.storage.BrokerPool;
import org.exist.util.Configuration;
import org.exist.util.SingleInstanceConfiguration;
import org.exist.validation.XmlLibraryChecker;
import org.exist.xmldb.DatabaseImpl;
import org.exist.xmldb.ShutdownListener;
import org.mortbay.http.HttpContext;
import org.mortbay.http.HttpListener;
import org.mortbay.jetty.Server;
import org.xmldb.api.DatabaseManager;
import org.xmldb.api.base.Database;
import java.util.Timer;
import java.util.TimerTask;
/**
* This class provides a main method to start Jetty with eXist. It registers shutdown
* handlers to cleanly shut down the database and the webserver.
* If database is NATIVE-CLUSTER, Clustercomunication is configured and started.
*
* @author wolf
*/
public class JettyStart {
public static void main(String[] args) {
JettyStart start = new JettyStart();
start.run(args);
}
public JettyStart() {
// Additional checks XML libs @@@@
XmlLibraryChecker.check();
}
public void run(String[] args) {
if (args.length == 0) {
System.out.println("No configuration file specified!");
return;
}
String shutdownHookOption = System.getProperty("exist.register-shutdown-hook", "true");
boolean registerShutdownHook = shutdownHookOption.equals("true");
// configure database
System.out.println("Configuring eXist from " + SingleInstanceConfiguration.getPath());
try {
// we register our own shutdown hook
BrokerPool.setRegisterShutdownHook(false);
// configure the database instance
SingleInstanceConfiguration config;
if (args.length == 2)
config = new SingleInstanceConfiguration(args[1]);
else
config = new SingleInstanceConfiguration();
BrokerPool.configure(1, 5, config);
// register the XMLDB driver
Database xmldb = new DatabaseImpl();
xmldb.setProperty("create-database", "false");
DatabaseManager.registerDatabase(xmldb);
configureCluster(config);
} catch (Exception e) {
System.err.println("configuration error: " + e.getMessage());
e.printStackTrace();
return;
}
// start Jetty
final Server server;
try {
server = new Server(args[0]);
BrokerPool.getInstance().registerShutdownListener(new ShutdownListenerImpl(server));
server.start();
HttpListener[] listeners = server.getListeners();
int port = 8080;
if (listeners.length > 0)
port = listeners[0].getPort();
HttpContext[] contexts = server.getContexts();
System.out.println("-----------------------------------------------------");
System.out.println("Server has started on port " + port + ". Configured contexts:");
for (int i = 0; i < contexts.length; i++) {
System.out.println("http://localhost:" + port + contexts[i].getContextPath());
}
System.out.println("-----------------------------------------------------");
if (registerShutdownHook) {
// register a shutdown hook for the server
Thread hook = new Thread() {
public void run() {
setName("Shutdown");
BrokerPool.stopAll(true);
try {
server.stop();
} catch (InterruptedException e) {
}
try {
Thread.sleep(1000);
} catch (Exception e) {
e.printStackTrace();
}
}
};
Runtime.getRuntime().addShutdownHook(hook);
}
} catch (Exception e) {
e.printStackTrace();
}
}
public void shutdown() {
BrokerPool.stopAll(false);
}
/**
* This class gets called after the database received a shutdown request.
*
* @author wolf
*/
private static class ShutdownListenerImpl implements ShutdownListener {
private Server server;
public ShutdownListenerImpl(Server server) {
this.server = server;
}
public void shutdown(String dbname, int remainingInstances) {
System.err.println("Database shutdown: stopping server in 1sec ...");
if (remainingInstances == 0) {
// give the webserver a 1s chance to complete open requests
Timer timer = new Timer();
timer.schedule(new TimerTask() {
public void run() {
try {
// stop the server
server.stop();
ClusterComunication cluster = ClusterComunication.getInstance();
if(cluster!=null){
cluster.stop();
}
} catch (InterruptedException e) {
e.printStackTrace();
}
System.exit(0);
}
}, 1000);
}
}
}
private void configureCluster(Configuration c) throws ClusterException {
String database = (String)c.getProperty("database");
if(! database.equalsIgnoreCase("NATIVE_CLUSTER"))
return;
ClusterComunication.configure(c);
}
}
|
[bugfix] Made error more readable when jetty port cannot be claimed.
svn path=/trunk/eXist/; revision=8617
|
src/org/exist/JettyStart.java
|
[bugfix] Made error more readable when jetty port cannot be claimed.
|
|
Java
|
unlicense
|
745f3f28210701f4eb3c165a272f8f029bcdf79b
| 0
|
indy256/codelibrary,indy256/codelibrary,indy256/codelibrary,indy256/codelibrary
|
package strings;
import java.util.Random;
// https://en.wikipedia.org/wiki/Knuth–Morris–Pratt_algorithm
public class Kmp {
public static int[] prefixFunction(String s) {
int[] p = new int[s.length()];
int k = 0;
for (int i = 1; i < s.length(); i++) {
while (k > 0 && s.charAt(k) != s.charAt(i))
k = p[k - 1];
if (s.charAt(k) == s.charAt(i))
++k;
p[i] = k;
}
return p;
}
public static int findSubstring(String haystack, String needle) {
int m = needle.length();
if (m == 0)
return 0;
int[] p = prefixFunction(needle);
for (int i = 0, k = 0; i < haystack.length(); i++) {
while (k > 0 && needle.charAt(k) != haystack.charAt(i))
k = p[k - 1];
if (needle.charAt(k) == haystack.charAt(i))
++k;
if (k == m)
return i + 1 - m;
}
return -1;
}
// random tests
public static void main(String[] args) {
Random rnd = new Random(1);
for (int step = 0; step < 10_000; step++) {
String s = getRandomString(rnd, 100);
String pattern = getRandomString(rnd, 5);
int pos1 = findSubstring(s, pattern);
int pos2 = s.indexOf(pattern);
if (pos1 != pos2)
throw new RuntimeException();
}
}
static String getRandomString(Random rnd, int maxlen) {
int n = rnd.nextInt(maxlen);
char[] s = new char[n];
for (int i = 0; i < n; i++)
s[i] = (char) ('a' + rnd.nextInt(3));
return new String(s);
}
}
|
java/src/strings/Kmp.java
|
package strings;
import java.util.Random;
// https://en.wikipedia.org/wiki/Knuth–Morris–Pratt_algorithm
public class Kmp {
public static int[] prefixFunction(String s) {
int[] p = new int[s.length()];
int k = 0;
for (int i = 1; i < s.length(); i++) {
while (k > 0 && s.charAt(k) != s.charAt(i))
k = p[k - 1];
if (s.charAt(k) == s.charAt(i))
++k;
p[i] = k;
}
return p;
}
public static int kmpMatcher(String s, String pattern) {
int m = pattern.length();
if (m == 0)
return 0;
int[] p = prefixFunction(pattern);
for (int i = 0, k = 0; i < s.length(); i++) {
while (k > 0 && pattern.charAt(k) != s.charAt(i))
k = p[k - 1];
if (pattern.charAt(k) == s.charAt(i))
++k;
if (k == m)
return i + 1 - m;
}
return -1;
}
// random tests
public static void main(String[] args) {
Random rnd = new Random(1);
for (int step = 0; step < 10_000; step++) {
String s = getRandomString(rnd, 100);
String pattern = getRandomString(rnd, 5);
int pos1 = kmpMatcher(s, pattern);
int pos2 = s.indexOf(pattern);
if (pos1 != pos2)
throw new RuntimeException();
}
}
static String getRandomString(Random rnd, int maxlen) {
int n = rnd.nextInt(maxlen);
char[] s = new char[n];
for (int i = 0; i < n; i++)
s[i] = (char) ('a' + rnd.nextInt(3));
return new String(s);
}
}
|
update
|
java/src/strings/Kmp.java
|
update
|
|
Java
|
apache-2.0
|
a86724e6575eff7d0998034fd9f9410f139cfff3
| 0
|
changbai1980/maven,cstamas/maven,rogerchina/maven,Mounika-Chirukuri/maven,xasx/maven,josephw/maven,xasx/maven,olamy/maven,vedmishr/demo1,barthel/maven,changbai1980/maven,Mounika-Chirukuri/maven,apache/maven,mcculls/maven,aheritier/maven,lbndev/maven,mizdebsk/maven,atanasenko/maven,cstamas/maven,Mounika-Chirukuri/maven,trajano/maven,barthel/maven,dsyer/maven,mcculls/maven,stephenc/maven,vedmishr/demo1,keith-turner/maven,ChristianSchulte/maven,keith-turner/maven,runepeter/maven-deploy-plugin-2.8.1,pkozelka/maven,rogerchina/maven,Tibor17/maven,njuneau/maven,skitt/maven,runepeter/maven-deploy-plugin-2.8.1,changbai1980/maven,gorcz/maven,wangyuesong/maven,njuneau/maven,pkozelka/maven,mizdebsk/maven,ChristianSchulte/maven,likaiwalkman/maven,apache/maven,lbndev/maven,stephenc/maven,trajano/maven,olamy/maven,mcculls/maven,Distrotech/maven,stephenc/maven,aheritier/maven,Distrotech/maven,cstamas/maven,atanasenko/maven,barthel/maven,wangyuesong0/maven,karthikjaps/maven,apache/maven,skitt/maven,rogerchina/maven,josephw/maven,dsyer/maven,gorcz/maven,josephw/maven,gorcz/maven,likaiwalkman/maven,Tibor17/maven,trajano/maven,kidaa/maven-1,ChristianSchulte/maven,kidaa/maven-1,karthikjaps/maven,olamy/maven,wangyuesong0/maven,karthikjaps/maven,vedmishr/demo1,kidaa/maven-1,wangyuesong/maven,pkozelka/maven,keith-turner/maven,wangyuesong/maven,xasx/maven,njuneau/maven,atanasenko/maven,aheritier/maven,dsyer/maven,lbndev/maven,wangyuesong0/maven,likaiwalkman/maven,mizdebsk/maven,skitt/maven
|
package org.apache.maven.tools.repoclean.translate;
/*
* Copyright 2001-2005 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.apache.maven.model.Build;
import org.apache.maven.model.CiManagement;
import org.apache.maven.model.Contributor;
import org.apache.maven.model.Dependency;
import org.apache.maven.model.Developer;
import org.apache.maven.model.DistributionManagement;
import org.apache.maven.model.IssueManagement;
import org.apache.maven.model.License;
import org.apache.maven.model.MailingList;
import org.apache.maven.model.Model;
import org.apache.maven.model.Notifier;
import org.apache.maven.model.Organization;
import org.apache.maven.model.Plugin;
import org.apache.maven.model.ReportPlugin;
import org.apache.maven.model.Reporting;
import org.apache.maven.model.Repository;
import org.apache.maven.model.Resource;
import org.apache.maven.model.Scm;
import org.apache.maven.model.Site;
import org.apache.maven.model.v3_0_0.UnitTest;
import org.apache.maven.tools.repoclean.report.ReportWriteException;
import org.apache.maven.tools.repoclean.report.Reporter;
import org.codehaus.plexus.logging.AbstractLogEnabled;
import org.codehaus.plexus.util.StringUtils;
import org.codehaus.plexus.util.xml.Xpp3Dom;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Properties;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* @author jdcasey
*/
public class PomV3ToV4Translator
extends AbstractLogEnabled
{
public static final String ROLE = PomV3ToV4Translator.class.getName();
private transient List discoveredPlugins = new ArrayList();
/**
 * Translates a Maven v3 POM model into the equivalent v4 {@link Model}.
 * <p>
 * Legacy v3 POMs may carry a combined {@code id} ("groupId+artifactId"); when the
 * explicit groupId/artifactId are empty they are reconstructed from that id.
 * Translation problems are reported through {@code reporter} rather than thrown;
 * a partially populated (or null-field) model may be returned in that case.
 *
 * @param v3Model  the v3 model to convert
 * @param reporter sink for warnings/errors about untranslatable content
 * @return the translated v4 model (never fully validated here)
 * @throws ReportWriteException if the reporter itself fails to write
 */
public Model translate( org.apache.maven.model.v3_0_0.Model v3Model, Reporter reporter )
throws ReportWriteException
{
try
{
String groupId = format( v3Model.getGroupId() );
String artifactId = format( v3Model.getArtifactId() );
// Fall back to the legacy combined id ("group+artifact") when the explicit
// coordinates are missing.
String id = v3Model.getId();
if ( StringUtils.isNotEmpty( id ) )
{
if ( StringUtils.isEmpty( groupId ) )
{
int plusIdx = id.indexOf( "+" );
if ( plusIdx > -1 )
{
groupId = id.substring( 0, plusIdx );
}
else
{
groupId = id;
}
}
if ( StringUtils.isEmpty( artifactId ) )
{
artifactId = format( id );
}
}
// v3 models may carry the version in either currentVersion or version.
String version = format( v3Model.getCurrentVersion() );
if ( version == null )
{
version = format( v3Model.getVersion() );
}
PomKey pomKey = new PomKey( groupId, artifactId, version );
warnOfUnsupportedMainModelElements( v3Model, reporter );
Model model = null;
try
{
model = new Model();
model.setArtifactId( artifactId );
// moved this above the translation of the build, to allow
// additional plugins to be defined in v3 poms via
// <dependency><type>plugin</type></dependency>
model.setDependencies( translateDependencies( v3Model.getDependencies() ) );
model.setBuild( translateBuild( v3Model.getBuild(), reporter ) );
model.setCiManagement( translateCiManagementInfo( v3Model.getBuild() ) );
model.setContributors( translateContributors( v3Model.getContributors() ) );
model.setDescription( v3Model.getDescription() );
model.setDevelopers( translateDevelopers( v3Model.getDevelopers() ) );
model.setDistributionManagement( translateDistributionManagement( pomKey, v3Model ) );
model.setGroupId( groupId );
model.setInceptionYear( v3Model.getInceptionYear() );
model.setIssueManagement( translateIssueManagement( v3Model ) );
model.setLicenses( translateLicenses( v3Model.getLicenses() ) );
model.setMailingLists( translateMailingLists( v3Model.getMailingLists() ) );
model.setModelVersion( "4.0.0" );
model.setName( v3Model.getName() );
model.setOrganization( translateOrganization( v3Model.getOrganization(), reporter ) );
model.setPackaging( "jar" );
// TODO: not very good conversion - just omit for now
// model.setReporting( translateReports( v3Model.getReports(), reporter ) );
model.setScm( translateScm( v3Model ) );
model.setUrl( v3Model.getUrl() );
model.setVersion( version );
}
catch ( PomTranslationException e )
{
// Translation errors are reported, not propagated; the caller gets whatever
// portion of the model was built before the failure.
reporter.error( "Invalid POM detected. Cannot translate.", e );
}
return model;
}
finally
{
// translateDependencies() accumulates plugin-typed dependencies into this
// instance field for translateBuild() to consume; reset so state never leaks
// across translate() invocations.
this.discoveredPlugins.clear();
}
}
/**
 * Normalizes a legacy v3 identifier by replacing '+' separators with '-'.
 * Returns null when the input is null.
 */
private String format( String source )
{
    if ( source == null )
    {
        return null;
    }
    return source.replace( '+', '-' );
}
/**
 * Derives a v4 CiManagement section from the v3 build's nag email address,
 * mapped to a single mail-type notifier. Returns null when no build or no
 * address is present.
 */
private CiManagement translateCiManagementInfo( org.apache.maven.model.v3_0_0.Build v3Build )
{
    if ( v3Build == null )
    {
        return null;
    }

    String nagAddress = v3Build.getNagEmailAddress();
    if ( StringUtils.isEmpty( nagAddress ) )
    {
        return null;
    }

    Notifier mailNotifier = new Notifier();
    mailNotifier.setAddress( nagAddress );
    mailNotifier.setType( "mail" );

    CiManagement ciManagement = new CiManagement();
    ciManagement.addNotifier( mailNotifier );
    return ciManagement;
}
/**
 * Emits one warning per top-level v3 model element that has no v4 equivalent
 * (extend, gump id, versions, branches, properties, package, packageGroups,
 * logo, shortDescription). Warnings are written in a fixed order via the
 * reporter; nothing is modified on the model.
 *
 * @throws ReportWriteException if the reporter fails to write
 */
private void warnOfUnsupportedMainModelElements( org.apache.maven.model.v3_0_0.Model v3Model, Reporter reporter )
throws ReportWriteException
{
if ( StringUtils.isNotEmpty( v3Model.getExtend() ) )
{
reporter.warn( "Ignoring non-portable parent declaration: " + v3Model.getExtend() );
}
if ( StringUtils.isNotEmpty( v3Model.getGumpRepositoryId() ) )
{
reporter.warn( "Ignoring gump repository id: \'" + v3Model.getGumpRepositoryId() +
"\'. This is not supported in v4 POMs." );
}
if ( notEmpty( v3Model.getVersions() ) )
{
reporter.warn( "Ignoring <versions/> section. This is not supported in v4 POMs." );
}
if ( notEmpty( v3Model.getBranches() ) )
{
reporter.warn( "Ignoring <branches/> section. This is not supported in v4 POMs." );
}
// Properties is a java.util.Properties, not a List, so it gets its own check.
Properties v3ModelProperties = v3Model.getProperties();
if ( v3ModelProperties != null && !v3ModelProperties.isEmpty() )
{
reporter.warn( "Ignoring <properties/> section. It is not supported in v4 POMs." );
}
if ( StringUtils.isNotEmpty( v3Model.getPackage() ) )
{
reporter.warn( "Ignoring <package/>. It is not supported in v4 POMs." );
}
if ( notEmpty( v3Model.getPackageGroups() ) )
{
reporter.warn( "Ignoring <packageGroups/> section. It is not supported in v4 POMs." );
}
if ( StringUtils.isNotEmpty( v3Model.getLogo() ) )
{
reporter.warn( "Ignoring <logo/> for project. It is not supported in v4 POMs." );
}
if ( StringUtils.isNotEmpty( v3Model.getShortDescription() ) )
{
reporter.warn( "Ignoring <shortDescription/>. It is not supported in v4 POMs." );
}
}
/**
 * Maps the v3 {@code <repository>} SCM coordinates onto a v4 {@link Scm}
 * section. Returns null when the v3 model declares no repository.
 */
private Scm translateScm( org.apache.maven.model.v3_0_0.Model v3Model )
{
    org.apache.maven.model.v3_0_0.Repository v3Repo = v3Model.getRepository();
    if ( v3Repo == null )
    {
        return null;
    }

    Scm result = new Scm();
    result.setConnection( v3Repo.getConnection() );
    result.setDeveloperConnection( v3Repo.getDeveloperConnection() );
    result.setUrl( v3Repo.getUrl() );
    return result;
}
/**
 * Converts the v3 {@code <reports>} list (plain report plugin names) into a v4
 * {@link Reporting} section, deriving each plugin's artifactId from the
 * "maven-&lt;name&gt;-plugin" naming convention and warning that the derived
 * groupId/artifactId/goal may be wrong.
 * <p>
 * NOTE(review): currently not invoked — translate() deliberately omits the
 * reports conversion (see the TODO there) — but kept for when a better mapping
 * exists.
 *
 * @param v3Reports list of v3 report names; may be null or empty
 * @param reporter  sink for warnings about non-standard/derived names
 * @return the populated Reporting section, or null when there were no reports
 * @throws ReportWriteException if the reporter fails to write
 */
private Reporting translateReports( List v3Reports, Reporter reporter )
    throws ReportWriteException
{
    Reporting reports = null;
    if ( v3Reports != null && !v3Reports.isEmpty() )
    {
        reports = new Reporting();

        // Hoisted out of the loop: the naming-convention regex is constant, so
        // compile it once instead of once per report entry.
        Pattern pluginNamePattern = Pattern.compile( "maven-(.+)-plugin" );

        for ( Iterator it = v3Reports.iterator(); it.hasNext(); )
        {
            String reportName = (String) it.next();

            Matcher matcher = pluginNamePattern.matcher( reportName );

            String reportPluginName;
            if ( !matcher.matches() )
            {
                reporter.warn(
                    "Non-standard report name: \'" + reportName + "\'. Using entire name for plugin artifactId." );
                reportPluginName = reportName;
            }
            else
            {
                reportPluginName = matcher.group( 1 );
            }

            ReportPlugin reportPlugin = new ReportPlugin();
            reportPlugin.setGroupId( "maven" );
            reportPlugin.setArtifactId( reportPluginName );

            StringBuffer info = new StringBuffer();
            info.append( "Using some derived information for report: \'" ).append( reportName ).append( "\'.\n" )
                .append( "\to groupId: \'maven\'\n" ).append( "\to artifactId: \'" ).append( reportPluginName )
                .append( "\'\n" ).append( "\to goal: \'report\'\n" )
                .append( "\n" )
                .append( "These values were extracted using the v3 report naming convention, but may be wrong." );
            reporter.warn( info.toString() );

            reports.addPlugin( reportPlugin );
        }
    }
    return reports;
}
/**
 * Translates the v3 organization block, warning that the v3 organization logo
 * has no v4 equivalent. Returns null when the v3 model has no organization.
 */
private Organization translateOrganization( org.apache.maven.model.v3_0_0.Organization v3Organization,
                                            Reporter reporter )
    throws ReportWriteException
{
    if ( v3Organization == null )
    {
        return null;
    }

    Organization result = new Organization();
    result.setName( v3Organization.getName() );
    result.setUrl( v3Organization.getUrl() );

    if ( StringUtils.isNotEmpty( v3Organization.getLogo() ) )
    {
        reporter.warn( "Ignoring <organization><logo/></organization>. It is not supported in v4 POMs." );
    }
    return result;
}
/**
 * Copies each v3 mailing list entry into a v4 {@link MailingList}.
 * Always returns a (possibly empty) list, never null.
 */
private List translateMailingLists( List v3MailingLists )
{
    List result = new ArrayList();
    if ( v3MailingLists != null )
    {
        Iterator entries = v3MailingLists.iterator();
        while ( entries.hasNext() )
        {
            org.apache.maven.model.v3_0_0.MailingList v3Entry =
                (org.apache.maven.model.v3_0_0.MailingList) entries.next();

            MailingList translated = new MailingList();
            translated.setArchive( v3Entry.getArchive() );
            translated.setName( v3Entry.getName() );
            translated.setSubscribe( v3Entry.getSubscribe() );
            translated.setUnsubscribe( v3Entry.getUnsubscribe() );
            result.add( translated );
        }
    }
    return result;
}
/**
 * Copies each v3 license entry into a v4 {@link License}.
 * Always returns a (possibly empty) list, never null.
 */
private List translateLicenses( List v3Licenses )
{
    List result = new ArrayList();
    if ( v3Licenses != null )
    {
        Iterator entries = v3Licenses.iterator();
        while ( entries.hasNext() )
        {
            org.apache.maven.model.v3_0_0.License v3Entry =
                (org.apache.maven.model.v3_0_0.License) entries.next();

            License translated = new License();
            translated.setComments( v3Entry.getComments() );
            translated.setName( v3Entry.getName() );
            translated.setUrl( v3Entry.getUrl() );
            result.add( translated );
        }
    }
    return result;
}
/**
 * Maps the v3 issue-tracking URL onto a v4 {@link IssueManagement} section.
 * Returns null when no tracking URL is declared.
 */
private IssueManagement translateIssueManagement( org.apache.maven.model.v3_0_0.Model v3Model )
{
    String trackingUrl = v3Model.getIssueTrackingUrl();
    if ( StringUtils.isEmpty( trackingUrl ) )
    {
        return null;
    }

    IssueManagement management = new IssueManagement();
    management.setUrl( trackingUrl );
    return management;
}
/**
 * Builds the v4 {@link DistributionManagement} section from the v3 site and
 * distribution coordinates.
 * <p>
 * Site: siteDirectory alone yields a file:// URL; siteAddress requires
 * siteDirectory as well and yields an scp:// URL. Distribution repository:
 * distributionDirectory alone yields a file:// URL; distributionSite requires
 * distributionDirectory as well.
 *
 * @param pomKey  coordinates used to contextualize translation errors
 * @param v3Model the source v3 model
 * @return the section, or null when neither a site nor a repository could be derived
 * @throws PomTranslationException when an address/site is set without its
 *         required directory counterpart
 */
private DistributionManagement translateDistributionManagement( PomKey pomKey,
                                                                org.apache.maven.model.v3_0_0.Model v3Model )
    throws PomTranslationException
{
    DistributionManagement distributionManagement = new DistributionManagement();

    Site site = null;
    String siteAddress = v3Model.getSiteAddress();
    String siteDirectory = v3Model.getSiteDirectory();
    if ( StringUtils.isEmpty( siteAddress ) )
    {
        if ( !StringUtils.isEmpty( siteDirectory ) )
        {
            site = new Site();
            site.setId( "default" );
            site.setName( "Default Site" );
            site.setUrl( "file://" + siteDirectory );
        }
    }
    else
    {
        if ( StringUtils.isEmpty( siteDirectory ) )
        {
            throw new PomTranslationException( pomKey.groupId(), pomKey.artifactId(), pomKey.version(),
                "Missing 'siteDirectory': Both siteAddress and siteDirectory must be set at the same time." );
        }
        site = new Site();
        site.setId( "default" );
        site.setName( "Default Site" );
        site.setUrl( "scp://" + siteAddress + "/" + siteDirectory );
    }
    distributionManagement.setSite( site );

    String distributionSite = v3Model.getDistributionSite();
    String distributionDirectory = v3Model.getDistributionDirectory();
    Repository repository = null;
    if ( StringUtils.isEmpty( distributionSite ) )
    {
        if ( !StringUtils.isEmpty( distributionDirectory ) )
        {
            repository = new Repository();
            repository.setId( "default" );
            repository.setName( "Default Repository" );
            repository.setUrl( "file://" + distributionDirectory );
            // throw new Exception( "Missing 'distributionSite': Both distributionSite and
            // distributionDirectory must be set." );
        }
    }
    else
    {
        if ( StringUtils.isEmpty( distributionDirectory ) )
        {
            // Fixed message typo: "must be set is ... is set" -> "must be set if ... is set".
            throw new PomTranslationException( pomKey.groupId(), pomKey.artifactId(), pomKey.version(),
                "Missing 'distributionDirectory': must be set if 'distributionSite' is set." );
        }
        repository = new Repository();
        repository.setId( "default" );
        repository.setName( "Default Repository" );
        repository.setUrl( distributionSite + "/" + distributionDirectory );
    }
    distributionManagement.setRepository( repository );

    // Nothing derived at all: omit the section entirely from the v4 model.
    if ( site == null && repository == null )
    {
        return null;
    }
    return distributionManagement;
}
/**
 * Copies each v3 developer entry into a v4 {@link Developer}.
 * Always returns a (possibly empty) list, never null.
 */
private List translateDevelopers( List v3Developers )
{
    List result = new ArrayList();
    if ( v3Developers != null )
    {
        Iterator entries = v3Developers.iterator();
        while ( entries.hasNext() )
        {
            org.apache.maven.model.v3_0_0.Developer v3Entry =
                (org.apache.maven.model.v3_0_0.Developer) entries.next();

            Developer translated = new Developer();
            translated.setEmail( v3Entry.getEmail() );
            translated.setId( v3Entry.getId() );
            translated.setName( v3Entry.getName() );
            translated.setOrganization( v3Entry.getOrganization() );
            translated.setRoles( v3Entry.getRoles() );
            translated.setTimezone( v3Entry.getTimezone() );
            translated.setUrl( v3Entry.getUrl() );
            result.add( translated );
        }
    }
    return result;
}
/**
 * Translates v3 dependencies into v4 {@link Dependency} entries.
 * <p>
 * Side effect: v3 dependencies with {@code <type>plugin</type>} are NOT
 * returned as dependencies — they are converted into {@link Plugin} instances
 * (with their properties mapped to a plugin {@code <configuration>} DOM) and
 * accumulated in {@link #discoveredPlugins} for translateBuild() to merge into
 * the v4 build section.
 *
 * @param v3Deps list of v3 dependencies; may be null or empty
 * @return the non-plugin dependencies; always a list, never null
 */
private List translateDependencies( List v3Deps )
{
List deps = new ArrayList();
if ( notEmpty( v3Deps ) )
{
for ( Iterator it = v3Deps.iterator(); it.hasNext(); )
{
org.apache.maven.model.v3_0_0.Dependency v3Dep = (org.apache.maven.model.v3_0_0.Dependency) it.next();
String groupId = format( v3Dep.getGroupId() );
String artifactId = format( v3Dep.getArtifactId() );
// Same legacy-id reconstruction as in translate(): fall back to the
// combined "group+artifact" id when the explicit coordinates are empty.
String id = v3Dep.getId();
if ( StringUtils.isNotEmpty( id ) )
{
if ( StringUtils.isEmpty( groupId ) )
{
int plusIdx = id.indexOf( "+" );
if ( plusIdx > -1 )
{
groupId = id.substring( 0, plusIdx );
}
else
{
groupId = id;
}
}
if ( StringUtils.isEmpty( artifactId ) )
{
artifactId = format( id );
}
}
String type = v3Dep.getType();
if ( "plugin".equals( type ) )
{
// v3's generic "maven" plugin groupId maps to the v4 standard plugin group.
if ( "maven".equals( groupId ) )
{
groupId = "org.apache.maven.plugins";
}
Plugin plugin = new Plugin();
plugin.setGroupId( groupId );
plugin.setArtifactId( artifactId );
plugin.setVersion( format( v3Dep.getVersion() ) );
// Dependency-level properties become the plugin's <configuration> children.
Xpp3Dom config = new Xpp3Dom( "configuration" );
Properties props = v3Dep.getProperties();
if ( !props.isEmpty() )
{
for ( Iterator propertyIterator = props.keySet().iterator(); propertyIterator.hasNext(); )
{
String key = (String) propertyIterator.next();
String value = props.getProperty( key );
Xpp3Dom child = new Xpp3Dom( key );
child.setValue( value );
config.addChild( child );
}
}
plugin.setConfiguration( config );
// Stash for translateBuild(); cleared again in translate()'s finally block.
this.discoveredPlugins.add( plugin );
}
else
{
Dependency dep = new Dependency();
dep.setGroupId( groupId );
dep.setArtifactId( artifactId );
dep.setVersion( v3Dep.getVersion() );
dep.setType( v3Dep.getType() );
// v3 has no first-class scope; it is carried as a dependency property.
String scope = v3Dep.getProperty( "scope" );
if ( StringUtils.isNotEmpty( scope ) )
{
dep.setScope( scope );
}
deps.add( dep );
}
}
}
return deps;
}
/**
 * Copies each v3 contributor entry into a v4 {@link Contributor}.
 * Always returns a (possibly empty) list, never null.
 */
private List translateContributors( List v3Contributors )
{
    List result = new ArrayList();
    if ( v3Contributors != null )
    {
        Iterator entries = v3Contributors.iterator();
        while ( entries.hasNext() )
        {
            org.apache.maven.model.v3_0_0.Contributor v3Entry =
                (org.apache.maven.model.v3_0_0.Contributor) entries.next();

            Contributor translated = new Contributor();
            translated.setEmail( v3Entry.getEmail() );
            translated.setName( v3Entry.getName() );
            translated.setOrganization( v3Entry.getOrganization() );
            translated.setRoles( v3Entry.getRoles() );
            translated.setTimezone( v3Entry.getTimezone() );
            translated.setUrl( v3Entry.getUrl() );
            result.add( translated );
        }
    }
    return result;
}
/**
 * Translates the v3 build section into a v4 {@link Build}.
 * <p>
 * Unit-test include/exclude patterns are converted into configuration for a
 * surefire plugin entry. Plugins accumulated in {@link #discoveredPlugins}
 * (by translateDependencies()) are appended afterwards — a Build is created
 * for them even when the v3 model had no build section at all.
 *
 * @param v3Build  the v3 build section; may be null
 * @param reporter sink for warnings about untranslatable build elements
 * @return the v4 build, or null when there is neither a v3 build nor any
 *         discovered plugins
 * @throws ReportWriteException if the reporter fails to write
 */
private Build translateBuild( org.apache.maven.model.v3_0_0.Build v3Build, Reporter reporter )
throws ReportWriteException
{
Build build = null;
if ( v3Build != null )
{
build = new Build();
warnOfUnsupportedBuildElements( v3Build, reporter );
build.setSourceDirectory( v3Build.getSourceDirectory() );
build.setTestSourceDirectory( v3Build.getUnitTestSourceDirectory() );
build.setResources( translateResources( v3Build.getResources() ) );
UnitTest unitTest = v3Build.getUnitTest();
if ( unitTest != null )
{
build.setTestResources( translateResources( unitTest.getResources() ) );
// Test include/exclude patterns have no direct v4 slot; they become
// configuration on a surefire plugin entry instead.
List testIncludes = unitTest.getIncludes();
List testExcludes = new ArrayList( unitTest.getExcludes() );
if ( notEmpty( testIncludes ) || notEmpty( testExcludes ) )
{
Plugin plugin = new Plugin();
plugin.setGroupId( "org.apache.maven.plugins" );
plugin.setArtifactId( "surefire" );
Xpp3Dom config = new Xpp3Dom( "configuration" );
if ( notEmpty( testIncludes ) )
{
Xpp3Dom includes = new Xpp3Dom( "includes" );
for ( Iterator it = testIncludes.iterator(); it.hasNext(); )
{
String includePattern = (String) it.next();
Xpp3Dom include = new Xpp3Dom( "include" );
include.setValue( includePattern );
includes.addChild( include );
}
config.addChild( includes );
}
if ( notEmpty( testExcludes ) )
{
Xpp3Dom excludes = new Xpp3Dom( "excludes" );
for ( Iterator it = testExcludes.iterator(); it.hasNext(); )
{
String excludePattern = (String) it.next();
Xpp3Dom exclude = new Xpp3Dom( "exclude" );
exclude.setValue( excludePattern );
excludes.addChild( exclude );
}
config.addChild( excludes );
}
// Only attach the configuration when at least one pattern list was present.
if ( config.getChildCount() > 0 )
{
plugin.setConfiguration( config );
}
build.addPlugin( plugin );
}
}
}
// Merge plugins discovered among the v3 dependencies (type=plugin); this may
// create the Build even when the v3 model declared no <build> section.
if ( !this.discoveredPlugins.isEmpty() )
{
if ( build == null )
{
build = new Build();
}
for ( Iterator it = this.discoveredPlugins.iterator(); it.hasNext(); )
{
Plugin plugin = (Plugin) it.next();
build.addPlugin( plugin );
}
}
return build;
}
/**
 * Emits one warning per v3 build element that has no v4 equivalent
 * (sourceModifications, aspectSourceDirectory,
 * integrationUnitTestSourceDirectory). The build itself is not modified.
 *
 * @throws ReportWriteException if the reporter fails to write
 */
private void warnOfUnsupportedBuildElements( org.apache.maven.model.v3_0_0.Build v3Build, Reporter reporter )
throws ReportWriteException
{
if ( notEmpty( v3Build.getSourceModifications() ) )
{
reporter.warn( "Ignoring <sourceModifications/> section. It is not supported in v4 POMs." );
}
if ( StringUtils.isNotEmpty( v3Build.getAspectSourceDirectory() ) )
{
reporter.warn( "Ignoring <aspectSourceDirectory/>. It is not supported in v4 POMs." );
}
if ( StringUtils.isNotEmpty( v3Build.getIntegrationUnitTestSourceDirectory() ) )
{
reporter.warn( "Ignoring <integrationUnitTestSourceDirectory/>. It is not supported in v4 POMs." );
}
}
/**
 * Copies each v3 resource entry into a v4 {@link Resource}, snapshotting the
 * exclude list. Always returns a (possibly empty) list, never null.
 */
private List translateResources( List v3Resources )
{
    List result = new ArrayList();
    if ( v3Resources != null )
    {
        Iterator entries = v3Resources.iterator();
        while ( entries.hasNext() )
        {
            org.apache.maven.model.v3_0_0.Resource v3Entry =
                (org.apache.maven.model.v3_0_0.Resource) entries.next();

            Resource translated = new Resource();
            translated.setDirectory( v3Entry.getDirectory() );
            // Defensive copy: do not share the exclude list with the v3 model.
            translated.setExcludes( new ArrayList( v3Entry.getExcludes() ) );
            translated.setIncludes( v3Entry.getIncludes() );
            translated.setTargetPath( v3Entry.getTargetPath() );
            result.add( translated );
        }
    }
    return result;
}
// private String pathPatternsToString( List patterns )
// {
// StringBuffer result = new StringBuffer();
//
// if ( notEmpty( patterns ) )
// {
// for ( Iterator it = patterns.iterator(); it.hasNext(); )
// {
// String pattern = (String) it.next();
//
// result.append( "," ).append( pattern );
// }
//
// result.setLength( result.length() - 1 );
// }
//
// return result.toString();
// }
/**
 * Null-safe check that a list exists and contains at least one element.
 */
private boolean notEmpty( List test )
{
    if ( test == null )
    {
        return false;
    }
    return !test.isEmpty();
}
/**
 * Immutable holder for the (groupId, artifactId, version) coordinates of the
 * POM being translated; used to contextualize PomTranslationExceptions raised
 * by translateDistributionManagement().
 */
private static class PomKey
{
private final String groupId;
private final String artifactId;
private final String version;
PomKey( String groupId, String artifactId, String version )
{
this.groupId = groupId;
this.artifactId = artifactId;
this.version = version;
}
public String groupId()
{
return groupId;
}
public String artifactId()
{
return artifactId;
}
public String version()
{
return version;
}
}
}
|
sandbox/repoclean/src/main/java/org/apache/maven/tools/repoclean/translate/PomV3ToV4Translator.java
|
package org.apache.maven.tools.repoclean.translate;
/*
* Copyright 2001-2005 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.apache.maven.model.Build;
import org.apache.maven.model.CiManagement;
import org.apache.maven.model.Contributor;
import org.apache.maven.model.Dependency;
import org.apache.maven.model.Developer;
import org.apache.maven.model.DistributionManagement;
import org.apache.maven.model.IssueManagement;
import org.apache.maven.model.License;
import org.apache.maven.model.MailingList;
import org.apache.maven.model.Model;
import org.apache.maven.model.Notifier;
import org.apache.maven.model.Organization;
import org.apache.maven.model.Plugin;
import org.apache.maven.model.ReportPlugin;
import org.apache.maven.model.Reporting;
import org.apache.maven.model.Repository;
import org.apache.maven.model.Resource;
import org.apache.maven.model.Scm;
import org.apache.maven.model.Site;
import org.apache.maven.model.v3_0_0.UnitTest;
import org.apache.maven.tools.repoclean.report.ReportWriteException;
import org.apache.maven.tools.repoclean.report.Reporter;
import org.codehaus.plexus.logging.AbstractLogEnabled;
import org.codehaus.plexus.util.StringUtils;
import org.codehaus.plexus.util.xml.Xpp3Dom;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Properties;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* @author jdcasey
*/
public class PomV3ToV4Translator
extends AbstractLogEnabled
{
public static final String ROLE = PomV3ToV4Translator.class.getName();
private transient List discoveredPlugins = new ArrayList();
public Model translate( org.apache.maven.model.v3_0_0.Model v3Model, Reporter reporter )
throws ReportWriteException
{
try
{
String groupId = format( v3Model.getGroupId() );
String artifactId = format( v3Model.getArtifactId() );
String id = v3Model.getId();
if ( StringUtils.isNotEmpty( id ) )
{
if ( StringUtils.isEmpty( groupId ) )
{
int plusIdx = id.indexOf( "+" );
if ( plusIdx > -1 )
{
groupId = id.substring( 0, plusIdx );
}
else
{
groupId = id;
}
}
if ( StringUtils.isEmpty( artifactId ) )
{
artifactId = format( id );
}
}
String version = format( v3Model.getCurrentVersion() );
if ( version == null )
{
version = format( v3Model.getVersion() );
}
PomKey pomKey = new PomKey( groupId, artifactId, version );
warnOfUnsupportedMainModelElements( v3Model, reporter );
Model model = null;
try
{
model = new Model();
model.setArtifactId( artifactId );
// moved this above the translation of the build, to allow
// additional plugins to be defined in v3 poms via
// <dependency><type>plugin</type></dependency>
model.setDependencies( translateDependencies( v3Model.getDependencies() ) );
model.setBuild( translateBuild( v3Model.getBuild(), reporter ) );
model.setCiManagement( translateCiManagementInfo( v3Model.getBuild() ) );
model.setContributors( translateContributors( v3Model.getContributors() ) );
model.setDescription( v3Model.getDescription() );
model.setDevelopers( translateDevelopers( v3Model.getDevelopers() ) );
model.setDistributionManagement( translateDistributionManagement( pomKey, v3Model ) );
model.setGroupId( groupId );
model.setInceptionYear( v3Model.getInceptionYear() );
model.setIssueManagement( translateIssueManagement( v3Model ) );
model.setLicenses( translateLicenses( v3Model.getLicenses() ) );
model.setMailingLists( translateMailingLists( v3Model.getMailingLists() ) );
model.setModelVersion( "4.0.0" );
model.setName( v3Model.getName() );
model.setOrganization( translateOrganization( v3Model.getOrganization(), reporter ) );
model.setPackaging( "jar" );
model.setReports( translateReports( v3Model.getReports(), reporter ) );
model.setScm( translateScm( v3Model ) );
model.setUrl( v3Model.getUrl() );
model.setVersion( version );
}
catch ( PomTranslationException e )
{
reporter.error( "Invalid POM detected. Cannot translate.", e );
}
return model;
}
finally
{
this.discoveredPlugins.clear();
}
}
private String format( String source )
{
return source == null ? null : source.replace( '+', '-' );
}
private CiManagement translateCiManagementInfo( org.apache.maven.model.v3_0_0.Build v3Build )
{
CiManagement ciMgmt = null;
if ( v3Build != null )
{
String nagEmailAddress = v3Build.getNagEmailAddress();
if ( StringUtils.isNotEmpty( nagEmailAddress ) )
{
Notifier notifier = new Notifier();
notifier.setAddress( nagEmailAddress );
notifier.setType( "mail" );
ciMgmt = new CiManagement();
ciMgmt.addNotifier( notifier );
}
}
return ciMgmt;
}
private void warnOfUnsupportedMainModelElements( org.apache.maven.model.v3_0_0.Model v3Model, Reporter reporter )
throws ReportWriteException
{
if ( StringUtils.isNotEmpty( v3Model.getExtend() ) )
{
reporter.warn( "Ignoring non-portable parent declaration: " + v3Model.getExtend() );
}
if ( StringUtils.isNotEmpty( v3Model.getGumpRepositoryId() ) )
{
reporter.warn( "Ignoring gump repository id: \'" + v3Model.getGumpRepositoryId() +
"\'. This is not supported in v4 POMs." );
}
if ( notEmpty( v3Model.getVersions() ) )
{
reporter.warn( "Ignoring <versions/> section. This is not supported in v4 POMs." );
}
if ( notEmpty( v3Model.getBranches() ) )
{
reporter.warn( "Ignoring <branches/> section. This is not supported in v4 POMs." );
}
Properties v3ModelProperties = v3Model.getProperties();
if ( v3ModelProperties != null && !v3ModelProperties.isEmpty() )
{
reporter.warn( "Ignoring <properties/> section. It is not supported in v4 POMs." );
}
if ( StringUtils.isNotEmpty( v3Model.getPackage() ) )
{
reporter.warn( "Ignoring <package/>. It is not supported in v4 POMs." );
}
if ( notEmpty( v3Model.getPackageGroups() ) )
{
reporter.warn( "Ignoring <packageGroups/> section. It is not supported in v4 POMs." );
}
if ( StringUtils.isNotEmpty( v3Model.getLogo() ) )
{
reporter.warn( "Ignoring <logo/> for project. It is not supported in v4 POMs." );
}
if ( StringUtils.isNotEmpty( v3Model.getShortDescription() ) )
{
reporter.warn( "Ignoring <shortDescription/>. It is not supported in v4 POMs." );
}
}
private Scm translateScm( org.apache.maven.model.v3_0_0.Model v3Model )
{
Scm scm = null;
org.apache.maven.model.v3_0_0.Repository repo = v3Model.getRepository();
if ( repo != null )
{
scm = new Scm();
scm.setConnection( repo.getConnection() );
scm.setDeveloperConnection( repo.getDeveloperConnection() );
scm.setUrl( repo.getUrl() );
}
return scm;
}
private Reporting translateReports( List v3Reports, Reporter reporter )
throws ReportWriteException
{
Reporting reports = null;
if ( v3Reports != null && !v3Reports.isEmpty() )
{
reports = new Reporting();
for ( Iterator it = v3Reports.iterator(); it.hasNext(); )
{
String reportName = (String) it.next();
Pattern pluginNamePattern = Pattern.compile( "maven-(.+)-plugin" );
Matcher matcher = pluginNamePattern.matcher( reportName );
String reportPluginName;
if ( !matcher.matches() )
{
reporter.warn(
"Non-standard report name: \'" + reportName + "\'. Using entire name for plugin artifactId." );
reportPluginName = reportName;
}
else
{
reportPluginName = matcher.group( 1 );
}
ReportPlugin reportPlugin = new ReportPlugin();
reportPlugin.setGroupId( "maven" );
reportPlugin.setArtifactId( reportPluginName );
StringBuffer info = new StringBuffer();
info.append( "Using some derived information for report: \'" ).append( reportName ).append( "\'.\n" )
.append( "\to groupId: \'maven\'\n" ).append( "\to artifactId: \'" ).append( reportPluginName )
.append( "\'\n" ).append( "\to goal: \'report\'\n" )
.append( "\n" )
.append( "These values were extracted using the v3 report naming convention, but may be wrong." );
reporter.warn( info.toString() );
reports.addPlugin( reportPlugin );
}
}
return reports;
}
private Organization translateOrganization( org.apache.maven.model.v3_0_0.Organization v3Organization,
Reporter reporter )
throws ReportWriteException
{
Organization organization = null;
if ( v3Organization != null )
{
organization = new Organization();
organization.setName( v3Organization.getName() );
organization.setUrl( v3Organization.getUrl() );
if ( StringUtils.isNotEmpty( v3Organization.getLogo() ) )
{
reporter.warn( "Ignoring <organization><logo/></organization>. It is not supported in v4 POMs." );
}
}
return organization;
}
private List translateMailingLists( List v3MailingLists )
{
List mailingLists = new ArrayList();
if ( notEmpty( v3MailingLists ) )
{
for ( Iterator it = v3MailingLists.iterator(); it.hasNext(); )
{
org.apache.maven.model.v3_0_0.MailingList v3List = (org.apache.maven.model.v3_0_0.MailingList) it
.next();
MailingList list = new MailingList();
list.setArchive( v3List.getArchive() );
list.setName( v3List.getName() );
list.setSubscribe( v3List.getSubscribe() );
list.setUnsubscribe( v3List.getUnsubscribe() );
mailingLists.add( list );
}
}
return mailingLists;
}
private List translateLicenses( List v3Licenses )
{
List licenses = new ArrayList();
if ( notEmpty( v3Licenses ) )
{
for ( Iterator it = v3Licenses.iterator(); it.hasNext(); )
{
org.apache.maven.model.v3_0_0.License v3License = (org.apache.maven.model.v3_0_0.License) it.next();
License license = new License();
license.setComments( v3License.getComments() );
license.setName( v3License.getName() );
license.setUrl( v3License.getUrl() );
licenses.add( license );
}
}
return licenses;
}
private IssueManagement translateIssueManagement( org.apache.maven.model.v3_0_0.Model v3Model )
{
IssueManagement issueMgmt = null;
String issueTrackingUrl = v3Model.getIssueTrackingUrl();
if ( StringUtils.isNotEmpty( issueTrackingUrl ) )
{
issueMgmt = new IssueManagement();
issueMgmt.setUrl( issueTrackingUrl );
}
return issueMgmt;
}
private DistributionManagement translateDistributionManagement( PomKey pomKey,
org.apache.maven.model.v3_0_0.Model v3Model )
throws PomTranslationException
{
DistributionManagement distributionManagement = new DistributionManagement();
Site site = null;
String siteAddress = v3Model.getSiteAddress();
String siteDirectory = v3Model.getSiteDirectory();
if ( StringUtils.isEmpty( siteAddress ) )
{
if ( !StringUtils.isEmpty( siteDirectory ) )
{
site = new Site();
site.setId( "default" );
site.setName( "Default Site" );
site.setUrl( "file://" + siteDirectory );
}
}
else
{
if ( StringUtils.isEmpty( siteDirectory ) )
{
throw new PomTranslationException( pomKey.groupId(), pomKey.artifactId(), pomKey.version(),
"Missing 'siteDirectory': Both siteAddress and siteDirectory must be set at the same time." );
}
site = new Site();
site.setId( "default" );
site.setName( "Default Site" );
site.setUrl( "scp://" + siteAddress + "/" + siteDirectory );
}
distributionManagement.setSite( site );
String distributionSite = v3Model.getDistributionSite();
String distributionDirectory = v3Model.getDistributionDirectory();
Repository repository = null;
if ( StringUtils.isEmpty( distributionSite ) )
{
if ( !StringUtils.isEmpty( distributionDirectory ) )
{
repository = new Repository();
repository.setId( "default" );
repository.setName( "Default Repository" );
repository.setUrl( "file://" + distributionDirectory );
// throw new Exception( "Missing 'distributionSite': Both distributionSite and
// distributionDirectory must be set." );
}
}
else
{
if ( StringUtils.isEmpty( distributionDirectory ) )
{
throw new PomTranslationException( pomKey.groupId(), pomKey.artifactId(), pomKey.version(),
"Missing 'distributionDirectory': must be set is 'distributionSite' is set." );
}
repository = new Repository();
repository.setId( "default" );
repository.setName( "Default Repository" );
repository.setUrl( distributionSite + "/" + distributionDirectory );
}
distributionManagement.setRepository( repository );
if ( site == null && repository == null )
{
return null;
}
return distributionManagement;
}
private List translateDevelopers( List v3Developers )
{
List developers = new ArrayList();
if ( notEmpty( v3Developers ) )
{
for ( Iterator it = v3Developers.iterator(); it.hasNext(); )
{
org.apache.maven.model.v3_0_0.Developer v3Developer = (org.apache.maven.model.v3_0_0.Developer) it
.next();
Developer developer = new Developer();
developer.setEmail( v3Developer.getEmail() );
developer.setId( v3Developer.getId() );
developer.setName( v3Developer.getName() );
developer.setOrganization( v3Developer.getOrganization() );
developer.setRoles( v3Developer.getRoles() );
developer.setTimezone( v3Developer.getTimezone() );
developer.setUrl( v3Developer.getUrl() );
developers.add( developer );
}
}
return developers;
}
private List translateDependencies( List v3Deps )
{
List deps = new ArrayList();
if ( notEmpty( v3Deps ) )
{
for ( Iterator it = v3Deps.iterator(); it.hasNext(); )
{
org.apache.maven.model.v3_0_0.Dependency v3Dep = (org.apache.maven.model.v3_0_0.Dependency) it.next();
String groupId = format( v3Dep.getGroupId() );
String artifactId = format( v3Dep.getArtifactId() );
String id = v3Dep.getId();
if ( StringUtils.isNotEmpty( id ) )
{
if ( StringUtils.isEmpty( groupId ) )
{
int plusIdx = id.indexOf( "+" );
if ( plusIdx > -1 )
{
groupId = id.substring( 0, plusIdx );
}
else
{
groupId = id;
}
}
if ( StringUtils.isEmpty( artifactId ) )
{
artifactId = format( id );
}
}
String type = v3Dep.getType();
if ( "plugin".equals( type ) )
{
if ( "maven".equals( groupId ) )
{
groupId = "org.apache.maven.plugins";
}
Plugin plugin = new Plugin();
plugin.setGroupId( groupId );
plugin.setArtifactId( artifactId );
plugin.setVersion( format( v3Dep.getVersion() ) );
Xpp3Dom config = new Xpp3Dom( "configuration" );
Properties props = v3Dep.getProperties();
if ( !props.isEmpty() )
{
for ( Iterator propertyIterator = props.keySet().iterator(); propertyIterator.hasNext(); )
{
String key = (String) propertyIterator.next();
String value = props.getProperty( key );
Xpp3Dom child = new Xpp3Dom( key );
child.setValue( value );
config.addChild( child );
}
}
plugin.setConfiguration( config );
this.discoveredPlugins.add( plugin );
}
else
{
Dependency dep = new Dependency();
dep.setGroupId( groupId );
dep.setArtifactId( artifactId );
dep.setVersion( v3Dep.getVersion() );
dep.setType( v3Dep.getType() );
String scope = v3Dep.getProperty( "scope" );
if ( StringUtils.isNotEmpty( scope ) )
{
dep.setScope( scope );
}
deps.add( dep );
}
}
}
return deps;
}
private List translateContributors( List v3Contributors )
{
List contributors = new ArrayList();
if ( notEmpty( v3Contributors ) )
{
for ( Iterator it = v3Contributors.iterator(); it.hasNext(); )
{
org.apache.maven.model.v3_0_0.Contributor v3Contributor = (org.apache.maven.model.v3_0_0.Contributor) it
.next();
Contributor contributor = new Contributor();
contributor.setEmail( v3Contributor.getEmail() );
contributor.setName( v3Contributor.getName() );
contributor.setOrganization( v3Contributor.getOrganization() );
contributor.setRoles( v3Contributor.getRoles() );
contributor.setTimezone( v3Contributor.getTimezone() );
contributor.setUrl( v3Contributor.getUrl() );
contributors.add( contributor );
}
}
return contributors;
}
private Build translateBuild( org.apache.maven.model.v3_0_0.Build v3Build, Reporter reporter )
throws ReportWriteException
{
Build build = null;
if ( v3Build != null )
{
build = new Build();
warnOfUnsupportedBuildElements( v3Build, reporter );
build.setSourceDirectory( v3Build.getSourceDirectory() );
build.setTestSourceDirectory( v3Build.getUnitTestSourceDirectory() );
build.setResources( translateResources( v3Build.getResources() ) );
UnitTest unitTest = v3Build.getUnitTest();
if ( unitTest != null )
{
build.setTestResources( translateResources( unitTest.getResources() ) );
List testIncludes = unitTest.getIncludes();
List testExcludes = new ArrayList( unitTest.getExcludes() );
if ( notEmpty( testIncludes ) || notEmpty( testExcludes ) )
{
Plugin plugin = new Plugin();
plugin.setGroupId( "org.apache.maven.plugins" );
plugin.setArtifactId( "surefire" );
Xpp3Dom config = new Xpp3Dom( "configuration" );
if ( notEmpty( testIncludes ) )
{
Xpp3Dom includes = new Xpp3Dom( "includes" );
for ( Iterator it = testIncludes.iterator(); it.hasNext(); )
{
String includePattern = (String) it.next();
Xpp3Dom include = new Xpp3Dom( "include" );
include.setValue( includePattern );
includes.addChild( include );
}
config.addChild( includes );
}
if ( notEmpty( testExcludes ) )
{
Xpp3Dom excludes = new Xpp3Dom( "excludes" );
for ( Iterator it = testExcludes.iterator(); it.hasNext(); )
{
String excludePattern = (String) it.next();
Xpp3Dom exclude = new Xpp3Dom( "exclude" );
exclude.setValue( excludePattern );
excludes.addChild( exclude );
}
config.addChild( excludes );
}
if ( config.getChildCount() > 0 )
{
plugin.setConfiguration( config );
}
build.addPlugin( plugin );
}
}
}
if ( !this.discoveredPlugins.isEmpty() )
{
if ( build == null )
{
build = new Build();
}
for ( Iterator it = this.discoveredPlugins.iterator(); it.hasNext(); )
{
Plugin plugin = (Plugin) it.next();
build.addPlugin( plugin );
}
}
return build;
}
    /**
     * Emits a warning for each v3-only build element present on the given
     * build section; these have no v4 equivalent and are silently dropped
     * by the translation otherwise.
     *
     * @param v3Build  the v3 build section being translated
     * @param reporter sink for conversion warnings
     * @throws ReportWriteException if a warning cannot be written
     */
    private void warnOfUnsupportedBuildElements( org.apache.maven.model.v3_0_0.Build v3Build, Reporter reporter )
        throws ReportWriteException
    {
        if ( notEmpty( v3Build.getSourceModifications() ) )
        {
            reporter.warn( "Ignoring <sourceModifications/> section. It is not supported in v4 POMs." );
        }

        if ( StringUtils.isNotEmpty( v3Build.getAspectSourceDirectory() ) )
        {
            reporter.warn( "Ignoring <aspectSourceDirectory/>. It is not supported in v4 POMs." );
        }

        if ( StringUtils.isNotEmpty( v3Build.getIntegrationUnitTestSourceDirectory() ) )
        {
            reporter.warn( "Ignoring <integrationUnitTestSourceDirectory/>. It is not supported in v4 POMs." );
        }
    }
private List translateResources( List v3Resources )
{
List resources = new ArrayList();
if ( notEmpty( v3Resources ) )
{
for ( Iterator it = v3Resources.iterator(); it.hasNext(); )
{
org.apache.maven.model.v3_0_0.Resource v3Resource = (org.apache.maven.model.v3_0_0.Resource) it.next();
Resource resource = new Resource();
resource.setDirectory( v3Resource.getDirectory() );
List excludes = new ArrayList( v3Resource.getExcludes() );
resource.setExcludes( excludes );
resource.setIncludes( v3Resource.getIncludes() );
resource.setTargetPath( v3Resource.getTargetPath() );
resources.add( resource );
}
}
return resources;
}
// private String pathPatternsToString( List patterns )
// {
// StringBuffer result = new StringBuffer();
//
// if ( notEmpty( patterns ) )
// {
// for ( Iterator it = patterns.iterator(); it.hasNext(); )
// {
// String pattern = (String) it.next();
//
// result.append( "," ).append( pattern );
// }
//
// result.setLength( result.length() - 1 );
// }
//
// return result.toString();
// }
private boolean notEmpty( List test )
{
return test != null && !test.isEmpty();
}
private static class PomKey
{
private final String groupId;
private final String artifactId;
private final String version;
PomKey( String groupId, String artifactId, String version )
{
this.groupId = groupId;
this.artifactId = artifactId;
this.version = version;
}
public String groupId()
{
return groupId;
}
public String artifactId()
{
return artifactId;
}
public String version()
{
return version;
}
}
}
|
turn off report conversion
git-svn-id: 2c527eb49caa05e19d6b2be874bf74fa9d7ea670@226782 13f79535-47bb-0310-9956-ffa450edef68
|
sandbox/repoclean/src/main/java/org/apache/maven/tools/repoclean/translate/PomV3ToV4Translator.java
|
turn off report conversion
|
|
Java
|
apache-2.0
|
19f28ca7e8aec324bdefcc18b4781d70f53fb15c
| 0
|
zqian/sakai,tl-its-umich-edu/sakai,kwedoff1/sakai,udayg/sakai,hackbuteer59/sakai,introp-software/sakai,pushyamig/sakai,udayg/sakai,bzhouduke123/sakai,introp-software/sakai,puramshetty/sakai,buckett/sakai-gitflow,OpenCollabZA/sakai,joserabal/sakai,ouit0408/sakai,hackbuteer59/sakai,rodriguezdevera/sakai,lorenamgUMU/sakai,tl-its-umich-edu/sakai,udayg/sakai,frasese/sakai,colczr/sakai,duke-compsci290-spring2016/sakai,buckett/sakai-gitflow,Fudan-University/sakai,willkara/sakai,hackbuteer59/sakai,kwedoff1/sakai,noondaysun/sakai,duke-compsci290-spring2016/sakai,zqian/sakai,liubo404/sakai,colczr/sakai,whumph/sakai,tl-its-umich-edu/sakai,clhedrick/sakai,tl-its-umich-edu/sakai,OpenCollabZA/sakai,conder/sakai,puramshetty/sakai,udayg/sakai,clhedrick/sakai,whumph/sakai,ouit0408/sakai,kingmook/sakai,wfuedu/sakai,kingmook/sakai,puramshetty/sakai,ktakacs/sakai,pushyamig/sakai,frasese/sakai,lorenamgUMU/sakai,colczr/sakai,introp-software/sakai,kingmook/sakai,bkirschn/sakai,frasese/sakai,frasese/sakai,ouit0408/sakai,udayg/sakai,conder/sakai,hackbuteer59/sakai,lorenamgUMU/sakai,frasese/sakai,joserabal/sakai,pushyamig/sakai,kwedoff1/sakai,rodriguezdevera/sakai,introp-software/sakai,willkara/sakai,colczr/sakai,noondaysun/sakai,tl-its-umich-edu/sakai,frasese/sakai,surya-janani/sakai,tl-its-umich-edu/sakai,bkirschn/sakai,OpenCollabZA/sakai,wfuedu/sakai,ktakacs/sakai,bzhouduke123/sakai,Fudan-University/sakai,clhedrick/sakai,rodriguezdevera/sakai,Fudan-University/sakai,kingmook/sakai,duke-compsci290-spring2016/sakai,buckett/sakai-gitflow,udayg/sakai,liubo404/sakai,buckett/sakai-gitflow,liubo404/sakai,noondaysun/sakai,conder/sakai,bkirschn/sakai,hackbuteer59/sakai,pushyamig/sakai,pushyamig/sakai,noondaysun/sakai,willkara/sakai,bzhouduke123/sakai,Fudan-University/sakai,liubo404/sakai,whumph/sakai,clhedrick/sakai,kwedoff1/sakai,puramshetty/sakai,surya-janani/sakai,ouit0408/sakai,hackbuteer59/sakai,clhedrick/sakai,rodriguezdevera/sakai,clhedrick/sakai,joserabal/sakai,colczr/sakai,conder/sakai,pur
amshetty/sakai,clhedrick/sakai,OpenCollabZA/sakai,bzhouduke123/sakai,noondaysun/sakai,duke-compsci290-spring2016/sakai,puramshetty/sakai,willkara/sakai,zqian/sakai,zqian/sakai,ktakacs/sakai,surya-janani/sakai,ouit0408/sakai,zqian/sakai,lorenamgUMU/sakai,wfuedu/sakai,ktakacs/sakai,ktakacs/sakai,Fudan-University/sakai,OpenCollabZA/sakai,ouit0408/sakai,bkirschn/sakai,joserabal/sakai,duke-compsci290-spring2016/sakai,ktakacs/sakai,willkara/sakai,kingmook/sakai,OpenCollabZA/sakai,hackbuteer59/sakai,whumph/sakai,pushyamig/sakai,introp-software/sakai,udayg/sakai,OpenCollabZA/sakai,lorenamgUMU/sakai,clhedrick/sakai,wfuedu/sakai,liubo404/sakai,joserabal/sakai,OpenCollabZA/sakai,conder/sakai,conder/sakai,colczr/sakai,surya-janani/sakai,zqian/sakai,rodriguezdevera/sakai,joserabal/sakai,udayg/sakai,willkara/sakai,bkirschn/sakai,ouit0408/sakai,introp-software/sakai,conder/sakai,whumph/sakai,ouit0408/sakai,surya-janani/sakai,tl-its-umich-edu/sakai,liubo404/sakai,lorenamgUMU/sakai,noondaysun/sakai,ktakacs/sakai,willkara/sakai,duke-compsci290-spring2016/sakai,whumph/sakai,rodriguezdevera/sakai,liubo404/sakai,wfuedu/sakai,lorenamgUMU/sakai,wfuedu/sakai,surya-janani/sakai,ktakacs/sakai,colczr/sakai,kwedoff1/sakai,bkirschn/sakai,noondaysun/sakai,bzhouduke123/sakai,puramshetty/sakai,joserabal/sakai,rodriguezdevera/sakai,kingmook/sakai,bkirschn/sakai,introp-software/sakai,kwedoff1/sakai,hackbuteer59/sakai,lorenamgUMU/sakai,wfuedu/sakai,kwedoff1/sakai,pushyamig/sakai,rodriguezdevera/sakai,buckett/sakai-gitflow,liubo404/sakai,kingmook/sakai,tl-its-umich-edu/sakai,frasese/sakai,colczr/sakai,bzhouduke123/sakai,whumph/sakai,Fudan-University/sakai,bzhouduke123/sakai,pushyamig/sakai,duke-compsci290-spring2016/sakai,buckett/sakai-gitflow,joserabal/sakai,kwedoff1/sakai,zqian/sakai,wfuedu/sakai,puramshetty/sakai,conder/sakai,Fudan-University/sakai,kingmook/sakai,whumph/sakai,surya-janani/sakai,noondaysun/sakai,frasese/sakai,bzhouduke123/sakai,zqian/sakai,willkara/sakai,bkirschn/sakai,Fudan-Univers
ity/sakai,duke-compsci290-spring2016/sakai,introp-software/sakai,buckett/sakai-gitflow,surya-janani/sakai,buckett/sakai-gitflow
|
/**********************************************************************************
* $URL$
* $Id$
***********************************************************************************
*
* Copyright (c) 2004, 2005, 2006 The Sakai Foundation.
*
 * Licensed under the Educational Community License, Version 1.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl1.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
**********************************************************************************/
package org.sakaiproject.tool.assessment.ui.listener.author;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.HashSet;
import java.util.Set;
import java.util.ResourceBundle;
import javax.faces.context.FacesContext;
import javax.faces.application.FacesMessage;
import javax.faces.event.AbortProcessingException;
import javax.faces.event.ActionEvent;
import javax.faces.event.ActionListener;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.sakaiproject.tool.assessment.data.ifc.assessment.SectionAttachmentIfc;
import org.sakaiproject.tool.assessment.data.ifc.assessment.SectionDataIfc;
import org.sakaiproject.tool.assessment.data.ifc.assessment.SectionMetaDataIfc;
import org.sakaiproject.tool.assessment.facade.AssessmentFacade;
import org.sakaiproject.tool.assessment.facade.QuestionPoolFacade;
import org.sakaiproject.tool.assessment.facade.AgentFacade;
import org.sakaiproject.tool.assessment.facade.ItemFacade;
import org.sakaiproject.tool.assessment.facade.SectionFacade;
import org.sakaiproject.tool.assessment.services.ItemService;
import org.sakaiproject.tool.assessment.services.QuestionPoolService;
import org.sakaiproject.tool.assessment.services.assessment.AssessmentService;
import org.sakaiproject.tool.assessment.ui.bean.author.AssessmentBean;
import org.sakaiproject.tool.assessment.ui.bean.author.SectionBean;
import org.sakaiproject.tool.assessment.ui.listener.util.ContextUtil;
/**
 * <p>Title: Samigo</p>
* <p>Description: Sakai Assessment Manager</p>
* <p>Copyright: Copyright (c) 2004 Sakai Project</p>
* <p>Organization: Sakai Project</p>
* @author Ed Smiley
* @version $Id$
*/
public class SavePartListener
    implements ActionListener
{
  private static Log log = LogFactory.getLog(SavePartListener.class);
  //private static ContextUtil cu;

  public SavePartListener()
  {
  }

  /**
   * Saves an assessment part (section) from the editpart.jsp form.
   * Creates a new section when no sectionId is present, otherwise updates the
   * existing one; copies title, description and metadata from the form,
   * handles "random draw from question pool" parts (validating and copying
   * pool items), persists the section, and refreshes the cached assessment.
   *
   * @param ae the JSF action event (state is read from managed beans, not the event)
   */
  public void processAction(ActionEvent ae) throws AbortProcessingException
  {
    FacesContext context = FacesContext.getCurrentInstance();
    //Map reqMap = context.getExternalContext().getRequestMap();
    //Map requestParams = context.getExternalContext().getRequestParameterMap();

    AssessmentBean assessmentBean = (AssessmentBean) ContextUtil.lookupBean(
                                    "assessmentBean");
    String assessmentId = assessmentBean.getAssessmentId();

    SectionBean sectionBean= (SectionBean) ContextUtil.lookupBean(
                             "sectionBean");
    // create an assessment based on the title entered and the assessment
    // template selected
    // #1 - read from form editpart.jsp
    String title = (sectionBean.getSectionTitle()).trim();
    String description = sectionBean.getSectionDescription();
    String sectionId = sectionBean.getSectionId();
    AssessmentService assessmentService = new AssessmentService();
    SectionFacade section;

    // An empty sectionId means the user is adding a brand-new part.
    if (sectionId.equals("")){
      section = addPart(assessmentId);
      log.debug("**** section="+section);
      sectionBean.setSection(section);
      sectionId = section.getSectionId().toString();
    }
    else {
      section = assessmentService.getSection(sectionId);
    }

    //Long assessmentId = section.getAssessmentId();
    boolean addItemsFromPool = false;
    sectionBean.setOutcome("editAssessment");

    // Type "2" (random draw) requires a pool selection; bounce back to the
    // edit page with a localized error when none was chosen.
    if((sectionBean.getType().equals("2"))&& (sectionBean.getSelectedPool().equals(""))){
      String selectedPool_err=ContextUtil.getLocalizedString("org.sakaiproject.tool.assessment.bundle.AuthorMessages","selectedPool_error");
      context.addMessage(null,new FacesMessage(selectedPool_err));
      sectionBean.setOutcome("editPart");
      return ;
    }

    if (!("".equals(sectionBean.getType())) && ((SectionDataIfc.RANDOM_DRAW_FROM_QUESTIONPOOL.toString()).equals(sectionBean.getType()))) {
      addItemsFromPool = true;
      if (validateItemsDrawn(sectionBean)) {
        // if the author type was random draw type, and the new type is random draw , then we need to disassociate sectionid with each items. Cannot delete items, 'cuz these items are linked in the pool
        if( (section !=null) && (section.getSectionMetaDataByLabel(SectionDataIfc.AUTHOR_TYPE)!=null) && (section.getSectionMetaDataByLabel(SectionDataIfc.AUTHOR_TYPE).equals(SectionDataIfc.RANDOM_DRAW_FROM_QUESTIONPOOL.toString()))) {
          assessmentService.removeAllItems(sectionId);
          // need to reload
          section = assessmentService.getSection(sectionId);
        }
      }
      else {
        sectionBean.setOutcome("editPart");
        return;
      }
    }

    log.debug("**** section title ="+section.getTitle());
    log.debug("**** title ="+title);
    // if (title != null & !title.equals("")) // There is no spec saying we don't allow empty string for title , SAK-4211
    if (title != null)
      section.setTitle(title);
    section.setDescription(description);

    // TODO: Need to save Type, Question Ordering, and Metadata
    // Empty form fields are simply not written as metadata.
    if (!("".equals(sectionBean.getKeyword())))
      section.addSectionMetaData(SectionMetaDataIfc.KEYWORDS, sectionBean.getKeyword());

    if (!("".equals(sectionBean.getObjective())))
      section.addSectionMetaData(SectionMetaDataIfc.OBJECTIVES, sectionBean.getObjective());

    if (!("".equals(sectionBean.getRubric())))
      section.addSectionMetaData(SectionMetaDataIfc.RUBRICS, sectionBean.getRubric());

    if (!("".equals(sectionBean.getQuestionOrdering())))
      section.addSectionMetaData(SectionDataIfc.QUESTIONS_ORDERING, sectionBean.getQuestionOrdering());

    if (!("".equals(sectionBean.getType()))) {
      section.addSectionMetaData(SectionDataIfc.AUTHOR_TYPE, sectionBean.getType());
      if ((SectionDataIfc.RANDOM_DRAW_FROM_QUESTIONPOOL.toString()).equals(sectionBean.getType())) {
        if ((sectionBean.getNumberSelected()!=null) && !("".equals(sectionBean.getNumberSelected())))
        {
          section.addSectionMetaData(SectionDataIfc.NUM_QUESTIONS_DRAWN, sectionBean.getNumberSelected());
        }

        if (!("".equals(sectionBean.getSelectedPool())))
        {
          section.addSectionMetaData(SectionDataIfc.POOLID_FOR_RANDOM_DRAW, sectionBean.getSelectedPool());
          // Also record the pool name so it can be shown without another lookup.
          String poolname = "";
          QuestionPoolService qpservice = new QuestionPoolService();
          QuestionPoolFacade poolfacade = qpservice.getPool(new Long(sectionBean.getSelectedPool()), AgentFacade.getAgentString());
          if (poolfacade!=null) {
            poolname = poolfacade.getTitle();
          }
          section.addSectionMetaData(SectionDataIfc.POOLNAME_FOR_RANDOM_DRAW, poolname);
        }
      }

      // attach the section's attachment list to sectionBean for display
      List attachmentList = section.getSectionAttachmentList();
      sectionBean.setAttachmentList(attachmentList);
      if (attachmentList != null && attachmentList.size() >0){
        sectionBean.setHasAttachment(true);
      }
      else{
        sectionBean.setHasAttachment(false);
      }
    }

    // if author-type is random draw from pool, add all items from pool now
    // Note: a pool can only be randomly drawn by one part. if part A is created to randomly draw from pool 1, and you create part B, and select the same pool 1, all items from part A will be removed. (item.sectionId will be set to sectionId of part B.
    // currently if a pool is selected by one random draw part it will no longer show up in the poollist for random draw
    if (addItemsFromPool)
    {
      QuestionPoolService qpservice = new QuestionPoolService();
      //ItemService itemservice = new ItemService();
      ArrayList itemlist = qpservice.getAllItems(new Long(sectionBean.getSelectedPool()) );
      int i = 0;
      Iterator iter = itemlist.iterator();
      while(iter.hasNext())
      {
        ItemFacade item= (ItemFacade) iter.next();
        item.setSection(section);
        item.setSequence(new Integer(i+1));
        section.addItem(item);
        i= i+1;
      }
    }

    assessmentService.saveOrUpdateSection(section);

    // #2 - goto editAssessment.jsp, so reset assessmentBean
    AssessmentFacade assessment = assessmentService.getAssessment(
        assessmentBean.getAssessmentId());
    assessmentBean.setAssessment(assessment);
    assessmentService.updateAssessmentLastModifiedInfo(assessment);
  }

  /**
   * Creates a new, empty section (part) in the given assessment.
   *
   * @param assessmentId id of the assessment receiving the new part
   * @return the newly created section facade
   */
  public SectionFacade addPart(String assessmentId){
    AssessmentService assessmentService = new AssessmentService();
    SectionFacade section = assessmentService.addSection(
        assessmentId);
    return section;
  }

  /**
   * Validates the "number of questions to draw" entered for a random-draw
   * part: it must parse as an integer in the range 1..(pool size). On failure
   * a localized message including the pool size is queued on the FacesContext.
   *
   * @param sectionBean bean holding the selected pool and requested count
   * @return true when the drawn count is valid, false otherwise
   */
  public boolean validateItemsDrawn(SectionBean sectionBean){
    FacesContext context = FacesContext.getCurrentInstance();
    String numberDrawn = sectionBean.getNumberSelected();
    String err;
    QuestionPoolService qpservice = new QuestionPoolService();
    ArrayList itemlist = qpservice.getAllItems(new Long(sectionBean.getSelectedPool()) );
    int itemcount = itemlist.size();
    String itemcountString=" "+Integer.toString(itemcount);

    try{
      int numberDrawnInt = Integer.parseInt(numberDrawn);
      if(numberDrawnInt <=0 || numberDrawnInt>itemcount){
        err=ContextUtil.getLocalizedString("org.sakaiproject.tool.assessment.bundle.AuthorMessages","qdrawn_error");
        context.addMessage(null,new FacesMessage(err+itemcountString ));
        return false;
      }
    } catch(NumberFormatException e){
      err=ContextUtil.getLocalizedString("org.sakaiproject.tool.assessment.bundle.AuthorMessages","qdrawn_error");
      context.addMessage(null,new FacesMessage(err+itemcountString ));
      return false;
    }

    return true;
  }

  /*
  private ArrayList prepareSectionAttachment(SectionDataIfc section){
    Set attachmentSet = section.getSectionAttachmentSet();
    if (attachmentSet == null){
      attachmentSet = new HashSet();
    }
    log.debug("*** attachment size="+attachmentSet.size());
    AssessmentService assessmentService = new AssessmentService();
    String protocol = ContextUtil.getProtocol();

    ToolSession session = SessionManager.getCurrentToolSession();
    if (session.getAttribute(FilePickerHelper.FILE_PICKER_CANCEL) == null &&
        session.getAttribute(FilePickerHelper.FILE_PICKER_ATTACHMENTS) != null) {

      List refs = (List)session.getAttribute(FilePickerHelper.FILE_PICKER_ATTACHMENTS);
      if (refs!=null && refs.size() > 0){
        Reference ref = (Reference)refs.get(0);

        for(int i=0; i<refs.size(); i++) {
          ref = (Reference) refs.get(i);
          log.debug("**** ref.Id="+ref.getId());
          log.debug("**** ref.name="+ref.getProperties().getProperty( ref.getProperties().getNamePropDisplayName()));
          SectionAttachmentIfc newAttach = assessmentService.createSectionAttachment(
              section,
              ref.getId(), ref.getProperties().getProperty(
                  ref.getProperties().getNamePropDisplayName()),
              protocol);
          attachmentSet.add(newAttach);
        }
      }
    }
    session.removeAttribute(FilePickerHelper.FILE_PICKER_ATTACHMENTS);
    session.removeAttribute(FilePickerHelper.FILE_PICKER_CANCEL);

    ArrayList list = new ArrayList();
    Iterator iter = attachmentSet.iterator();
    while (iter.hasNext()){
      SectionAttachmentIfc a = (SectionAttachmentIfc)iter.next();
      list.add(a);
    }
    return list;
  }
  */
}
|
samigo/samigo-app/src/java/org/sakaiproject/tool/assessment/ui/listener/author/SavePartListener.java
|
/**********************************************************************************
* $URL$
* $Id$
***********************************************************************************
*
* Copyright (c) 2004, 2005, 2006 The Sakai Foundation.
*
 * Licensed under the Educational Community License, Version 1.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl1.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
**********************************************************************************/
package org.sakaiproject.tool.assessment.ui.listener.author;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.HashSet;
import java.util.Set;
import java.util.ResourceBundle;
import javax.faces.context.FacesContext;
import javax.faces.application.FacesMessage;
import javax.faces.event.AbortProcessingException;
import javax.faces.event.ActionEvent;
import javax.faces.event.ActionListener;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.sakaiproject.tool.assessment.data.ifc.assessment.SectionAttachmentIfc;
import org.sakaiproject.tool.assessment.data.ifc.assessment.SectionDataIfc;
import org.sakaiproject.tool.assessment.data.ifc.assessment.SectionMetaDataIfc;
import org.sakaiproject.tool.assessment.facade.AssessmentFacade;
import org.sakaiproject.tool.assessment.facade.QuestionPoolFacade;
import org.sakaiproject.tool.assessment.facade.AgentFacade;
import org.sakaiproject.tool.assessment.facade.ItemFacade;
import org.sakaiproject.tool.assessment.facade.SectionFacade;
import org.sakaiproject.tool.assessment.services.ItemService;
import org.sakaiproject.tool.assessment.services.QuestionPoolService;
import org.sakaiproject.tool.assessment.services.assessment.AssessmentService;
import org.sakaiproject.tool.assessment.ui.bean.author.AssessmentBean;
import org.sakaiproject.tool.assessment.ui.bean.author.SectionBean;
import org.sakaiproject.tool.assessment.ui.listener.util.ContextUtil;
/**
 * <p>Title: Samigo</p>
* <p>Description: Sakai Assessment Manager</p>
* <p>Copyright: Copyright (c) 2004 Sakai Project</p>
* <p>Organization: Sakai Project</p>
* @author Ed Smiley
* @version $Id$
*/
public class SavePartListener
    implements ActionListener
{
  private static Log log = LogFactory.getLog(SavePartListener.class);
  //private static ContextUtil cu;

  public SavePartListener()
  {
  }

  /**
   * Saves an assessment part (section) from the editpart.jsp form.
   * Creates a new section when no sectionId is present, otherwise updates the
   * existing one; copies title, description and metadata from the form,
   * handles "random draw from question pool" parts (validating and copying
   * pool items), persists the section, and refreshes the cached assessment.
   *
   * @param ae the JSF action event (state is read from managed beans, not the event)
   */
  public void processAction(ActionEvent ae) throws AbortProcessingException
  {
    FacesContext context = FacesContext.getCurrentInstance();
    //Map reqMap = context.getExternalContext().getRequestMap();
    //Map requestParams = context.getExternalContext().getRequestParameterMap();

    AssessmentBean assessmentBean = (AssessmentBean) ContextUtil.lookupBean(
                                    "assessmentBean");
    String assessmentId = assessmentBean.getAssessmentId();

    SectionBean sectionBean= (SectionBean) ContextUtil.lookupBean(
                             "sectionBean");
    // create an assessment based on the title entered and the assessment
    // template selected
    // #1 - read from form editpart.jsp
    String title = (sectionBean.getSectionTitle()).trim();
    String description = sectionBean.getSectionDescription();
    String sectionId = sectionBean.getSectionId();
    AssessmentService assessmentService = new AssessmentService();
    SectionFacade section;

    // An empty sectionId means the user is adding a brand-new part.
    if (sectionId.equals("")){
      section = addPart(assessmentId);
      log.debug("**** section="+section);
      sectionBean.setSection(section);
      sectionId = section.getSectionId().toString();
    }
    else {
      section = assessmentService.getSection(sectionId);
    }

    //Long assessmentId = section.getAssessmentId();
    boolean addItemsFromPool = false;
    sectionBean.setOutcome("editAssessment");

    // Type "2" (random draw) requires a pool selection; bounce back to the
    // edit page with a localized error when none was chosen.
    if((sectionBean.getType().equals("2"))&& (sectionBean.getSelectedPool().equals(""))){
      String selectedPool_err=ContextUtil.getLocalizedString("org.sakaiproject.tool.assessment.bundle.AuthorMessages","selectedPool_error");
      context.addMessage(null,new FacesMessage(selectedPool_err));
      sectionBean.setOutcome("editPart");
      return ;
    }

    if (!("".equals(sectionBean.getType())) && ((SectionDataIfc.RANDOM_DRAW_FROM_QUESTIONPOOL.toString()).equals(sectionBean.getType()))) {
      addItemsFromPool = true;
      if (validateItemsDrawn(sectionBean)) {
        // if the author type was random draw type, and the new type is random draw , then we need to disassociate sectionid with each items. Cannot delete items, 'cuz these items are linked in the pool
        if( (section !=null) && (section.getSectionMetaDataByLabel(SectionDataIfc.AUTHOR_TYPE)!=null) && (section.getSectionMetaDataByLabel(SectionDataIfc.AUTHOR_TYPE).equals(SectionDataIfc.RANDOM_DRAW_FROM_QUESTIONPOOL.toString()))) {
          assessmentService.removeAllItems(sectionId);
          // need to reload
          section = assessmentService.getSection(sectionId);
        }
      }
      else {
        sectionBean.setOutcome("editPart");
        return;
      }
    }

    // Debug traces belong at debug level, not warn (was log.warn).
    log.debug("**** section title ="+section.getTitle());
    log.debug("**** title ="+title);
    // if (title != null & !title.equals("")) // There is no spec saying we don't allow empty string for title , SAK-4211
    if (title != null)
      section.setTitle(title);
    section.setDescription(description);

    // TODO: Need to save Type, Question Ordering, and Metadata
    // Empty form fields are simply not written as metadata.
    if (!("".equals(sectionBean.getKeyword())))
      section.addSectionMetaData(SectionMetaDataIfc.KEYWORDS, sectionBean.getKeyword());

    if (!("".equals(sectionBean.getObjective())))
      section.addSectionMetaData(SectionMetaDataIfc.OBJECTIVES, sectionBean.getObjective());

    if (!("".equals(sectionBean.getRubric())))
      section.addSectionMetaData(SectionMetaDataIfc.RUBRICS, sectionBean.getRubric());

    if (!("".equals(sectionBean.getQuestionOrdering())))
      section.addSectionMetaData(SectionDataIfc.QUESTIONS_ORDERING, sectionBean.getQuestionOrdering());

    if (!("".equals(sectionBean.getType()))) {
      section.addSectionMetaData(SectionDataIfc.AUTHOR_TYPE, sectionBean.getType());
      if ((SectionDataIfc.RANDOM_DRAW_FROM_QUESTIONPOOL.toString()).equals(sectionBean.getType())) {
        if ((sectionBean.getNumberSelected()!=null) && !("".equals(sectionBean.getNumberSelected())))
        {
          section.addSectionMetaData(SectionDataIfc.NUM_QUESTIONS_DRAWN, sectionBean.getNumberSelected());
        }

        if (!("".equals(sectionBean.getSelectedPool())))
        {
          section.addSectionMetaData(SectionDataIfc.POOLID_FOR_RANDOM_DRAW, sectionBean.getSelectedPool());
          // Also record the pool name so it can be shown without another lookup.
          String poolname = "";
          QuestionPoolService qpservice = new QuestionPoolService();
          QuestionPoolFacade poolfacade = qpservice.getPool(new Long(sectionBean.getSelectedPool()), AgentFacade.getAgentString());
          if (poolfacade!=null) {
            poolname = poolfacade.getTitle();
          }
          section.addSectionMetaData(SectionDataIfc.POOLNAME_FOR_RANDOM_DRAW, poolname);
        }
      }

      // attach the section's attachment list to sectionBean for display
      List attachmentList = section.getSectionAttachmentList();
      sectionBean.setAttachmentList(attachmentList);
      if (attachmentList != null && attachmentList.size() >0){
        sectionBean.setHasAttachment(true);
      }
      else{
        sectionBean.setHasAttachment(false);
      }
    }

    // if author-type is random draw from pool, add all items from pool now
    // Note: a pool can only be randomly drawn by one part. if part A is created to randomly draw from pool 1, and you create part B, and select the same pool 1, all items from part A will be removed. (item.sectionId will be set to sectionId of part B.
    // currently if a pool is selected by one random draw part it will no longer show up in the poollist for random draw
    if (addItemsFromPool)
    {
      QuestionPoolService qpservice = new QuestionPoolService();
      //ItemService itemservice = new ItemService();
      ArrayList itemlist = qpservice.getAllItems(new Long(sectionBean.getSelectedPool()) );
      int i = 0;
      Iterator iter = itemlist.iterator();
      while(iter.hasNext())
      {
        ItemFacade item= (ItemFacade) iter.next();
        item.setSection(section);
        item.setSequence(new Integer(i+1));
        section.addItem(item);
        i= i+1;
      }
    }

    assessmentService.saveOrUpdateSection(section);

    // #2 - goto editAssessment.jsp, so reset assessmentBean
    AssessmentFacade assessment = assessmentService.getAssessment(
        assessmentBean.getAssessmentId());
    assessmentBean.setAssessment(assessment);
    assessmentService.updateAssessmentLastModifiedInfo(assessment);
  }

  /**
   * Creates a new, empty section (part) in the given assessment.
   *
   * @param assessmentId id of the assessment receiving the new part
   * @return the newly created section facade
   */
  public SectionFacade addPart(String assessmentId){
    AssessmentService assessmentService = new AssessmentService();
    SectionFacade section = assessmentService.addSection(
        assessmentId);
    return section;
  }

  /**
   * Validates the "number of questions to draw" entered for a random-draw
   * part: it must parse as an integer in the range 1..(pool size). On failure
   * a localized message including the pool size is queued on the FacesContext.
   *
   * @param sectionBean bean holding the selected pool and requested count
   * @return true when the drawn count is valid, false otherwise
   */
  public boolean validateItemsDrawn(SectionBean sectionBean){
    FacesContext context = FacesContext.getCurrentInstance();
    String numberDrawn = sectionBean.getNumberSelected();
    String err;
    QuestionPoolService qpservice = new QuestionPoolService();
    ArrayList itemlist = qpservice.getAllItems(new Long(sectionBean.getSelectedPool()) );
    int itemcount = itemlist.size();
    String itemcountString=" "+Integer.toString(itemcount);

    try{
      int numberDrawnInt = Integer.parseInt(numberDrawn);
      if(numberDrawnInt <=0 || numberDrawnInt>itemcount){
        err=ContextUtil.getLocalizedString("org.sakaiproject.tool.assessment.bundle.AuthorMessages","qdrawn_error");
        context.addMessage(null,new FacesMessage(err+itemcountString ));
        return false;
      }
    } catch(NumberFormatException e){
      err=ContextUtil.getLocalizedString("org.sakaiproject.tool.assessment.bundle.AuthorMessages","qdrawn_error");
      context.addMessage(null,new FacesMessage(err+itemcountString ));
      return false;
    }

    return true;
  }

  /*
  private ArrayList prepareSectionAttachment(SectionDataIfc section){
    Set attachmentSet = section.getSectionAttachmentSet();
    if (attachmentSet == null){
      attachmentSet = new HashSet();
    }
    log.debug("*** attachment size="+attachmentSet.size());
    AssessmentService assessmentService = new AssessmentService();
    String protocol = ContextUtil.getProtocol();

    ToolSession session = SessionManager.getCurrentToolSession();
    if (session.getAttribute(FilePickerHelper.FILE_PICKER_CANCEL) == null &&
        session.getAttribute(FilePickerHelper.FILE_PICKER_ATTACHMENTS) != null) {

      List refs = (List)session.getAttribute(FilePickerHelper.FILE_PICKER_ATTACHMENTS);
      if (refs!=null && refs.size() > 0){
        Reference ref = (Reference)refs.get(0);

        for(int i=0; i<refs.size(); i++) {
          ref = (Reference) refs.get(i);
          log.debug("**** ref.Id="+ref.getId());
          log.debug("**** ref.name="+ref.getProperties().getProperty( ref.getProperties().getNamePropDisplayName()));
          SectionAttachmentIfc newAttach = assessmentService.createSectionAttachment(
              section,
              ref.getId(), ref.getProperties().getProperty(
                  ref.getProperties().getNamePropDisplayName()),
              protocol);
          attachmentSet.add(newAttach);
        }
      }
    }
    session.removeAttribute(FilePickerHelper.FILE_PICKER_ATTACHMENTS);
    session.removeAttribute(FilePickerHelper.FILE_PICKER_CANCEL);

    ArrayList list = new ArrayList();
    Iterator iter = attachmentSet.iterator();
    while (iter.hasNext()){
      SectionAttachmentIfc a = (SectionAttachmentIfc)iter.next();
      list.add(a);
    }
    return list;
  }
  */
}
|
use log.debug instead of log.warn for debug messages
git-svn-id: 574bb14f304dbe16c01253ed6697ea749724087f@16943 66ffb92e-73f9-0310-93c1-f5514f145a0a
|
samigo/samigo-app/src/java/org/sakaiproject/tool/assessment/ui/listener/author/SavePartListener.java
|
use log.debug instead of log.warn for debug messages
|
|
Java
|
apache-2.0
|
e5289b24ff4a9e9c2af43bb3faf28314c6fc8725
| 0
|
OpenNTF/SmartNSF
|
package org.openntf.xrest.xsp.exec.impl;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.openntf.xrest.xsp.dsl.DSLBuilder;
import org.openntf.xrest.xsp.exec.Context;
import org.openntf.xrest.xsp.exec.ExecutorException;
import org.openntf.xrest.xsp.exec.RouteProcessorExecutor;
import org.openntf.xrest.xsp.exec.output.ExecutorExceptionProcessor;
import org.openntf.xrest.xsp.model.DataContainer;
import org.openntf.xrest.xsp.model.EventException;
import org.openntf.xrest.xsp.model.EventType;
import org.openntf.xrest.xsp.model.RouteProcessor;
import com.ibm.commons.util.io.json.JsonException;
import groovy.lang.Closure;
import lotus.domino.Document;
/**
 * Template base class for executing a single REST route.
 *
 * <p>Runs the request lifecycle in a fixed order — access check, validation,
 * pre-load, load, post-new, post-load, method-specific processing, pre-submit,
 * submit — and converts failures into HTTP error responses written to the
 * context's response. Subclasses provide the HTTP-method-specific behaviour
 * via {@link #executeMethodeSpecific(Context, DataContainer)},
 * {@link #preSubmitValues()} and {@link #submitValues()}.
 */
public abstract class AbstractRouteProcessorExecutor implements RouteProcessorExecutor {
    /** Request/response context of the current invocation. */
    protected final Context context;
    /** Writes the final result of the route execution back to the client. */
    protected abstract void submitValues() throws IOException, JsonException, ExecutorException;
    /** Hook invoked immediately before {@link #submitValues()}. */
    protected abstract void preSubmitValues() throws ExecutorException;
    /** DSL route definition (closures, access groups, data source) driving this execution. */
    protected final RouteProcessor routeProcessor;
    /** Request path; used only for error reporting. */
    protected final String path;
    /** Data loaded for this request; set by {@link #loadDocument()} or {@link #setDataContainer}. */
    protected DataContainer<?> dataContainer;

    /**
     * @param context request/response context
     * @param routeProcessor route definition to execute
     * @param path request path, reported in error payloads
     */
    public AbstractRouteProcessorExecutor(final Context context, final RouteProcessor routeProcessor, final String path) {
        this.path = path;
        this.routeProcessor = routeProcessor;
        this.context = context;
    }

    /**
     * Runs the full route lifecycle. An {@link ExecutorException} is rendered
     * with its own status code; {@link JsonException} and {@link IOException}
     * are rendered as HTTP 500. Failures while writing the error response
     * itself are only printed to stderr.
     */
    @Override
    public void execute() {
        try {
            checkAccess();
            validateRequest();
            preLoadDocument();
            loadDocument();
            postNewDocument();
            postLoadDocument();
            executeMethodeSpecific(this.context, this.dataContainer);
            preSubmitValues();
            submitValues();
        } catch (ExecutorException ex) {
            try {
                ExecutorExceptionProcessor.INSTANCE.processExecutorException(ex, context.getResponse());
            } catch (Exception e) {
                // Last-resort: the error response itself could not be written.
                e.printStackTrace();
            }
        } catch (JsonException ex) {
            try {
                ExecutorExceptionProcessor.INSTANCE.processGeneralException(500, ex, context.getResponse());
            } catch (Exception e) {
                e.printStackTrace();
            }
        } catch (IOException ex) {
            try {
                ExecutorExceptionProcessor.INSTANCE.processGeneralException(500, ex, context.getResponse());
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    }

    /**
     * Throws a 403 {@link ExecutorException} unless the route has no access
     * restriction, or the current user name / one of the user's groups or
     * roles appears in the route's access-group list.
     */
    private void checkAccess() throws ExecutorException {
        // TODO: Looser! You missing the context
        List<String> allowedUsersAndGroups = routeProcessor.getAccessGroups();
        if (allowedUsersAndGroups == null || allowedUsersAndGroups.isEmpty()) {
            // No restriction configured: everyone may execute this route.
            return;
        }
        List<String> myGroups = new ArrayList<String>();
        myGroups.add(context.getUserName());
        myGroups.addAll(context.getGroups());
        myGroups.addAll(context.getRoles());
        for (String me : myGroups) {
            if (allowedUsersAndGroups.contains(me)) {
                return;
            }
        }
        throw new ExecutorException(403, "Access denied for user " + context.getUserName(), path, "checkAccess");
    }

    /**
     * Invokes the route's VALIDATE closure, if any. A thrown
     * {@link EventException} maps to HTTP 400, anything else to HTTP 500.
     */
    private void validateRequest() throws ExecutorException {
        try {
            Closure<?> cl = routeProcessor.getEventClosure(EventType.VALIDATE);
            if (cl != null) {
                DSLBuilder.callClosure(cl, context);
            }
        } catch (EventException e) {
            throw new ExecutorException(400, "Validation Error: " + e.getMessage(), e, path, "validation");
        } catch (Exception e) {
            throw new ExecutorException(500, "Runtime Error: " + e.getMessage(), e, path, "validation");
        }
    }

    /** Invokes the PRE_LOAD_DOCUMENT closure, if any, before the data is loaded. */
    private void preLoadDocument() throws ExecutorException {
        try {
            Closure<?> cl = routeProcessor.getEventClosure(EventType.PRE_LOAD_DOCUMENT);
            if (cl != null) {
                DSLBuilder.callClosure(cl, context);
            }
        } catch (EventException e) {
            throw new ExecutorException(400, "Pre Load Error: " + e.getMessage(), e, path, "preloadmodel");
        } catch (Exception e) {
            throw new ExecutorException(500, "Runtime Error: " + e.getMessage(), e, path, "preloadmodel");
        }
    }

    /** Loads the data container for this request from the route definition. */
    private void loadDocument() throws ExecutorException {
        dataContainer = routeProcessor.getDataContainer(context);
    }

    /**
     * Invokes the POST_NEW closure for a freshly created Domino document.
     * Skipped entirely for list or binary containers; for a document
     * container the closure only runs when the note is new.
     */
    private void postNewDocument() throws ExecutorException {
        if (dataContainer.isList() || dataContainer.isBinary()) {
            return;
        }
        try {
            Document doc = (Document) dataContainer.getData();
            Closure<?> cl = routeProcessor.getEventClosure(EventType.POST_NEW);
            if (cl != null && doc.isNewNote()) {
                DSLBuilder.callClosure(cl, context, doc);
            }
        } catch (EventException e) {
            throw new ExecutorException(400, "Post Load Error: " + e.getMessage(), e, path, "postloadmodel");
        } catch (Exception e) {
            throw new ExecutorException(500, "Runtime Error: " + e.getMessage(), e, path, "postloadmodel");
        }
    }

    /** Invokes the POST_LOAD_DOCUMENT closure, if any, with the loaded data. */
    private void postLoadDocument() throws ExecutorException {
        try {
            Closure<?> cl = routeProcessor.getEventClosure(EventType.POST_LOAD_DOCUMENT);
            if (cl != null) {
                DSLBuilder.callClosure(cl, context, dataContainer.getData());
            }
        } catch (EventException e) {
            throw new ExecutorException(400, "Post Load Error: " + e.getMessage(), e, path, "postloadmodel");
        } catch (Exception e) {
            throw new ExecutorException(500, "Runtime Error: " + e.getMessage(), e, path, "postloadmodel");
        }
    }

    /** HTTP-method-specific processing step, supplied by subclasses. */
    protected abstract void executeMethodeSpecific(Context context, DataContainer<?> container) throws ExecutorException;

    /** Allows tests/subclasses to inject a container instead of loading one. */
    public void setDataContainer(final DataContainer<?> container) {
        this.dataContainer = container;
    }

    /** @return the route definition backing this executor */
    protected RouteProcessor getRouteProcessor() {
        return routeProcessor;
    }

    /** @return the request path used for error reporting */
    protected String getPath() {
        return path;
    }
}
|
server/org.openntf.xrest.xsp/src/org/openntf/xrest/xsp/exec/impl/AbstractRouteProcessorExecutor.java
|
package org.openntf.xrest.xsp.exec.impl;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.openntf.xrest.xsp.dsl.DSLBuilder;
import org.openntf.xrest.xsp.exec.Context;
import org.openntf.xrest.xsp.exec.ExecutorException;
import org.openntf.xrest.xsp.exec.RouteProcessorExecutor;
import org.openntf.xrest.xsp.exec.output.ExecutorExceptionProcessor;
import org.openntf.xrest.xsp.model.DataContainer;
import org.openntf.xrest.xsp.model.EventException;
import org.openntf.xrest.xsp.model.EventType;
import org.openntf.xrest.xsp.model.RouteProcessor;
import com.ibm.commons.util.io.json.JsonException;
import groovy.lang.Closure;
import lotus.domino.Document;
/**
 * Template base class for executing a single REST route.
 *
 * <p>Runs the request lifecycle in a fixed order — access check, validation,
 * pre-load, load, post-new, post-load, method-specific processing, pre-submit,
 * submit — and converts failures into HTTP error responses written to the
 * context's response. Subclasses provide the HTTP-method-specific behaviour
 * via {@link #executeMethodeSpecific(Context, DataContainer)},
 * {@link #preSubmitValues()} and {@link #submitValues()}.
 */
public abstract class AbstractRouteProcessorExecutor implements RouteProcessorExecutor {
    /** Request/response context of the current invocation. */
    protected final Context context;
    /** Writes the final result of the route execution back to the client. */
    protected abstract void submitValues() throws IOException, JsonException, ExecutorException;
    /** Hook invoked immediately before {@link #submitValues()}. */
    protected abstract void preSubmitValues() throws ExecutorException;
    /** DSL route definition (closures, access groups, data source) driving this execution. */
    protected final RouteProcessor routeProcessor;
    /** Request path; used only for error reporting. */
    protected final String path;
    /** Data loaded for this request; set by {@link #loadDocument()} or {@link #setDataContainer}. */
    protected DataContainer<?> dataContainer;

    /**
     * @param context request/response context
     * @param routeProcessor route definition to execute
     * @param path request path, reported in error payloads
     */
    public AbstractRouteProcessorExecutor(final Context context, final RouteProcessor routeProcessor, final String path) {
        this.path = path;
        this.routeProcessor = routeProcessor;
        this.context = context;
    }

    /**
     * Runs the full route lifecycle. An {@link ExecutorException} is rendered
     * with its own status code; {@link JsonException} and {@link IOException}
     * are rendered as HTTP 500. Failures while writing the error response
     * itself are only printed to stderr.
     */
    @Override
    public void execute() {
        try {
            checkAccess();
            validateRequest();
            preLoadDocument();
            loadDocument();
            postNewDocument();
            postLoadDocument();
            executeMethodeSpecific(this.context, this.dataContainer);
            preSubmitValues();
            submitValues();
        } catch (ExecutorException ex) {
            try {
                ExecutorExceptionProcessor.INSTANCE.processExecutorException(ex, context.getResponse());
            } catch (Exception e) {
                // Last-resort: the error response itself could not be written.
                e.printStackTrace();
            }
        } catch (JsonException ex) {
            try {
                ExecutorExceptionProcessor.INSTANCE.processGeneralException(500, ex, context.getResponse());
            } catch (Exception e) {
                e.printStackTrace();
            }
        } catch (IOException ex) {
            try {
                ExecutorExceptionProcessor.INSTANCE.processGeneralException(500, ex, context.getResponse());
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    }

    /**
     * Throws a 403 {@link ExecutorException} unless the route has no access
     * restriction, or the current user name / one of the user's groups or
     * roles appears in the route's access-group list.
     */
    private void checkAccess() throws ExecutorException {
        // TODO: Looser! You missing the context
        List<String> allowedUsersAndGroups = routeProcessor.getAccessGroups();
        if (allowedUsersAndGroups == null || allowedUsersAndGroups.isEmpty()) {
            // No restriction configured: everyone may execute this route.
            return;
        }
        List<String> myGroups = new ArrayList<String>();
        myGroups.add(context.getUserName());
        myGroups.addAll(context.getGroups());
        myGroups.addAll(context.getRoles());
        for (String me : myGroups) {
            if (allowedUsersAndGroups.contains(me)) {
                return;
            }
        }
        throw new ExecutorException(403, "Access denied for user " + context.getUserName(), path, "checkAccess");
    }

    /**
     * Invokes the route's VALIDATE closure, if any. A thrown
     * {@link EventException} maps to HTTP 400, anything else to HTTP 500.
     */
    private void validateRequest() throws ExecutorException {
        try {
            Closure<?> cl = routeProcessor.getEventClosure(EventType.VALIDATE);
            if (cl != null) {
                DSLBuilder.callClosure(cl, context);
            }
        } catch (EventException e) {
            throw new ExecutorException(400, "Validation Error: " + e.getMessage(), e, path, "validation");
        } catch (Exception e) {
            // Fixed typo in client-visible message: "Runntime" -> "Runtime".
            throw new ExecutorException(500, "Runtime Error: " + e.getMessage(), e, path, "validation");
        }
    }

    /** Invokes the PRE_LOAD_DOCUMENT closure, if any, before the data is loaded. */
    private void preLoadDocument() throws ExecutorException {
        try {
            Closure<?> cl = routeProcessor.getEventClosure(EventType.PRE_LOAD_DOCUMENT);
            if (cl != null) {
                DSLBuilder.callClosure(cl, context);
            }
        } catch (EventException e) {
            throw new ExecutorException(400, "Pre Load Error: " + e.getMessage(), e, path, "preloadmodel");
        } catch (Exception e) {
            throw new ExecutorException(500, "Runtime Error: " + e.getMessage(), e, path, "preloadmodel");
        }
    }

    /** Loads the data container for this request from the route definition. */
    private void loadDocument() throws ExecutorException {
        dataContainer = routeProcessor.getDataContainer(context);
    }

    /**
     * Invokes the POST_NEW closure for a freshly created Domino document.
     * Skipped entirely for list or binary containers; for a document
     * container the closure only runs when the note is new.
     */
    private void postNewDocument() throws ExecutorException {
        if (dataContainer.isList() || dataContainer.isBinary()) {
            return;
        }
        try {
            Document doc = (Document) dataContainer.getData();
            Closure<?> cl = routeProcessor.getEventClosure(EventType.POST_NEW);
            if (cl != null && doc.isNewNote()) {
                DSLBuilder.callClosure(cl, context, doc);
            }
        } catch (EventException e) {
            throw new ExecutorException(400, "Post Load Error: " + e.getMessage(), e, path, "postloadmodel");
        } catch (Exception e) {
            throw new ExecutorException(500, "Runtime Error: " + e.getMessage(), e, path, "postloadmodel");
        }
    }

    /** Invokes the POST_LOAD_DOCUMENT closure, if any, with the loaded data. */
    private void postLoadDocument() throws ExecutorException {
        try {
            Closure<?> cl = routeProcessor.getEventClosure(EventType.POST_LOAD_DOCUMENT);
            if (cl != null) {
                DSLBuilder.callClosure(cl, context, dataContainer.getData());
            }
        } catch (EventException e) {
            throw new ExecutorException(400, "Post Load Error: " + e.getMessage(), e, path, "postloadmodel");
        } catch (Exception e) {
            throw new ExecutorException(500, "Runtime Error: " + e.getMessage(), e, path, "postloadmodel");
        }
    }

    /** HTTP-method-specific processing step, supplied by subclasses. */
    protected abstract void executeMethodeSpecific(Context context, DataContainer<?> container) throws ExecutorException;

    /** Allows tests/subclasses to inject a container instead of loading one. */
    public void setDataContainer(final DataContainer<?> container) {
        this.dataContainer = container;
    }

    /** @return the route definition backing this executor */
    protected RouteProcessor getRouteProcessor() {
        return routeProcessor;
    }

    /** @return the request path used for error reporting */
    protected String getPath() {
        return path;
    }
}
|
fix typo
|
server/org.openntf.xrest.xsp/src/org/openntf/xrest/xsp/exec/impl/AbstractRouteProcessorExecutor.java
|
fix typo
|
|
Java
|
apache-2.0
|
44d0f92ac4eab5baf24535013f84be112c695a62
| 0
|
dcsch/typecast
|
/*
============================================================================
The Apache Software License, Version 1.1
============================================================================
Copyright (C) 1999-2003 The Apache Software Foundation. All rights reserved.
Redistribution and use in source and binary forms, with or without modifica-
tion, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
3. The end-user documentation included with the redistribution, if any, must
include the following acknowledgment: "This product includes software
developed by the Apache Software Foundation (http://www.apache.org/)."
Alternately, this acknowledgment may appear in the software itself, if
and wherever such third-party acknowledgments normally appear.
4. The names "Batik" and "Apache Software Foundation" must not be
used to endorse or promote products derived from this software without
prior written permission. For written permission, please contact
apache@apache.org.
5. Products derived from this software may not be called "Apache", nor may
"Apache" appear in their name, without prior written permission of the
Apache Software Foundation.
THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED WARRANTIES,
INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
APACHE SOFTWARE FOUNDATION OR ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLU-
DING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
This software consists of voluntary contributions made by many individuals
on behalf of the Apache Software Foundation. For more information on the
Apache Software Foundation, please see <http://www.apache.org/>.
*/
package net.java.dev.typecast.ot;
import java.io.DataInputStream;
import java.io.IOException;
import net.java.dev.typecast.ot.table.DirectoryEntry;
import net.java.dev.typecast.ot.table.TTCHeader;
import net.java.dev.typecast.ot.table.TableDirectory;
import net.java.dev.typecast.ot.table.Table;
import net.java.dev.typecast.ot.table.Os2Table;
import net.java.dev.typecast.ot.table.CmapTable;
import net.java.dev.typecast.ot.table.GlyfTable;
import net.java.dev.typecast.ot.table.HeadTable;
import net.java.dev.typecast.ot.table.HheaTable;
import net.java.dev.typecast.ot.table.HmtxTable;
import net.java.dev.typecast.ot.table.LocaTable;
import net.java.dev.typecast.ot.table.MaxpTable;
import net.java.dev.typecast.ot.table.NameTable;
import net.java.dev.typecast.ot.table.PostTable;
import net.java.dev.typecast.ot.table.TableFactory;
/**
 * The TrueType font.
 *
 * <p>Holds the table directory and the parsed table objects of a single font
 * within an {@link OTFontCollection}, and exposes typed accessors for the
 * commonly used tables.
 *
 * @version $Id: OTFont.java,v 1.4 2007-01-30 03:51:40 davidsch Exp $
 * @author <a href="mailto:davidsch@dev.java.net">David Schweinsberg</a>
 */
public class OTFont {

    /** Owning font collection; passed through to the table factory. */
    private OTFontCollection _fc;
    private TableDirectory _tableDirectory = null;
    /** All parsed tables, in directory order (prerequisite tables first). */
    private Table[] _tables;
    private Os2Table _os2;
    private CmapTable _cmap;
    private GlyfTable _glyf;
    private HeadTable _head;
    private HheaTable _hhea;
    private HmtxTable _hmtx;
    private LocaTable _loca;
    private MaxpTable _maxp;
    private NameTable _name;
    private PostTable _post;

    /**
     * Constructor
     */
    public OTFont(OTFontCollection fc) {
        _fc = fc;
    }

    /**
     * Linear search for a parsed table by its tag.
     *
     * @param tableType one of the {@code Table} tag constants
     * @return the matching table, or {@code null} if not present
     */
    public Table getTable(int tableType) {
        for (int i = 0; i < _tables.length; i++) {
            if ((_tables[i] != null) && (_tables[i].getType() == tableType)) {
                return _tables[i];
            }
        }
        return null;
    }

    public Os2Table getOS2Table() {
        return _os2;
    }

    public CmapTable getCmapTable() {
        return _cmap;
    }

    public HeadTable getHeadTable() {
        return _head;
    }

    public HheaTable getHheaTable() {
        return _hhea;
    }

    public HmtxTable getHmtxTable() {
        return _hmtx;
    }

    public LocaTable getLocaTable() {
        return _loca;
    }

    public MaxpTable getMaxpTable() {
        return _maxp;
    }

    public NameTable getNameTable() {
        return _name;
    }

    public PostTable getPostTable() {
        return _post;
    }

    /** Typographic ascent from the 'hhea' table. */
    public int getAscent() {
        return _hhea.getAscender();
    }

    /** Typographic descent from the 'hhea' table. */
    public int getDescent() {
        return _hhea.getDescender();
    }

    /** Glyph count from the 'maxp' table. */
    public int getNumGlyphs() {
        return _maxp.getNumGlyphs();
    }

    /**
     * Builds a {@link Glyph} for glyph index {@code i} by combining its 'glyf'
     * description with its 'hmtx' metrics, or {@code null} when the glyph has
     * no outline description.
     */
    public Glyph getGlyph(int i) {
        return (_glyf.getDescription(i) != null)
            ? new Glyph(
                _glyf.getDescription(i),
                _hmtx.getLeftSideBearing(i),
                _hmtx.getAdvanceWidth(i))
            : null;
    }

    public TableDirectory getTableDirectory() {
        return _tableDirectory;
    }

    /**
     * Seeks to a single table by tag and parses it.
     *
     * <p>NOTE(review): relies on {@code dis.reset()} returning to the stream
     * origin — presumably the caller has marked the stream at position 0 with
     * a sufficient read limit; confirm against the caller. Also assumes the
     * tag exists in the directory ({@code getEntryByTag} returning null would
     * NPE here).
     */
    private Table readTable(
            DataInputStream dis,
            int tablesOrigin,
            int tag) throws IOException {
        dis.reset();
        DirectoryEntry entry = _tableDirectory.getEntryByTag(tag);
        dis.skip(tablesOrigin + entry.getOffset());
        return TableFactory.create(_fc, this, entry, dis);
    }

    /**
     * @param dis OpenType/TrueType font file data.
     * @param directoryOffset The Table Directory offset within the file. For a
     * regular TTF/OTF file this will be zero, but for a TTC (Font Collection)
     * the offset is retrieved from the TTC header. For a Mac font resource,
     * offset is retrieved from the resource headers.
     * @param tablesOrigin The point the table offsets are calculated from.
     * Once again, in a regular TTF file, this will be zero. In a TTC it is
     * also zero, but within a Mac resource, it is the beginning of the
     * individual font resource data.
     */
    protected void read(
            DataInputStream dis,
            int directoryOffset,
            int tablesOrigin) throws IOException {
        // Load the table directory
        dis.reset();
        dis.skip(directoryOffset);
        _tableDirectory = new TableDirectory(dis);
        _tables = new Table[_tableDirectory.getNumTables()];

        // Load some prerequisite tables first: later tables (e.g. 'glyf',
        // 'hmtx') need information from these while they are being parsed.
        _head = (HeadTable) readTable(dis, tablesOrigin, Table.head);
        _hhea = (HheaTable) readTable(dis, tablesOrigin, Table.hhea);
        _maxp = (MaxpTable) readTable(dis, tablesOrigin, Table.maxp);
        _loca = (LocaTable) readTable(dis, tablesOrigin, Table.loca);
        int index = 0;
        _tables[index++] = _head;
        _tables[index++] = _hhea;
        _tables[index++] = _maxp;
        if (_loca != null) {
            _tables[index++] = _loca;
        }

        // Load all other tables, skipping the prerequisites already parsed.
        for (int i = 0; i < _tableDirectory.getNumTables(); i++) {
            DirectoryEntry entry = _tableDirectory.getEntry(i);
            if (entry.getTag() == Table.head
                    || entry.getTag() == Table.hhea
                    || entry.getTag() == Table.maxp
                    || entry.getTag() == Table.loca) {
                continue;
            }
            dis.reset();
            dis.skip(tablesOrigin + entry.getOffset());
            _tables[index] = TableFactory.create(_fc, this, entry, dis);
            ++index;
        }

        // Get references to commonly used tables (these happen to be all the
        // required tables)
        _cmap = (CmapTable) getTable(Table.cmap);
        _hmtx = (HmtxTable) getTable(Table.hmtx);
        _name = (NameTable) getTable(Table.name);
        _os2 = (Os2Table) getTable(Table.OS_2);
        _post = (PostTable) getTable(Table.post);

        // If this is a TrueType outline, then we'll have at least the
        // 'glyf' table (along with the 'loca' table)
        _glyf = (GlyfTable) getTable(Table.glyf);
    }

    public String toString() {
        if (_tableDirectory != null) {
            return _tableDirectory.toString();
        } else {
            return "Empty font";
        }
    }
}
|
src/net/java/dev/typecast/ot/OTFont.java
|
/*
============================================================================
The Apache Software License, Version 1.1
============================================================================
Copyright (C) 1999-2003 The Apache Software Foundation. All rights reserved.
Redistribution and use in source and binary forms, with or without modifica-
tion, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
3. The end-user documentation included with the redistribution, if any, must
include the following acknowledgment: "This product includes software
developed by the Apache Software Foundation (http://www.apache.org/)."
Alternately, this acknowledgment may appear in the software itself, if
and wherever such third-party acknowledgments normally appear.
4. The names "Batik" and "Apache Software Foundation" must not be
used to endorse or promote products derived from this software without
prior written permission. For written permission, please contact
apache@apache.org.
5. Products derived from this software may not be called "Apache", nor may
"Apache" appear in their name, without prior written permission of the
Apache Software Foundation.
THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED WARRANTIES,
INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
APACHE SOFTWARE FOUNDATION OR ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLU-
DING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
This software consists of voluntary contributions made by many individuals
on behalf of the Apache Software Foundation. For more information on the
Apache Software Foundation, please see <http://www.apache.org/>.
*/
package net.java.dev.typecast.ot;
import java.io.DataInputStream;
import java.io.IOException;
import net.java.dev.typecast.ot.table.TTCHeader;
import net.java.dev.typecast.ot.table.TableDirectory;
import net.java.dev.typecast.ot.table.Table;
import net.java.dev.typecast.ot.table.Os2Table;
import net.java.dev.typecast.ot.table.CmapTable;
import net.java.dev.typecast.ot.table.GlyfTable;
import net.java.dev.typecast.ot.table.HeadTable;
import net.java.dev.typecast.ot.table.HheaTable;
import net.java.dev.typecast.ot.table.HmtxTable;
import net.java.dev.typecast.ot.table.LocaTable;
import net.java.dev.typecast.ot.table.MaxpTable;
import net.java.dev.typecast.ot.table.NameTable;
import net.java.dev.typecast.ot.table.PostTable;
import net.java.dev.typecast.ot.table.TableFactory;
/**
 * The TrueType font.
 *
 * <p>Holds the table directory and the parsed table objects of a single font
 * within an {@link OTFontCollection}.
 *
 * @version $Id: OTFont.java,v 1.3 2004-12-21 10:20:06 davidsch Exp $
 * @author <a href="mailto:davidsch@dev.java.net">David Schweinsberg</a>
 */
public class OTFont {

    /** Owning font collection; passed through to the table factory. */
    private OTFontCollection _fc;
    private TableDirectory _tableDirectory = null;
    /** All parsed tables, in table-directory order. */
    private Table[] _tables;
    private Os2Table _os2;
    private CmapTable _cmap;
    private GlyfTable _glyf;
    private HeadTable _head;
    private HheaTable _hhea;
    private HmtxTable _hmtx;
    private LocaTable _loca;
    private MaxpTable _maxp;
    private NameTable _name;
    private PostTable _post;

    /**
     * Constructor
     */
    public OTFont(OTFontCollection fc) {
        _fc = fc;
    }

    /**
     * Linear search for a parsed table by its tag.
     *
     * @param tableType one of the {@code Table} tag constants
     * @return the matching table, or {@code null} if not present
     */
    public Table getTable(int tableType) {
        for (int i = 0; i < _tables.length; i++) {
            if ((_tables[i] != null) && (_tables[i].getType() == tableType)) {
                return _tables[i];
            }
        }
        return null;
    }

    public Os2Table getOS2Table() {
        return _os2;
    }

    public CmapTable getCmapTable() {
        return _cmap;
    }

    public HeadTable getHeadTable() {
        return _head;
    }

    public HheaTable getHheaTable() {
        return _hhea;
    }

    public HmtxTable getHmtxTable() {
        return _hmtx;
    }

    public LocaTable getLocaTable() {
        return _loca;
    }

    public MaxpTable getMaxpTable() {
        return _maxp;
    }

    public NameTable getNameTable() {
        return _name;
    }

    public PostTable getPostTable() {
        return _post;
    }

    /** Typographic ascent from the 'hhea' table. */
    public int getAscent() {
        return _hhea.getAscender();
    }

    /** Typographic descent from the 'hhea' table. */
    public int getDescent() {
        return _hhea.getDescender();
    }

    /** Glyph count from the 'maxp' table. */
    public int getNumGlyphs() {
        return _maxp.getNumGlyphs();
    }

    /**
     * Builds a {@link Glyph} for glyph index {@code i} by combining its 'glyf'
     * description with its 'hmtx' metrics, or {@code null} when the glyph has
     * no outline description.
     */
    public Glyph getGlyph(int i) {
        return (_glyf.getDescription(i) != null)
            ? new Glyph(
                _glyf.getDescription(i),
                _hmtx.getLeftSideBearing(i),
                _hmtx.getAdvanceWidth(i))
            : null;
    }

    public TableDirectory getTableDirectory() {
        return _tableDirectory;
    }

    /**
     * @param dis OpenType/TrueType font file data.
     * @param directoryOffset The Table Directory offset within the file. For a
     * regular TTF/OTF file this will be zero, but for a TTC (Font Collection)
     * the offset is retrieved from the TTC header. For a Mac font resource,
     * offset is retrieved from the resource headers.
     * @param tablesOrigin The point the table offsets are calculated from.
     * Once again, in a regular TTF file, this will be zero. In a TTC it is
     * also zero, but within a Mac resource, it is the beginning of the
     * individual font resource data.
     */
    protected void read(
            DataInputStream dis,
            int directoryOffset,
            int tablesOrigin) throws IOException {
        // Load the table directory.
        // NOTE(review): dis.reset() presumably returns to the stream origin —
        // the caller must have marked position 0 with a sufficient read limit.
        dis.reset();
        dis.skip(directoryOffset);
        _tableDirectory = new TableDirectory(dis);
        _tables = new Table[_tableDirectory.getNumTables()];

        // Load each of the tables
        for (int i = 0; i < _tableDirectory.getNumTables(); i++) {
            dis.reset();
            dis.skip(tablesOrigin + _tableDirectory.getEntry(i).getOffset());
            _tables[i] =
                TableFactory.create(_fc, _tableDirectory.getEntry(i), dis);
        }

        // Get references to commonly used tables (these happen to be all the
        // required tables)
        _cmap = (CmapTable) getTable(Table.cmap);
        _head = (HeadTable) getTable(Table.head);
        _hhea = (HheaTable) getTable(Table.hhea);
        _hmtx = (HmtxTable) getTable(Table.hmtx);
        _maxp = (MaxpTable) getTable(Table.maxp);
        _name = (NameTable) getTable(Table.name);
        _os2 = (Os2Table) getTable(Table.OS_2);
        _post = (PostTable) getTable(Table.post);

        // If this is a TrueType outline, then we'll have at least the
        // following tables
        _glyf = (GlyfTable) getTable(Table.glyf);
        _loca = (LocaTable) getTable(Table.loca);

        // Initialize the tables that require it.
        // NOTE(review): _hmtx/_hhea/_maxp are dereferenced unconditionally
        // here — a font missing any of these required tables would NPE;
        // confirm whether malformed fonts can reach this point.
        _hmtx.init(
            _hhea.getNumberOfHMetrics(),
            _maxp.getNumGlyphs() - _hhea.getNumberOfHMetrics());
        if (_glyf != null) {
            _loca.init(_maxp.getNumGlyphs(), _head.getIndexToLocFormat() == 0);
            _glyf.init(_maxp.getNumGlyphs(), _loca);
        }
    }

    public String toString() {
        if (_tableDirectory != null) {
            return _tableDirectory.toString();
        } else {
            return "Empty font";
        }
    }
}
|
Restructured table loading so that prerequisite tables are loaded first, which are then referenced whilst loading tables that need information from them. This eliminates the need for a separate 'init' stage for those tables.
|
src/net/java/dev/typecast/ot/OTFont.java
|
Restructured table loading so that prerequisite tables are loaded first, which are then referenced whilst loading tables that need information from them. This eliminates the need for a separate 'init' stage for those tables.
|
|
Java
|
apache-2.0
|
9cb22033d12be5f52a26376a2b833ead97aafc1c
| 0
|
Jasig/cas,leleuj/cas,apereo/cas,pdrados/cas,leleuj/cas,fogbeam/cas_mirror,pdrados/cas,philliprower/cas,rkorn86/cas,apereo/cas,leleuj/cas,apereo/cas,fogbeam/cas_mirror,apereo/cas,Jasig/cas,Jasig/cas,pdrados/cas,apereo/cas,rkorn86/cas,philliprower/cas,philliprower/cas,rkorn86/cas,pdrados/cas,philliprower/cas,leleuj/cas,fogbeam/cas_mirror,fogbeam/cas_mirror,Jasig/cas,philliprower/cas,leleuj/cas,pdrados/cas,apereo/cas,philliprower/cas,fogbeam/cas_mirror,rkorn86/cas,philliprower/cas,fogbeam/cas_mirror,apereo/cas,pdrados/cas,leleuj/cas
|
package org.apereo.cas.aup;
import org.apereo.cas.authentication.CoreAuthenticationTestUtils;
import org.apereo.cas.authentication.Credential;
import org.apereo.cas.ticket.registry.TicketRegistrySupport;
import org.apereo.cas.util.CollectionUtils;
import org.apereo.cas.web.support.WebUtils;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.mock.web.MockHttpServletRequest;
import org.springframework.mock.web.MockHttpServletResponse;
import org.springframework.mock.web.MockServletContext;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.webflow.context.servlet.ServletExternalContext;
import org.springframework.webflow.test.MockRequestContext;
import static org.junit.Assert.*;
import static org.mockito.Mockito.*;
/**
 * This is {@link DefaultAcceptableUsagePolicyRepositoryTests}.
 *
 * <p>Exercises the verify/submit round-trip of the default AUP repository
 * against a mock webflow request context.
 *
 * @author Misagh Moayyed
 * @since 5.3.0
 */
@RunWith(SpringRunner.class)
public class DefaultAcceptableUsagePolicyRepositoryTests {
    @Test
    public void verifyAction() {
        // Build a mock webflow request context backed by mock servlet objects.
        final MockHttpServletRequest httpRequest = new MockHttpServletRequest();
        final MockRequestContext requestContext = new MockRequestContext();
        final ServletExternalContext externalContext =
            new ServletExternalContext(new MockServletContext(), httpRequest, new MockHttpServletResponse());
        requestContext.setExternalContext(externalContext);

        // Stub the registry support to resolve a principal whose AUP
        // attribute starts out unaccepted.
        final TicketRegistrySupport registrySupport = mock(TicketRegistrySupport.class);
        when(registrySupport.getAuthenticatedPrincipalFrom(anyString()))
            .thenReturn(CoreAuthenticationTestUtils.getPrincipal(CollectionUtils.wrap("carLicense", "false")));

        final DefaultAcceptableUsagePolicyRepository repository =
            new DefaultAcceptableUsagePolicyRepository(registrySupport);

        // Seed the flow scopes with an authentication and a TGT id.
        WebUtils.putAuthentication(CoreAuthenticationTestUtils.getAuthentication(), requestContext);
        WebUtils.putTicketGrantingTicketInScopes(requestContext, "TGT-12345");

        final Credential credential =
            CoreAuthenticationTestUtils.getCredentialsWithSameUsernameAndPassword("casaup");

        // Unaccepted before submission, accepted afterwards.
        assertFalse(repository.verify(requestContext, credential).getLeft());
        assertTrue(repository.submit(requestContext, credential));
        assertTrue(repository.verify(requestContext, credential).getLeft());
    }
}
|
support/cas-server-support-aup-core/src/test/java/org/apereo/cas/aup/DefaultAcceptableUsagePolicyRepositoryTests.java
|
package org.apereo.cas.aup;
import org.apereo.cas.authentication.CoreAuthenticationTestUtils;
import org.apereo.cas.authentication.Credential;
import org.apereo.cas.ticket.registry.TicketRegistrySupport;
import org.apereo.cas.util.CollectionUtils;
import org.apereo.cas.web.support.WebUtils;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.mock.web.MockHttpServletRequest;
import org.springframework.mock.web.MockHttpServletResponse;
import org.springframework.mock.web.MockServletContext;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.webflow.context.servlet.ServletExternalContext;
import org.springframework.webflow.test.MockRequestContext;
import static org.junit.Assert.*;
import static org.mockito.Mockito.*;
/**
 * This is {@link DefaultAcceptableUsagePolicyRepositoryTests}.
 *
 * <p>Exercises the verify/submit round-trip of the default AUP repository
 * against a mock webflow request context.
 *
 * @author Misagh Moayyed
 * @since 5.3.0
 */
@RunWith(SpringRunner.class)
public class DefaultAcceptableUsagePolicyRepositoryTests {
    @Test
    public void verifyAction() {
        final MockRequestContext context = new MockRequestContext();
        final MockHttpServletRequest request = new MockHttpServletRequest();
        context.setExternalContext(new ServletExternalContext(new MockServletContext(), request, new MockHttpServletResponse()));
        // Stub the registry support to resolve a principal whose AUP
        // attribute starts out unaccepted.
        final TicketRegistrySupport support = mock(TicketRegistrySupport.class);
        when(support.getAuthenticatedPrincipalFrom(anyString()))
            .thenReturn(CoreAuthenticationTestUtils.getPrincipal(CollectionUtils.wrap("carLicense", "false")));
        final DefaultAcceptableUsagePolicyRepository repo = new DefaultAcceptableUsagePolicyRepository(support);
        // Fix: the repository resolves the current user from the flow scope,
        // so an authentication must be placed into the context before
        // verify/submit are invoked.
        WebUtils.putAuthentication(CoreAuthenticationTestUtils.getAuthentication(), context);
        WebUtils.putTicketGrantingTicketInScopes(context, "TGT-12345");
        final Credential c = CoreAuthenticationTestUtils.getCredentialsWithSameUsernameAndPassword("casaup");
        // Unaccepted before submission, accepted afterwards.
        assertFalse(repo.verify(context, c).getLeft());
        assertTrue(repo.submit(context, c));
        assertTrue(repo.verify(context, c).getLeft());
    }
}
|
fix tests
|
support/cas-server-support-aup-core/src/test/java/org/apereo/cas/aup/DefaultAcceptableUsagePolicyRepositoryTests.java
|
fix tests
|
|
Java
|
apache-2.0
|
075768380ac72b5b1c743baf4264ac79a43254f5
| 0
|
this/carbon-uuf,wso2/carbon-uuf,wso2/carbon-uuf,this/carbon-uuf,sajithar/carbon-uuf,wso2/carbon-uuf,this/carbon-uuf,wso2/carbon-uuf,this/carbon-uuf,sajithar/carbon-uuf
|
/*
* Copyright (c) 2016, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wso2.carbon.uuf.renderablecreator.hbs.helpers.runtime;
import com.github.jknack.handlebars.Helper;
import com.github.jknack.handlebars.Options;
import org.wso2.carbon.uuf.api.config.Configuration;
import org.wso2.carbon.uuf.api.config.I18nResources;
import org.wso2.carbon.uuf.core.Lookup;
import org.wso2.carbon.uuf.core.RequestLookup;
import org.wso2.carbon.uuf.renderablecreator.hbs.core.HbsRenderable;
import org.wso2.carbon.uuf.spi.HttpRequest;
import java.io.IOException;
import java.util.Locale;
import java.util.Map;
/**
 * Handlebars helper that translates a message key into the current locale,
 * e.g. {@code {{i18n "key"}}} or {@code {{i18n "key" locale="en-US"}}}.
 *
 * <p>Locale resolution order: explicit {@code locale} hash parameter, then the
 * request's Accept-Language header (matched against available i18n bundles),
 * then the app configuration's {@code defaultLocale}, and finally English.
 * The resolved request locale is cached in the Handlebars data context so it
 * is computed at most once per request.
 */
public class I18nHelper implements Helper<String> {

    public static final String HELPER_NAME = "i18n";

    /** Used when no locale can be resolved from parameter, request or config. */
    private static final Locale FALLBACK_LOCALE = Locale.ENGLISH;

    /** Data-context key under which the per-request locale is cached. */
    private static final String DATA_KEY_CURRENT_REQUEST_LOCALE = "CURRENT_LOCALE";

    /**
     * Translates {@code key} using the resolved locale.
     *
     * @param key message key to translate; must not be null
     * @param options Handlebars options carrying the lookup and request data
     * @return the translated message, or {@code key} itself as the default
     *         when no translation exists
     * @throws IllegalArgumentException if {@code key} is null
     */
    @Override
    public CharSequence apply(String key, Options options) throws IOException {
        if (key == null) {
            throw new IllegalArgumentException("Key of a translating string cannot be null.");
        }
        Locale locale;
        // NOTE(review): assumes DATA_KEY_LOOKUP is always present in the
        // data context — a null lookup would NPE below; confirm the renderer
        // always sets it.
        Lookup lookup = options.data(HbsRenderable.DATA_KEY_LOOKUP);
        // First priority is given to the passed locale parameter. {{i18n "key" locale="en-US"}}
        locale = computeLocale(options.hash);
        if (locale == null) {
            // Check whether we have already computed the locale for this request.
            Locale currentRequestLocale = options.data(DATA_KEY_CURRENT_REQUEST_LOCALE);
            if (currentRequestLocale == null) {
                // Second priority is given to the accept language header of the request.
                RequestLookup requestLookup = options.data(HbsRenderable.DATA_KEY_REQUEST_LOOKUP);
                locale = computeLocale(requestLookup.getRequest(), lookup.getI18nResources());
                if (locale == null) {
                    // Seems like we have failed to compute a locale in above approaches.
                    // Let's check whether a default locale is configured in the configuration.
                    locale = computeLocale(lookup.getConfiguration());
                    if (locale == null) {
                        // Since there is no other option, we choose fallback locale.
                        locale = FALLBACK_LOCALE;
                    }
                }
                // Cache so subsequent {{i18n}} calls in this request skip resolution.
                options.data(DATA_KEY_CURRENT_REQUEST_LOCALE, locale);
            } else {
                locale = currentRequestLocale;
            }
        }
        // Fall back to the key itself when the bundle has no translation.
        return lookup.getI18nResources().getMessage(locale, key, options.params, key);
    }

    /**
     * Resolves a locale from the helper's {@code locale} hash parameter,
     * or {@code null} when the parameter is absent or empty.
     */
    private static Locale computeLocale(Map<String, Object> hashParams) {
        Object localeParam = hashParams.get("locale");
        if ((localeParam instanceof String) && !localeParam.toString().isEmpty()) {
            return Locale.forLanguageTag(localeParam.toString());
        } else {
            return null;
        }
    }

    /**
     * Resolves a locale from the request's Accept-Language header, matched
     * against the locales for which i18n resources exist.
     */
    private static Locale computeLocale(HttpRequest request, I18nResources i18nResources) {
        String headerLocale = request.getHeaders().get(HttpRequest.HEADER_ACCEPT_LANGUAGE);
        return i18nResources.getLocale(headerLocale);
    }

    /**
     * Resolves the configured {@code defaultLocale}, or {@code null} when it
     * is absent or empty.
     */
    private static Locale computeLocale(Configuration configuration) {
        Object defaultLocale = configuration.other().get("defaultLocale");
        if ((defaultLocale instanceof String) && !defaultLocale.toString().isEmpty()) {
            return Locale.forLanguageTag(defaultLocale.toString());
        } else {
            return null;
        }
    }
}
|
components/uuf-renderablecreator-hbs/src/main/java/org/wso2/carbon/uuf/renderablecreator/hbs/helpers/runtime/I18nHelper.java
|
/*
* Copyright (c) 2016, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wso2.carbon.uuf.renderablecreator.hbs.helpers.runtime;
import com.github.jknack.handlebars.Helper;
import com.github.jknack.handlebars.Options;
import org.wso2.carbon.uuf.core.Lookup;
import org.wso2.carbon.uuf.core.RequestLookup;
import org.wso2.carbon.uuf.renderablecreator.hbs.core.HbsRenderable;
import java.io.IOException;
import java.text.MessageFormat;
import java.util.Locale;
import java.util.Properties;
public class I18nHelper implements Helper<String> {

    public static final String HELPER_NAME = "i18n";
    private static final String DEFAULT_LOCALE = "en-us";
    private static final String LOCALE_HEADER = "Accept-Language";
    private static final String DATA_KEY_CURRENT_LOCALE = "CURRENT_LOCALE";

    /**
     * Translates the given message key. The locale is taken from (in priority order) an explicit
     * {@code locale} hash parameter, the request's Accept-Language header, and finally
     * {@link #DEFAULT_LOCALE}. The key itself is returned when no translation exists.
     *
     * @param key     message key to translate; must not be null
     * @param options Handlebars invocation options carrying the lookup objects
     * @return translated (and, when params are present, MessageFormat-formatted) message,
     *         or the key when no resource bundle is found for the locale
     * @throws IOException declared by the {@link Helper} contract
     */
    @Override
    public CharSequence apply(String key, Options options) throws IOException {
        if (key == null) {
            throw new IllegalArgumentException("Key of a translating string cannot be null.");
        }
        RequestLookup requestLookup = options.data(HbsRenderable.DATA_KEY_REQUEST_LOOKUP);
        Lookup lookup = options.data(HbsRenderable.DATA_KEY_LOOKUP);
        // Check whether the current locale is already available in the options.
        Locale currentLocale = options.data(DATA_KEY_CURRENT_LOCALE);
        // If not available, compute the current locale.
        if (currentLocale == null) {
            Object localeParam = options.hash.get("locale");
            if (localeParam != null) {
                currentLocale = Locale.forLanguageTag(localeParam.toString());
            } else {
                Object localeHeaderValue = requestLookup.getRequest().getHeaders().get(LOCALE_HEADER);
                // Bug fix: the header may be absent; previously this NPE'd on toString().
                if (localeHeaderValue != null) {
                    currentLocale = lookup.getI18nResources().getLocale(localeHeaderValue.toString());
                }
            }
            if (currentLocale == null) {
                // No usable locale from the hash parameter or the request header; use the default.
                // (Previously DEFAULT_LOCALE was declared but never used.)
                currentLocale = Locale.forLanguageTag(DEFAULT_LOCALE);
            }
            // Add the locale to the helper options. This will be used when evaluating this helper
            // again in the same request. This is done to increase the performance.
            options.data(DATA_KEY_CURRENT_LOCALE, currentLocale);
        }
        Properties props = lookup.getI18nResources().getI18nResource(currentLocale);
        if (props == null) {
            return key;
        }
        if (options.params.length == 0) {
            return props.getProperty(key, key);
        }
        MessageFormat format = new MessageFormat(props.getProperty(key, key), currentLocale);
        return format.format(options.params);
    }
}
|
add weighted multi language support to i18n helper
|
components/uuf-renderablecreator-hbs/src/main/java/org/wso2/carbon/uuf/renderablecreator/hbs/helpers/runtime/I18nHelper.java
|
add weighted multi language support to i18n helper
|
|
Java
|
apache-2.0
|
32dd5c8dc0ad3afbbb8a12bb0901a2e90f27829d
| 0
|
Anoukh/carbon-analytics,tishan89/carbon-analytics,mohanvive/carbon-analytics,ramindu90/carbon-analytics-common,grainier/carbon-analytics,Anoukh/carbon-analytics,minudika/carbon-analytics,tishan89/carbon-analytics,wso2/carbon-analytics,Nethmi-Pathirana/carbon-analytics,minudika/carbon-analytics,tishan89/carbon-analytics,Anoukh/carbon-analytics,minudika/carbon-analytics,Anoukh/carbon-analytics,wso2/carbon-analytics,grainier/carbon-analytics,wso2/carbon-analytics,mohanvive/carbon-analytics,Niveathika92/carbon-analytics,erangatl/carbon-analytics,mohanvive/carbon-analytics,Nethmi-Pathirana/carbon-analytics,Nethmi-Pathirana/carbon-analytics,erangatl/carbon-analytics,Niveathika92/carbon-analytics,minudika/carbon-analytics,tishan89/carbon-analytics,minudika/carbon-analytics,Niveathika92/carbon-analytics,grainier/carbon-analytics,lasanthaS/carbon-analytics-common,Anoukh/carbon-analytics,ksdperera/carbon-analytics-common,ksdperera/carbon-analytics-common,dilini-muthumala/carbon-analytics-common,tishan89/carbon-analytics,wso2/carbon-analytics,Niveathika92/carbon-analytics,erangatl/carbon-analytics,ramindu90/carbon-analytics-common,sajithshn/carbon-analytics-common,wso2/carbon-analytics-common,Nethmi-Pathirana/carbon-analytics,Nethmi-Pathirana/carbon-analytics,wso2/carbon-analytics,erangatl/carbon-analytics,erangatl/carbon-analytics,lasanthaS/carbon-analytics-common,mohanvive/carbon-analytics,mohanvive/carbon-analytics,Niveathika92/carbon-analytics,grainier/carbon-analytics,wso2/carbon-analytics-common,dilini-muthumala/carbon-analytics-common,grainier/carbon-analytics,grainier/carbon-analytics-common
|
package org.wso2.carbon.event.simulator.core.service;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.wso2.carbon.event.simulator.core.exception.FileOperationsException;
import org.wso2.carbon.event.simulator.core.exception.InsufficientAttributesException;
import org.wso2.carbon.event.simulator.core.exception.InvalidConfigException;
import org.wso2.carbon.event.simulator.core.internal.util.EventSimulatorConstants;
import org.wso2.carbon.event.simulator.core.internal.util.SimulationConfigUploader;
import org.wso2.carbon.stream.processor.common.exception.ResourceNotFoundException;
import org.wso2.carbon.utils.Utils;
import java.nio.file.Paths;
import java.util.Collections;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
/**
 * Singleton registry of the event simulators known to the server. Simulations are partitioned
 * into an active map (successfully deployed and runnable) and an inactive map (deployment failed
 * because a required resource was not found). Entries migrate between the two maps when
 * deployments are retried.
 */
public class EventSimulatorMap {
    private static final Logger log = LoggerFactory.getLogger(EventSimulatorMap.class);
    private static final EventSimulatorMap instance = new EventSimulatorMap();
    // simulation name -> singleton map of (simulator instance -> its simulation configuration)
    private final Map<String, Map<EventSimulator, String>> activeSimulatorMap = new ConcurrentHashMap<>();
    // simulation name -> singleton map of (missing resource type -> missing resource name)
    private final Map<String, Map<ResourceNotFoundException.ResourceType, String>> inActiveSimulatorMap = new
            ConcurrentHashMap<>();

    private EventSimulatorMap() {
    }

    public static EventSimulatorMap getInstance() {
        return instance;
    }

    public Map<String, Map<EventSimulator, String>> getActiveSimulatorMap() {
        return activeSimulatorMap;
    }

    public Map<String, Map<ResourceNotFoundException.ResourceType, String>> getInActiveSimulatorMap() {
        return inActiveSimulatorMap;
    }

    /**
     * Retries creating simulator objects from inactive simulation configurations whose previous
     * deployment failed with a {@link ResourceNotFoundException}. Simulations that deploy
     * successfully are moved to the active map.
     */
    public void retryInActiveSimulatorDeployment() {
        inActiveSimulatorMap.forEach((simulationName, resourceData) -> {
            try {
                // Reload the persisted simulation config from the deployment directory.
                String simulationConfig = SimulationConfigUploader.getConfigUploader().getSimulationConfig
                        (simulationName, (Paths.get(Utils.getCarbonHome().toString(),
                                EventSimulatorConstants.DIRECTORY_DEPLOYMENT,
                                EventSimulatorConstants.DIRECTORY_SIMULATION_CONFIGS)).toString());
                if (!simulationConfig.isEmpty()) {
                    EventSimulator eventSimulator = new EventSimulator(simulationName, simulationConfig);
                    // Mutating during forEach is safe for ConcurrentHashMap (weakly consistent iteration).
                    inActiveSimulatorMap.remove(simulationName);
                    activeSimulatorMap.put(simulationName,
                            Collections.singletonMap(eventSimulator, simulationConfig));
                    log.info("Changed status of simulation '" + simulationName + "' from inactive to active.");
                }
            } catch (ResourceNotFoundException e) {
                /*
                 * Check whether the missing resource is the same as in the previous attempt. If not,
                 * update the entry in the inactive map. This check avoids logging errors repeatedly
                 * when the same resource is missing in every retry.
                 */
                if (!getResourceTypeForInActiveSimulator(simulationName).equals(e.getResourceType())
                        || !getResourceNameForInActiveSimulator(simulationName).equals(e.getResourceName())) {
                    inActiveSimulatorMap.put(simulationName,
                            Collections.singletonMap(e.getResourceType(), e.getResourceName()));
                    log.error(e.getMessage(), e);
                }
            } catch (FileOperationsException | InvalidConfigException | InsufficientAttributesException e) {
                // The configuration itself is unreadable or invalid; drop the simulation entirely.
                inActiveSimulatorMap.remove(simulationName);
                log.error(e.getMessage(), e);
            }
        });
    }

    /**
     * Revalidates the configurations of active simulations; simulations whose configuration is no
     * longer valid are stopped and either demoted to the inactive map (missing resource) or
     * discarded (invalid configuration).
     */
    public void retryActiveSimulatorDeployment() {
        activeSimulatorMap.forEach((simulationName, simulatorData) -> {
            try {
                EventSimulator.validateSimulationConfig((String) simulatorData.values().toArray()[0]);
            } catch (ResourceNotFoundException e) {
                // A required resource disappeared: stop and demote the simulation to inactive.
                stopActiveSimulation(simulationName);
                activeSimulatorMap.remove(simulationName);
                inActiveSimulatorMap.put(simulationName,
                        Collections.singletonMap(e.getResourceType(), e.getResourceName()));
                log.error(e.getMessage(), e);
                log.info("Changed status of simulation '" + simulationName + "' from active to inactive.");
            } catch (InvalidConfigException | InsufficientAttributesException e) {
                // The configuration is no longer valid at all: stop and discard the simulation.
                stopActiveSimulation(simulationName);
                activeSimulatorMap.remove(simulationName);
                log.info("Simulation configuration of active simulation '" + simulationName + "' is no longer valid. "
                        , e);
            }
        });
    }

    /**
     * Revalidates active simulations and retries inactive simulations.
     */
    public void retrySimulatorDeployment() {
        retryActiveSimulatorDeployment();
        retryInActiveSimulatorDeployment();
    }

    /**
     * Retrieves an active simulator object.
     *
     * @param simulationName name of simulation
     * @return simulator object, or null if no active simulation exists under that name
     */
    public EventSimulator getActiveSimulator(String simulationName) {
        if (activeSimulatorMap.containsKey(simulationName)) {
            return ((EventSimulator) activeSimulatorMap.get(simulationName).keySet().toArray()[0]);
        } else {
            return null;
        }
    }

    /**
     * Retrieves the type of the resource required by an inactive simulation.
     *
     * @param simulationName name of inactive simulation
     * @return resource type, or null if no inactive simulation exists under that name
     */
    public ResourceNotFoundException.ResourceType getResourceTypeForInActiveSimulator(String simulationName) {
        if (inActiveSimulatorMap.containsKey(simulationName)) {
            return (ResourceNotFoundException.ResourceType)
                    inActiveSimulatorMap.get(simulationName).keySet().toArray()[0];
        } else {
            return null;
        }
    }

    /**
     * Retrieves the name of the resource required by an inactive simulation.
     *
     * @param simulationName name of inactive simulation
     * @return resource name, or null if no inactive simulation exists under that name
     */
    public String getResourceNameForInActiveSimulator(String simulationName) {
        if (inActiveSimulatorMap.containsKey(simulationName)) {
            return (String) inActiveSimulatorMap.get(simulationName).values().toArray()[0];
        } else {
            return null;
        }
    }

    /**
     * Checks whether an active simulation exists.
     *
     * @param simulationName name of simulation
     * @return true if an active simulation exists, else false
     */
    public boolean containsActiveSimulator(String simulationName) {
        return activeSimulatorMap.containsKey(simulationName);
    }

    /**
     * Checks whether an inactive simulation exists.
     *
     * @param simulationName name of simulation
     * @return true if an inactive simulation exists, else false
     */
    public boolean containsInActiveSimulator(String simulationName) {
        return inActiveSimulatorMap.containsKey(simulationName);
    }

    /**
     * Stops an active simulation; does nothing if no active simulation exists under that name.
     *
     * @param simulationName name of simulation
     */
    public void stopActiveSimulation(String simulationName) {
        if (containsActiveSimulator(simulationName)) {
            getActiveSimulator(simulationName).stop();
        }
    }

    /**
     * Stops all active simulations.
     */
    public void stopAllActiveSimulations() {
        activeSimulatorMap.forEach((simulationName, simulatorData) -> getActiveSimulator(simulationName).stop());
    }

    /**
     * Removes an active simulation from the registry (note: does not stop it first).
     *
     * @param simulationName name of simulation being deleted
     */
    public void deleteActiveSimulation(String simulationName) {
        if (activeSimulatorMap.containsKey(simulationName)) {
            activeSimulatorMap.remove(simulationName);
        }
    }

    /**
     * Removes an inactive simulation from the registry.
     *
     * @param simulationName name of simulation being deleted
     */
    public void deleteInActiveSimulation(String simulationName) {
        if (inActiveSimulatorMap.containsKey(simulationName)) {
            inActiveSimulatorMap.remove(simulationName);
        }
    }
}
|
components/org.wso2.carbon.event.simulator.core/src/main/java/org/wso2/carbon/event/simulator/core/service/EventSimulatorMap.java
|
package org.wso2.carbon.event.simulator.core.service;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.wso2.carbon.event.simulator.core.exception.FileOperationsException;
import org.wso2.carbon.event.simulator.core.exception.InsufficientAttributesException;
import org.wso2.carbon.event.simulator.core.exception.InvalidConfigException;
import org.wso2.carbon.event.simulator.core.internal.util.EventSimulatorConstants;
import org.wso2.carbon.event.simulator.core.internal.util.SimulationConfigUploader;
import org.wso2.carbon.stream.processor.common.exception.ResourceNotFoundException;
import org.wso2.carbon.utils.Utils;
import java.nio.file.Paths;
import java.util.Collections;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
/**
 * Singleton registry of the available event simulators: active simulations (deployed) and
 * inactive simulations (deployment failed because a required resource was missing).
 */
public class EventSimulatorMap {
    private static final Logger log = LoggerFactory.getLogger(EventSimulatorMap.class);
    private static final EventSimulatorMap instance = new EventSimulatorMap();
    // simulation name -> singleton map of (simulator instance -> its simulation configuration)
    private final Map<String, Map<EventSimulator, String>> activeSimulatorMap = new ConcurrentHashMap<>();
    // simulation name -> singleton map of (missing resource type -> missing resource name)
    private final Map<String, Map<ResourceNotFoundException.ResourceType, String>> inActiveSimulatorMap = new
            ConcurrentHashMap<>();

    private EventSimulatorMap() {
    }

    public static EventSimulatorMap getInstance() {
        return instance;
    }

    public Map<String, Map<EventSimulator, String>> getActiveSimulatorMap() {
        return activeSimulatorMap;
    }

    public Map<String, Map<ResourceNotFoundException.ResourceType, String>> getInActiveSimulatorMap() {
        return inActiveSimulatorMap;
    }

    /**
     * Retries creating simulator objects from inactive simulation configurations which previously
     * failed with a {@link ResourceNotFoundException}; successful retries move to the active map.
     */
    public void retryInActiveSimulatorDeployment() {
        inActiveSimulatorMap.forEach((simulationName, resourceData) -> {
            try {
                // Reload the persisted simulation config from the deployment directory.
                String simulationConfig = SimulationConfigUploader.getConfigUploader().getSimulationConfig
                        (simulationName, (Paths.get(Utils.getCarbonHome().toString(),
                                EventSimulatorConstants.DIRECTORY_DEPLOYMENT,
                                EventSimulatorConstants.DIRECTORY_SIMULATION_CONFIGS)).toString());
                if (!simulationConfig.isEmpty()) {
                    EventSimulator eventSimulator = new EventSimulator(simulationName, simulationConfig);
                    inActiveSimulatorMap.remove(simulationName);
                    activeSimulatorMap.put(simulationName,
                            Collections.singletonMap(eventSimulator, simulationConfig));
                    log.info("Changed status of simulation '" + simulationName + "' from inactive to active.");
                }
            } catch (ResourceNotFoundException e) {
                /*
                 * Check whether the missing resource is the same as the previous one. If not, update
                 * the entry in the inactive map. This check avoids logging errors repeatedly when the
                 * same resource is missing in every retry.
                 */
                if (!getResourceTypeForInActiveSimulator(simulationName).equals(e.getResourceType())
                        || !getResourceNameForInActiveSimulator(simulationName).equals(e.getResourceName())) {
                    inActiveSimulatorMap.put(simulationName,
                            Collections.singletonMap(e.getResourceType(), e.getResourceName()));
                    log.error(e.getMessage(), e);
                }
            } catch (FileOperationsException | InvalidConfigException | InsufficientAttributesException e) {
                // The configuration itself is unreadable or invalid; drop the simulation entirely.
                inActiveSimulatorMap.remove(simulationName);
                log.error(e.getMessage(), e);
            }
        });
    }

    /**
     * Revalidates the configurations of active simulations; invalid ones are stopped and either
     * demoted to the inactive map (missing resource) or discarded (invalid configuration).
     */
    public void retryActiveSimulatorDeployment() {
        activeSimulatorMap.forEach((simulationName, simulatorData) -> {
            try {
                EventSimulator.validateSimulationConfig((String) simulatorData.values().toArray()[0]);
            } catch (ResourceNotFoundException e) {
                // A required resource disappeared: stop and demote the simulation to inactive.
                stopActiveSimulation(simulationName);
                activeSimulatorMap.remove(simulationName);
                inActiveSimulatorMap.put(simulationName,
                        Collections.singletonMap(e.getResourceType(), e.getResourceName()));
                log.error(e.getMessage(), e);
                log.info("Changed status of simulation '" + simulationName + "' from active to inactive.");
            } catch (InvalidConfigException | InsufficientAttributesException e) {
                // The configuration is no longer valid at all: stop and discard the simulation.
                stopActiveSimulation(simulationName);
                activeSimulatorMap.remove(simulationName);
                log.info("Simulation configuration of active simulation '" + simulationName + "' is no longer valid. "
                        , e);
            }
        });
    }

    /**
     * Revalidates active simulations and retries inactive simulations.
     */
    public void retrySimulatorDeployment() {
        retryActiveSimulatorDeployment();
        retryInActiveSimulatorDeployment();
    }

    /**
     * Retrieves an active simulator object.
     *
     * @param simulationName name of simulation
     * @return simulator object, or null if no active simulation exists under that name
     */
    public EventSimulator getActiveSimulator(String simulationName) {
        if (activeSimulatorMap.containsKey(simulationName)) {
            return ((EventSimulator) activeSimulatorMap.get(simulationName).keySet().toArray()[0]);
        } else {
            return null;
        }
    }

    /**
     * Retrieves the type of the resource required by an inactive simulation.
     *
     * @param simulationName name of inactive simulation
     * @return resource type, or null if no inactive simulation exists under that name
     */
    public ResourceNotFoundException.ResourceType getResourceTypeForInActiveSimulator(String simulationName) {
        if (inActiveSimulatorMap.containsKey(simulationName)) {
            return (ResourceNotFoundException.ResourceType)
                    inActiveSimulatorMap.get(simulationName).keySet().toArray()[0];
        } else {
            return null;
        }
    }

    /**
     * Retrieves the name of the resource required by an inactive simulation.
     *
     * @param simulationName name of inactive simulation
     * @return resource name, or null if no inactive simulation exists under that name
     */
    public String getResourceNameForInActiveSimulator(String simulationName) {
        if (inActiveSimulatorMap.containsKey(simulationName)) {
            return (String) inActiveSimulatorMap.get(simulationName).values().toArray()[0];
        } else {
            return null;
        }
    }

    /**
     * Checks whether an active simulation exists.
     *
     * @param simulationName name of simulation
     * @return true if an active simulation exists, else false
     */
    public boolean containsActiveSimulator(String simulationName) {
        return activeSimulatorMap.containsKey(simulationName);
    }

    /**
     * Checks whether an inactive simulation exists.
     *
     * @param simulationName name of simulation
     * @return true if an inactive simulation exists, else false
     */
    public boolean containsInActiveSimulator(String simulationName) {
        return inActiveSimulatorMap.containsKey(simulationName);
    }

    /**
     * Stops an active simulation; does nothing if no active simulation exists under that name.
     *
     * @param simulationName name of simulation
     */
    public void stopActiveSimulation(String simulationName) {
        if (containsActiveSimulator(simulationName)) {
            getActiveSimulator(simulationName).stop();
        }
    }

    /**
     * Stops all active simulations.
     */
    public void stopAllActiveSimulations() {
        activeSimulatorMap.forEach((simulationName, simulatorData) -> getActiveSimulator(simulationName).stop());
    }

    /**
     * Removes an active simulation from the registry (note: does not stop it first).
     *
     * @param simulationName name of simulation being deleted
     */
    public void deleteActiveSimulation(String simulationName) {
        if (activeSimulatorMap.containsKey(simulationName)) {
            activeSimulatorMap.remove(simulationName);
        }
    }

    /**
     * Removes an inactive simulation from the registry.
     *
     * @param simulationName name of simulation being deleted
     */
    public void deleteInActiveSimulation(String simulationName) {
        if (inActiveSimulatorMap.containsKey(simulationName)) {
            inActiveSimulatorMap.remove(simulationName);
        }
    }
}
|
refactored according to PR feedback
|
components/org.wso2.carbon.event.simulator.core/src/main/java/org/wso2/carbon/event/simulator/core/service/EventSimulatorMap.java
|
refactored according to PR feedback
|
|
Java
|
apache-2.0
|
7d7e206192143e26c76e65e23c7384d720a4372d
| 0
|
AntonVasilyuk/Aduma,AntonVasilyuk/Aduma,AntonVasilyuk/Aduma,AntonVasilyuk/Aduma
|
package ru.job4j;
import org.junit.Test;
import java.util.NoSuchElementException;
import static org.hamcrest.Matchers.is;
import static org.junit.Assert.*;
/**
 * Task 4.1.1
 * Tests for the multidimensional-array iterator.
 *
 * @author Anton Vasilyuk on 03.06.2017.
 * @version 1.0
 * @since 0.1
 */
public class IteratorArrayTest {

    /**
     * Iterating a 2x2 array must yield every element in row order.
     */
    @Test
    public void whenNeedGetAllElementFromMultidimensionalArray() {
        IteratorArray iter = new IteratorArray(new int[][] {{1, 2}, {3, 4}});
        int[] expected = {1, 2, 3, 4};
        int[] actual = new int[4];
        for (int index = 0; index < actual.length; index++) {
            actual[index] = (Integer) iter.next();
        }
        // Count positions where the iterated value differs from the expected one.
        int mismatches = 0;
        for (int index = 0; index < expected.length; index++) {
            if (actual[index] != expected[index]) {
                mismatches++;
            }
        }
        assertThat(mismatches == 0, is(true));
    }

    /**
     * After consuming three of four elements, hasNext() must still report true.
     */
    @Test
    public void whenIterateTreeTimeArrayThenHasNextIsTrue() {
        IteratorArray iter = new IteratorArray(new int[][] {{1, 2}, {3, 4}});
        for (int consumed = 0; consumed < 3; consumed++) {
            iter.next();
        }
        assertThat(iter.hasNext(), is(true));
    }

    /**
     * hasNext() on a null backing array must raise NoSuchElementException with the expected message.
     */
    @Test
    public void whenNotArrayThenErorNSEE() {
        IteratorArray iter = new IteratorArray((int[][]) null);
        try {
            iter.hasNext();
        } catch (NoSuchElementException nee) {
            assertThat(nee.getMessage(), is("No numbers in iterators."));
        }
    }
}
|
chapter_003/iteratorArray/src/test/java/ru/job4j/IteratorArrayTest.java
|
package ru.job4j;
import org.junit.Test;
import static org.hamcrest.Matchers.is;
import static org.junit.Assert.*;
/**
 * Task 4.1.1
 * Tests for the multidimensional-array iterator.
 *
 * @author Anton Vasilyuk on 03.06.2017.
 * @version 1.0
 * @since 0.1
 */
public class IteratorArrayTest {

    /**
     * Iterating a 2x2 array must yield every element in row order.
     */
    @Test
    public void whenNeedGetAllElementFromMultidimensionalArray() {
        IteratorArray iter = new IteratorArray(new int[][] {{1, 2}, {3, 4}});
        int[] expected = {1, 2, 3, 4};
        int[] actual = new int[4];
        for (int index = 0; index < actual.length; index++) {
            actual[index] = (Integer) iter.next();
        }
        // Count positions where the iterated value differs from the expected one.
        int mismatches = 0;
        for (int index = 0; index < expected.length; index++) {
            if (actual[index] != expected[index]) {
                mismatches++;
            }
        }
        assertThat(mismatches == 0, is(true));
    }
}
|
add file for chapter_003_4.1.1
|
chapter_003/iteratorArray/src/test/java/ru/job4j/IteratorArrayTest.java
|
add file for chapter_003_4.1.1
|
|
Java
|
apache-2.0
|
f0b2d1a9d45421256f604610a7bfc0e46f6162d9
| 0
|
mi9rom/VaadHLDemo,mi9rom/VaadHLDemo,mi9rom/VaadHLDemo
|
/*
* Copyright 2015 Mirosław Romaniuk (mi9rom@gmail.com)
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.vaadHL.example.jpa;
import javax.persistence.EntityManager;
import org.vaadin.addons.lazyquerycontainer.LazyEntityContainer;
import com.vaadHL.AppContext;
import com.vaadHL.utl.converter.StringToPlainIntegerConverter;
import com.vaadHL.window.EM.SingIeItemFWindow;
import com.vaadHL.window.base.MWLaunchMode;
import com.vaadHL.window.base.perm.IWinPermChecker;
import com.vaadin.data.Item;
import com.vaadin.data.fieldgroup.PropertyId;
import com.vaadin.data.validator.IntegerRangeValidator;
import com.vaadin.data.validator.StringLengthValidator;
import com.vaadin.ui.Component;
import com.vaadin.ui.TextField;
/**
 * Example single-item JPA form window for the VaadHL demo: binds text fields to entity
 * properties via {@link PropertyId}, attaches per-field validators, and demonstrates
 * item-level edit/delete permission checks plus cross-field validation.
 */
public class FormTst extends SingIeItemFWindow {

    private static final long serialVersionUID = -4219526966854534522L;

    // Wrapper around the design-editor generated UI layout (see initConstructorWidgets()).
    private FormDesWrap mUI;

    // Fields bound to entity properties by name through @PropertyId (wired in bind()).
    @PropertyId("firstName")
    private TextField fFirstName;
    @PropertyId("lastName")
    private TextField tfLastName;
    @PropertyId("yearOfBirth")
    private TextField tfYearOfBirth;

    /**
     * Creates the form window and configures captions, converters and validators.
     * Validator messages and captions are resolved through getI18S().
     *
     * @param permChecker   window permission checker
     * @param customizeFWin form-window customization settings
     * @param launchMode    window launch mode (view/edit/etc.)
     * @param em            JPA entity manager
     * @param container     lazy entity container backing the form
     * @param rowId         id of the row to display
     * @param appContext    application context
     * @param readOnlyW     whether the window is read-only
     */
    public FormTst(IWinPermChecker permChecker, CustomizeFWin customizeFWin,
            MWLaunchMode launchMode, EntityManager em,
            LazyEntityContainer<?> container, Object rowId,
            AppContext appContext, boolean readOnlyW) {
        super("M001", "Form Window Title", permChecker, customizeFWin,
                launchMode, em, container, rowId, appContext, readOnlyW);
        // It is very important to include this: stop configuring if the base
        // class refused to open the window (e.g. permission denied).
        if (!approvedToOpen)
            return;
        setWidth("920px");
        setHeight("500px");
        // First name: required, 1-15 characters.
        fFirstName.setCaption(getI18S("fFirstName"));
        fFirstName.addValidator(new StringLengthValidator(getI18S("FNlEN"), 1,
                15, false));
        fFirstName.setRequiredError(getI18S("valReq"));
        // Last name: required, 1-20 characters.
        tfLastName.addValidator(new StringLengthValidator(getI18S("LNLEN"), 1,
                20, false));
        tfLastName.setCaption(getI18S("tfLastName"));
        tfLastName.setRequiredError(getI18S("valReq"));
        // Year of birth: plain integer in the range 1920-2010.
        tfYearOfBirth.setCaption(getI18S("tfYearOfBirth"));
        tfYearOfBirth.setConverter(new StringToPlainIntegerConverter());
        String tfYearOfBirthMsg = getI18S("yearVal");
        tfYearOfBirth.addValidator(new IntegerRangeValidator(tfYearOfBirthMsg,
                1920, 2010));
        tfYearOfBirth.setRequiredError(tfYearOfBirthMsg);
    }

    /**
     * Obtains the widget references from the design-editor generated layout.
     */
    @Override
    public void initConstructorWidgets() {
        // -- In this example I utilize the design editor --
        mUI = new FormDesWrap();
        fFirstName = mUI.getFtfFirstName();
        tfLastName = mUI.getFtfLastName();
        tfYearOfBirth = mUI.getFtfYearOfBirth();
        /*
         * dpDate = mUI.getFdpDate();
         * dpDate.setDateFormat("yyyy-MM-dd HH:mm:ss");
         */
        // ---------------------------------------------------
    }

    /**
     * Returns the component used as the middle (content) area of the window.
     */
    @Override
    public Component makeMiddleArea() {
        return mUI;
    }

    /**
     * Binds the annotated member fields to the item's properties via the field group.
     */
    @Override
    protected void bind(Item item) {
        super.bind(item);
        binder.bindMemberFields(this);
    }

    /**
     * Example item level edition checking: editing is refused when the last
     * name equals "Abell".
     *
     * @return true when editing the current item is allowed
     */
    @Override
    protected boolean canEditMsg() {
        String noEdit = "Abell";
        if (tfLastName.getValue().equals(noEdit)) {
            getMsgs().showInfo(String.format(getI18S("edOf"), noEdit));
            return false;
        } else
            return true;
    }

    /**
     * Example item level deletion checking: deletion is refused when the last
     * name equals "Abell".
     *
     * @return true when deleting the current item is allowed
     */
    @Override
    protected boolean canDeleteMsg() {
        String noEdit = "Abell";
        if (tfLastName.getValue().equals(noEdit)) {
            getMsgs().showInfo(getI18S("youDel") + " " + noEdit);
            return false;
        } else
            return true;
    }

    /**
     * Cross field validation: all single-field validators must pass, and the
     * combination last name "Aaron" with a birth year before 1980 is rejected.
     *
     * @param showMessages whether to display validation messages to the user
     * @return true when the item may be saved
     */
    @Override
    public boolean validateSave(boolean showMessages) {
        if (!super.validateSave(showMessages))
            return false;
        if (!(fFirstName.isValid() && tfLastName.isValid() && tfYearOfBirth
                .isValid())) {
            if (showMessages)
                getMsgs().showWarning(getI18S("thereIsAt"));
            return false;
        }
        try {
            if (tfLastName.getValue().equals("Aaron")
                    && Integer.parseInt(tfYearOfBirth.getValue()) < 1980) {
                if (showMessages)
                    getMsgs().showWarning(getI18S("arSoOld"));
                return false;
            }
        } catch (NumberFormatException e) {
            // Year field did not parse as an integer.
            if (showMessages)
                getMsgs().showError(getI18S("wrongYear"), 1200);
            return false;
        }
        return true;
    }
}
|
src/com/vaadHL/example/jpa/FormTst.java
|
/*
* Copyright 2015 Mirosław Romaniuk (mi9rom@gmail.com)
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.vaadHL.example.jpa;
import javax.persistence.EntityManager;
import org.vaadin.addons.lazyquerycontainer.LazyEntityContainer;
import com.vaadHL.AppContext;
import com.vaadHL.utl.converter.StringToPlainIntegerConverter;
import com.vaadHL.window.EM.SingIeItemFWindow;
import com.vaadHL.window.base.MWLaunchMode;
import com.vaadHL.window.base.perm.IWinPermChecker;
import com.vaadin.data.Item;
import com.vaadin.data.fieldgroup.PropertyId;
import com.vaadin.data.validator.IntegerRangeValidator;
import com.vaadin.data.validator.StringLengthValidator;
import com.vaadin.ui.Component;
import com.vaadin.ui.TextField;
/**
 * Example single-item JPA form window for the VaadHL demo: binds text fields to entity
 * properties via {@link PropertyId}, attaches per-field validators, and demonstrates
 * item-level edit/delete permission checks plus cross-field validation.
 */
public class FormTst extends SingIeItemFWindow {

    private static final long serialVersionUID = -4219526966854534522L;

    // Wrapper around the design-editor generated UI layout (see initConstructorWidgets()).
    private FormDesWrap mUI;

    // Fields bound to entity properties by name through @PropertyId (wired in bind()).
    @PropertyId("firstName")
    private TextField fFirstName;
    @PropertyId("lastName")
    private TextField tfLastName;
    @PropertyId("yearOfBirth")
    private TextField tfYearOfBirth;

    /**
     * Creates the form window and configures captions, converters and validators.
     * Validator messages and captions are resolved through getI18S().
     *
     * @param permChecker   window permission checker
     * @param customizeFWin form-window customization settings
     * @param launchMode    window launch mode (view/edit/etc.)
     * @param em            JPA entity manager
     * @param container     lazy entity container backing the form
     * @param rowId         id of the row to display
     * @param appContext    application context
     * @param readOnlyW     whether the window is read-only
     */
    public FormTst(IWinPermChecker permChecker, CustomizeFWin customizeFWin,
            MWLaunchMode launchMode, EntityManager em,
            LazyEntityContainer<?> container, Object rowId,
            AppContext appContext, boolean readOnlyW) {
        super("M001", "Form Window Title", permChecker, customizeFWin,
                launchMode, em, container, rowId, appContext, readOnlyW);
        // It is very important to include this: stop configuring if the base
        // class refused to open the window (e.g. permission denied).
        if (!approvedToOpen)
            return;
        setWidth("920px");
        setHeight("500px");
        // First name: required, 1-15 characters.
        fFirstName.setCaption(getI18S("fFirstName"));
        fFirstName.addValidator(new StringLengthValidator(getI18S("FNlEN"), 1,
                15, false));
        fFirstName.setRequiredError(getI18S("valReq"));
        // Last name: required, 1-20 characters.
        tfLastName.addValidator(new StringLengthValidator(getI18S("LNLEN"), 1,
                20, false));
        tfLastName.setCaption(getI18S("tfLastName"));
        tfLastName.setRequiredError(getI18S("valReq"));
        // Year of birth: plain integer in the range 1920-2010.
        tfYearOfBirth.setCaption(getI18S("tfYearOfBirth"));
        tfYearOfBirth.setConverter(new StringToPlainIntegerConverter());
        String tfYearOfBirthMsg = getI18S("yearVal");
        tfYearOfBirth.addValidator(new IntegerRangeValidator(tfYearOfBirthMsg,
                1920, 2010));
        tfYearOfBirth.setRequiredError(tfYearOfBirthMsg);
    }

    /**
     * Obtains the widget references from the design-editor generated layout.
     */
    @Override
    public void initConstructorWidgets() {
        // -- In this example I utilize the design editor --
        mUI = new FormDesWrap();
        fFirstName = mUI.getFtfFirstName();
        tfLastName = mUI.getFtfLastName();
        tfYearOfBirth = mUI.getFtfYearOfBirth();
        /*
         * dpDate = mUI.getFdpDate();
         * dpDate.setDateFormat("yyyy-MM-dd HH:mm:ss");
         */
        // ---------------------------------------------------
    }

    /**
     * Returns the component used as the middle (content) area of the window.
     */
    @Override
    public Component makeMiddleArea() {
        return mUI;
    }

    /**
     * Binds the annotated member fields to the item's properties via the field group.
     */
    @Override
    protected void bind(Item item) {
        super.bind(item);
        binder.bindMemberFields(this);
    }

    /**
     * Example item level edition checking: editing is refused when the last
     * name equals "Abbassi".
     *
     * @return true when editing the current item is allowed
     */
    @Override
    protected boolean canEditMsg() {
        String noEdit = "Abbassi";
        if (tfLastName.getValue().equals(noEdit)) {
            getMsgs().showInfo(String.format(getI18S("edOf"), noEdit));
            return false;
        } else
            return true;
    }

    /**
     * Example item level deletion checking: deletion is refused when the last
     * name equals "Abbassi".
     *
     * @return true when deleting the current item is allowed
     */
    @Override
    protected boolean canDeleteMsg() {
        String noEdit = "Abbassi";
        if (tfLastName.getValue().equals(noEdit)) {
            getMsgs().showInfo(getI18S("youDel") + " " + noEdit);
            return false;
        } else
            return true;
    }

    /**
     * Cross field validation: all single-field validators must pass, and the
     * combination last name "Aaron" with a birth year before 1980 is rejected.
     *
     * @param showMessages whether to display validation messages to the user
     * @return true when the item may be saved
     */
    @Override
    public boolean validateSave(boolean showMessages) {
        if (!super.validateSave(showMessages))
            return false;
        if (!(fFirstName.isValid() && tfLastName.isValid() && tfYearOfBirth
                .isValid())) {
            if (showMessages)
                getMsgs().showWarning(getI18S("thereIsAt"));
            return false;
        }
        try {
            if (tfLastName.getValue().equals("Aaron")
                    && Integer.parseInt(tfYearOfBirth.getValue()) < 1980) {
                if (showMessages)
                    getMsgs().showWarning(getI18S("arSoOld"));
                return false;
            }
        } catch (NumberFormatException e) {
            // Year field did not parse as an integer.
            if (showMessages)
                getMsgs().showError(getI18S("wrongYear"), 1200);
            return false;
        }
        return true;
    }
}
|
"Abell" test
|
src/com/vaadHL/example/jpa/FormTst.java
|
"Abell" test
|
|
Java
|
apache-2.0
|
45c3b417f9ebfab7ce5bfead930e222b7e23124d
| 0
|
mariacioffi/azkaban,sunghyuk/azkaban,weikang2002/azkaban,inoviaazkaban/azkaban,erwa/azkaban,researchgate/azkaban,HappyRay/azkaban,jackrjli/azkaban,hluu/azkaban2,kaneda/azkaban,azkaban/azkaban,HappyRay/azkaban,logiclord/azkaban,reallocf/azkaban,davidzchen/azkaban,wangqiaoshi/azkaban,mariacioffi/azkaban,azkaban/azkaban,kaneda/azkaban,HappyRay/azkaban,poporisil/azkaban3,poporisil/azkaban3,linearregression/azkaban,sunghyuk/azkaban,mariacioffi/azkaban,FelixGV/azkaban2,reallocf/azkaban,chengren311/azkaban,binhnv/azkaban,sunghyuk/azkaban,relateiq/azkaban,relateiq/azkaban,binhnv/azkaban,FelixGV/azkaban2,evlstyle/azkaban,inoviaazkaban/azkaban,poporisil/azkaban3,logiclord/azkaban,jackrjli/azkaban,HappyRay/azkaban,backingwu/azkaban,Shopify/azkaban,azkaban/azkaban,HappyRay/azkaban,shixin198642/azkaban2,johnyu0520/azkaban,relateiq/azkaban,poporisil/azkaban3,erwa/azkaban,erwa/azkaban,backingwu/azkaban,reallocf/azkaban,mradamlacey/azkaban,poporisil/azkaban3,erwa/azkaban,backingwu/azkaban,FelixGV/azkaban2,sunghyuk/azkaban,davidzchen/azkaban,mradamlacey/azkaban,gradleupdate/azkaban2,johnyu0520/azkaban,binhnv/azkaban,jackrjli/azkaban,researchgate/azkaban,erwa/azkaban,hluu/azkaban2,cjyu/azkaban2,wangqiaoshi/azkaban,linearregression/azkaban,Shopify/azkaban,inoviaazkaban/azkaban,evlstyle/azkaban,azkaban/azkaban,mradamlacey/azkaban,cjyu/azkaban2,Shopify/azkaban,wangqiaoshi/azkaban,chengren311/azkaban,johnyu0520/azkaban,weikang2002/azkaban,mradamlacey/azkaban,davidzchen/azkaban,chengren311/azkaban,logiclord/azkaban,relateiq/azkaban,shixin198642/azkaban2,binhnv/azkaban,sunghyuk/azkaban,linearregression/azkaban,mradamlacey/azkaban,chengren311/azkaban,researchgate/azkaban,HappyRay/azkaban,reallocf/azkaban,gradleupdate/azkaban2,cjyu/azkaban2,reallocf/azkaban,nomorogbe/azkaban,researchgate/azkaban,evlstyle/azkaban,mariacioffi/azkaban,mariacioffi/azkaban,hluu/azkaban2,johnyu0520/azkaban,weikang2002/azkaban,gradleupdate/azkaban2,binhnv/azkaban,kaneda/azkaban,azkaban/azkaban,shixin198642/azkaban2
,davidzchen/azkaban,jackrjli/azkaban,evlstyle/azkaban,azkaban/azkaban,logiclord/azkaban,jackrjli/azkaban,nomorogbe/azkaban,nomorogbe/azkaban,chengren311/azkaban
|
/*
* Copyright 2012 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package azkaban.execapp;
import java.io.File;
import java.io.FilenameFilter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Set;
import java.util.Arrays;
import java.util.Collections;
import org.apache.log4j.Appender;
import org.apache.log4j.EnhancedPatternLayout;
import org.apache.log4j.Layout;
import org.apache.log4j.Logger;
import org.apache.log4j.RollingFileAppender;
import azkaban.execapp.event.BlockingStatus;
import azkaban.execapp.event.Event;
import azkaban.execapp.event.Event.Type;
import azkaban.execapp.event.EventHandler;
import azkaban.execapp.event.FlowWatcher;
import azkaban.executor.ExecutableNode;
import azkaban.executor.ExecutorLoader;
import azkaban.executor.ExecutorManagerException;
import azkaban.executor.Status;
import azkaban.flow.CommonJobProperties;
import azkaban.jobExecutor.AbstractProcessJob;
import azkaban.jobExecutor.Job;
import azkaban.jobtype.JobTypeManager;
import azkaban.jobtype.JobTypeManagerException;
import azkaban.utils.Props;
/**
 * Executes a single {@link ExecutableNode} (job) of a flow.
 *
 * Responsibilities: create a per-job rolling log file, optionally block on
 * pipelined jobs running in another execution, honor a configured start
 * delay, build the job through the {@link JobTypeManager}, run it, persist
 * status transitions through the {@link ExecutorLoader}, upload the produced
 * log files, and fire {@link Event}s (JOB_STARTED, JOB_STATUS_CHANGED,
 * JOB_FINISHED) to registered listeners. JOB_FINISHED is fired exactly once
 * on every path through {@link #run()}.
 */
public class JobRunner extends EventHandler implements Runnable {
  // Deliberately an instance field, not static: log4j Layout implementations
  // are not thread safe, so each JobRunner owns its own EnhancedPatternLayout
  // to keep concurrently running jobs from interleaving formatted output into
  // each other's log files.
  private final Layout DEFAULT_LAYOUT = new EnhancedPatternLayout("%d{dd-MM-yyyy HH:mm:ss z} %c{1} %p - %m\n");

  private ExecutorLoader loader;   // persists node status and log files
  private Props props;             // job configuration properties
  private Props outputProps;       // properties generated by the job, if any
  private ExecutableNode node;     // the node being executed
  private File workingDir;         // working directory; also holds the log files

  private Logger logger = null;          // per-job logger, built in createLogger()
  private Layout loggerLayout = DEFAULT_LAYOUT;
  private Logger flowLogger = null;      // flow-level logger for errors outside the job log
  private Appender jobAppender;          // file appender attached to 'logger'
  private File logFile;

  private Job job;
  private int executionId = -1;

  // Serializes logger creation across all JobRunner instances in the JVM.
  private static final Object logCreatorLock = new Object();
  // Guards races between prepareJob() and cancel().
  private Object syncObject = new Object();

  private final JobTypeManager jobtypeManager;

  // Used by the job to watch and block against another flow.
  private Integer pipelineLevel = null;
  private FlowWatcher watcher = null;
  private Set<String> pipelineJobs = new HashSet<String>();

  // Users this job is allowed to proxy as; null means no restriction check.
  private Set<String> proxyUsers = null;

  private String jobLogChunkSize;
  private int jobLogBackupIndex;

  private long delayStartMs = 0;
  private boolean cancelled = false;
  private BlockingStatus currentBlockStatus = null;

  public JobRunner(ExecutableNode node, Props props, File workingDir, ExecutorLoader loader, JobTypeManager jobtypeManager) {
    this.props = props;
    this.node = node;
    this.workingDir = workingDir;
    this.executionId = node.getExecutionId();
    this.loader = loader;
    this.jobtypeManager = jobtypeManager;
  }

  /** Sets the set of users this job may proxy as (already validated upstream). */
  public void setValidatedProxyUsers(Set<String> proxyUsers) {
    this.proxyUsers = proxyUsers;
  }

  /**
   * Configures job logging: the flow-level logger used for errors that cannot
   * go to the job log, the rolling-file chunk size, and the backup index.
   */
  public void setLogSettings(Logger flowLogger, String logFileChuckSize, int numLogBackup ) {
    this.flowLogger = flowLogger;
    this.jobLogChunkSize = logFileChuckSize;
    this.jobLogBackupIndex = numLogBackup;
  }

  public Props getProps() {
    return props;
  }

  /**
   * Enables pipelining against another execution. Level 1 blocks on this
   * job's counterpart; level 2 additionally blocks on its out-nodes.
   */
  public void setPipeline(FlowWatcher watcher, int pipelineLevel) {
    this.watcher = watcher;
    this.pipelineLevel = pipelineLevel;
    if (this.pipelineLevel == 1) {
      pipelineJobs.add(node.getJobId());
    } else if (this.pipelineLevel == 2) {
      pipelineJobs.add(node.getJobId());
      pipelineJobs.addAll(node.getOutNodes());
    }
  }

  public void setDelayStart(long delayMS) {
    delayStartMs = delayMS;
  }

  public long getDelayStart() {
    return delayStartMs;
  }

  public ExecutableNode getNode() {
    return node;
  }

  public String getLogFilePath() {
    return logFile == null ? null : logFile.getPath();
  }

  /**
   * Creates the per-job logger with a rolling file appender in workingDir.
   * Creation is serialized JVM-wide via logCreatorLock. On failure the error
   * goes to the flow logger and jobAppender stays null.
   */
  private void createLogger() {
    // Create logger
    synchronized (logCreatorLock) {
      String loggerName = System.currentTimeMillis() + "." + executionId + "." + node.getJobId();
      logger = Logger.getLogger(loggerName);

      // Create file appender
      String logName = createLogFileName(node.getExecutionId(), node.getJobId(), node.getAttempt());
      logFile = new File(workingDir, logName);
      String absolutePath = logFile.getAbsolutePath();

      jobAppender = null;
      try {
        RollingFileAppender fileAppender = new RollingFileAppender(loggerLayout, absolutePath, true);
        fileAppender.setMaxBackupIndex(jobLogBackupIndex);
        fileAppender.setMaxFileSize(jobLogChunkSize);
        jobAppender = fileAppender;
        logger.addAppender(jobAppender);
        // Keep job output out of parent loggers/appenders.
        logger.setAdditivity(false);
      } catch (IOException e) {
        flowLogger.error("Could not open log file in " + workingDir + " for job " + node.getJobId(), e);
      }
    }
  }

  /** Detaches and closes the job's file appender, flushing pending output. */
  private void closeLogger() {
    if (jobAppender != null) {
      logger.removeAppender(jobAppender);
      jobAppender.close();
    }
  }

  /** Persists the node's current state (with a fresh update time) to the DB. */
  private void writeStatus() {
    try {
      node.setUpdateTime(System.currentTimeMillis());
      loader.updateExecutableNode(node);
    } catch (ExecutorManagerException e) {
      flowLogger.error("Could not update job properties in db for " + node.getJobId(), e);
    }
  }

  /**
   * Main execution path. Terminal short-circuit cases (DISABLED, already
   * cancelled, already FAILED/KILLED) stamp times, fire JOB_STARTED and
   * JOB_FINISHED and return. Otherwise the job log is created, pipelined
   * jobs and the start delay are waited on, the job is prepared and run,
   * and logs are uploaded before the final JOB_FINISHED event.
   */
  @Override
  public void run() {
    Thread.currentThread().setName("JobRunner-" + node.getJobId() + "-" + executionId);

    if (node.getStatus() == Status.DISABLED) {
      node.setStartTime(System.currentTimeMillis());
      fireEvent(Event.create(this, Type.JOB_STARTED, null, false));
      node.setStatus(Status.SKIPPED);
      node.setEndTime(System.currentTimeMillis());
      fireEvent(Event.create(this, Type.JOB_FINISHED));
      return;
    } else if (this.cancelled) {
      node.setStartTime(System.currentTimeMillis());
      fireEvent(Event.create(this, Type.JOB_STARTED, null, false));
      node.setStatus(Status.FAILED);
      node.setEndTime(System.currentTimeMillis());
      fireEvent(Event.create(this, Type.JOB_FINISHED));
      // Bug fix: this branch previously fell through to the trailing
      // fireEvent(JOB_FINISHED) at the bottom of run() and emitted the event
      // twice. Return here like the other terminal branches.
      return;
    } else if (node.getStatus() == Status.FAILED || node.getStatus() == Status.KILLED) {
      node.setStartTime(System.currentTimeMillis());
      fireEvent(Event.create(this, Type.JOB_STARTED, null, false));
      node.setEndTime(System.currentTimeMillis());
      fireEvent(Event.create(this, Type.JOB_FINISHED));
      return;
    } else {
      createLogger();
      node.setUpdateTime(System.currentTimeMillis());

      // For pipelining of jobs. Will watch other jobs. pipelineJobs is only
      // non-empty when setPipeline() was called, which also sets 'watcher'.
      if (!pipelineJobs.isEmpty()) {
        String blockedList = "";
        ArrayList<BlockingStatus> blockingStatus = new ArrayList<BlockingStatus>();
        for (String waitingJobId : pipelineJobs) {
          Status status = watcher.peekStatus(waitingJobId);
          if (status != null && !Status.isStatusFinished(status)) {
            BlockingStatus block = watcher.getBlockingStatus(waitingJobId);
            blockingStatus.add(block);
            blockedList += waitingJobId + ",";
          }
        }
        if (!blockingStatus.isEmpty()) {
          logger.info("Pipeline job " + node.getJobId() + " waiting on " + blockedList + " in execution " + watcher.getExecId());
          for (BlockingStatus bStatus : blockingStatus) {
            logger.info("Waiting on pipelined job " + bStatus.getJobId());
            currentBlockStatus = bStatus;
            bStatus.blockOnFinishedStatus();
            logger.info("Pipelined job " + bStatus.getJobId() + " finished.");
            if (watcher.isWatchCancelled()) {
              break;
            }
          }
          writeStatus();
          fireEvent(Event.create(this, Type.JOB_STATUS_CHANGED));
        }
        if (watcher.isWatchCancelled()) {
          logger.info("Job was cancelled while waiting on pipeline. Quiting.");
          node.setStartTime(System.currentTimeMillis());
          node.setEndTime(System.currentTimeMillis());
          node.setStatus(Status.FAILED);
          fireEvent(Event.create(this, Type.JOB_FINISHED));
          return;
        }
      }
      currentBlockStatus = null;

      long currentTime = System.currentTimeMillis();
      if (delayStartMs > 0) {
        logger.info("Delaying start of execution for " + delayStartMs + " milliseconds.");
        synchronized (this) {
          try {
            // cancel() calls notify() on 'this' to break out of this wait.
            this.wait(delayStartMs);
            logger.info("Execution has been delayed for " + delayStartMs + " ms. Continuing with execution.");
          } catch (InterruptedException e) {
            logger.error("Job " + node.getJobId() + " was to be delayed for " + delayStartMs + ". Interrupted after " + (System.currentTimeMillis() - currentTime));
          }
        }
        if (cancelled) {
          logger.info("Job was cancelled while in delay. Quiting.");
          node.setStartTime(System.currentTimeMillis());
          node.setEndTime(System.currentTimeMillis());
          fireEvent(Event.create(this, Type.JOB_FINISHED));
          return;
        }
      }

      node.setStartTime(System.currentTimeMillis());
      fireEvent(Event.create(this, Type.JOB_STARTED, null, false));
      try {
        loader.uploadExecutableNode(node, props);
      } catch (ExecutorManagerException e1) {
        logger.error("Error writing initial node properties");
      }

      if (prepareJob()) {
        writeStatus();
        fireEvent(Event.create(this, Type.JOB_STATUS_CHANGED), false);
        runJob();
      } else {
        node.setStatus(Status.FAILED);
        logError("Job run failed!");
      }

      node.setEndTime(System.currentTimeMillis());
      logInfo("Finishing job " + node.getJobId() + " at " + node.getEndTime());
      closeLogger();
      writeStatus();

      // Upload every rolled chunk of the job log (newest first).
      if (logFile != null) {
        try {
          File[] files = logFile.getParentFile().listFiles(new FilenameFilter() {
            @Override
            public boolean accept(File dir, String name) {
              return name.startsWith(logFile.getName());
            }
          });
          Arrays.sort(files, Collections.reverseOrder());
          loader.uploadLogFile(executionId, node.getJobId(), node.getAttempt(), files);
        } catch (ExecutorManagerException e) {
          flowLogger.error("Error writing out logs for job " + node.getJobId(), e);
        }
      } else {
        flowLogger.info("Log file for job " + node.getJobId() + " is null");
      }
    }
    fireEvent(Event.create(this, Type.JOB_FINISHED));
  }

  private void fireEvent(Event event) {
    fireEvent(event, true);
  }

  /** Fires an event to listeners, optionally stamping the node update time. */
  private void fireEvent(Event event, boolean updateTime) {
    if (updateTime) {
      node.setUpdateTime(System.currentTimeMillis());
    }
    this.fireEventListeners(event);
  }

  /**
   * Validates preconditions, records attempt metadata, sets the node RUNNING,
   * enforces proxy-user permission and builds the Job executor.
   *
   * @return false when the job must not run (missing props, cancelled,
   *         disallowed proxy user, or job-type build failure)
   */
  private boolean prepareJob() throws RuntimeException {
    // Check pre conditions
    if (props == null || cancelled) {
      logError("Failing job. The job properties don't exist");
      return false;
    }

    synchronized (syncObject) {
      // Re-check under the lock: cancel() may have raced us here.
      if (node.getStatus() == Status.FAILED || cancelled) {
        return false;
      }

      if (node.getAttempt() > 0) {
        logInfo("Starting job " + node.getJobId() + " attempt " + node.getAttempt() + " at " + node.getStartTime());
      } else {
        logInfo("Starting job " + node.getJobId() + " at " + node.getStartTime());
      }
      props.put(CommonJobProperties.JOB_ATTEMPT, node.getAttempt());
      props.put(CommonJobProperties.JOB_METADATA_FILE, createMetaDataFileName(executionId, node.getJobId(), node.getAttempt()));
      node.setStatus(Status.RUNNING);

      // Ability to specify working directory
      if (!props.containsKey(AbstractProcessJob.WORKING_DIR)) {
        props.put(AbstractProcessJob.WORKING_DIR, workingDir.getAbsolutePath());
      }

      if (props.containsKey("user.to.proxy")) {
        String jobProxyUser = props.getString("user.to.proxy");
        if (proxyUsers != null && !proxyUsers.contains(jobProxyUser)) {
          logger.error("User " + jobProxyUser + " has no permission to execute this job " + node.getJobId() + "!");
          return false;
        }
      }

      //job = JobWrappingFactory.getJobWrappingFactory().buildJobExecutor(node.getJobId(), props, logger);
      try {
        job = jobtypeManager.buildJobExecutor(node.getJobId(), props, logger);
      } catch (JobTypeManagerException e) {
        logger.error("Failed to build job type, skipping this job");
        return false;
      }
    }

    return true;
  }

  /**
   * Runs the prepared job; on exception marks the node FAILED, otherwise
   * SUCCEEDED, and captures any output properties the job produced.
   */
  private void runJob() {
    try {
      job.run();
    } catch (Exception e) {
      e.printStackTrace();
      node.setStatus(Status.FAILED);
      logError("Job run failed!");
      logError(e.getMessage() + e.getCause());
      return;
    }

    node.setStatus(Status.SUCCEEDED);
    if (job != null) {
      outputProps = job.getJobGeneratedProperties();
      node.setOutputProps(outputProps);
    }
  }

  /**
   * Requests cancellation: unblocks any pipeline wait, wakes a delayed start,
   * and cancels the underlying job if it has already been built.
   */
  public void cancel() {
    synchronized (syncObject) {
      logError("Cancel has been called.");
      this.cancelled = true;

      BlockingStatus status = currentBlockStatus;
      if (status != null) {
        status.unblock();
      }

      // Cancel code here
      if (job == null) {
        logError("Job hasn't started yet.");
        // Just in case we're waiting on the delay
        synchronized (this) {
          this.notify();
        }
        return;
      }

      try {
        job.cancel();
      } catch (Exception e) {
        logError(e.getMessage());
        logError("Failed trying to cancel job. Maybe it hasn't started running yet or just finished.");
      }
    }
  }

  public boolean isCancelled() {
    return cancelled;
  }

  public Status getStatus() {
    return node.getStatus();
  }

  public Props getOutputProps() {
    return outputProps;
  }

  // Both log helpers tolerate a null logger (before createLogger() ran).
  private void logError(String message) {
    if (logger != null) {
      logger.error(message);
    }
  }

  private void logInfo(String message) {
    if (logger != null) {
      logger.info(message);
    }
  }

  public File getLogFile() {
    return logFile;
  }

  public int getRetries() {
    return props.getInt("retries", 0);
  }

  public long getRetryBackoff() {
    return props.getLong("retry.backoff", 0);
  }

  /** Builds the job log file name; attempt 0 omits the attempt component. */
  public static String createLogFileName(int executionId, String jobId, int attempt) {
    return attempt > 0 ? "_job." + executionId + "." + attempt + "." + jobId + ".log" : "_job." + executionId + "." + jobId + ".log";
  }

  /** Builds the job metadata file name; attempt 0 omits the attempt component. */
  public static String createMetaDataFileName(int executionId, String jobId, int attempt) {
    return attempt > 0 ? "_job." + executionId + "." + attempt + "." + jobId + ".meta" : "_job." + executionId + "." + jobId + ".meta";
  }
}
|
src/java/azkaban/execapp/JobRunner.java
|
/*
* Copyright 2012 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package azkaban.execapp;
import java.io.File;
import java.io.FilenameFilter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Set;
import java.util.Arrays;
import java.util.Collections;
import org.apache.log4j.Appender;
import org.apache.log4j.Layout;
import org.apache.log4j.Logger;
import org.apache.log4j.PatternLayout;
import org.apache.log4j.RollingFileAppender;
import azkaban.execapp.event.BlockingStatus;
import azkaban.execapp.event.Event;
import azkaban.execapp.event.Event.Type;
import azkaban.execapp.event.EventHandler;
import azkaban.execapp.event.FlowWatcher;
import azkaban.executor.ExecutableNode;
import azkaban.executor.ExecutorLoader;
import azkaban.executor.ExecutorManagerException;
import azkaban.executor.Status;
import azkaban.flow.CommonJobProperties;
import azkaban.jobExecutor.AbstractProcessJob;
import azkaban.jobExecutor.Job;
import azkaban.jobtype.JobTypeManager;
import azkaban.jobtype.JobTypeManagerException;
import azkaban.utils.Props;
/**
 * Executes a single {@link ExecutableNode} (job) of a flow: creates a per-job
 * rolling log file, optionally blocks on pipelined jobs in another execution,
 * honors a configured start delay, builds and runs the job via the
 * {@link JobTypeManager}, persists status via the {@link ExecutorLoader} and
 * fires events to listeners.
 *
 * NOTE(review): this is the pre-change revision preserved in the dataset row;
 * the accompanying commit replaces the shared static PatternLayout below with
 * a per-instance EnhancedPatternLayout because log4j Layouts are not thread
 * safe and concurrent jobs could interleave output into each other's logs.
 */
public class JobRunner extends EventHandler implements Runnable {
// NOTE(review): static + shared across all JobRunner threads — the thread
// safety issue the later commit fixes.
private static final Layout DEFAULT_LAYOUT = new PatternLayout("%d{dd-MM-yyyy HH:mm:ss z} %c{1} %p - %m\n");
private ExecutorLoader loader;
private Props props;
private Props outputProps;
private ExecutableNode node;
private File workingDir;
private Logger logger = null;
private Layout loggerLayout = DEFAULT_LAYOUT;
private Logger flowLogger = null;
private Appender jobAppender;
private File logFile;
private Job job;
private int executionId = -1;
// Serializes logger creation JVM-wide.
private static final Object logCreatorLock = new Object();
// Guards races between prepareJob() and cancel().
private Object syncObject = new Object();
private final JobTypeManager jobtypeManager;
// Used by the job to watch and block against another flow
private Integer pipelineLevel = null;
private FlowWatcher watcher = null;
private Set<String> pipelineJobs = new HashSet<String>();
// Users this job may proxy as; null means no restriction check.
private Set<String> proxyUsers = null;
private String jobLogChunkSize;
private int jobLogBackupIndex;
private long delayStartMs = 0;
private boolean cancelled = false;
private BlockingStatus currentBlockStatus = null;
public JobRunner(ExecutableNode node, Props props, File workingDir, ExecutorLoader loader, JobTypeManager jobtypeManager) {
this.props = props;
this.node = node;
this.workingDir = workingDir;
this.executionId = node.getExecutionId();
this.loader = loader;
this.jobtypeManager = jobtypeManager;
}
public void setValidatedProxyUsers(Set<String> proxyUsers) {
this.proxyUsers = proxyUsers;
}
// Configures the flow-level logger plus rolling-file chunk size and backups.
public void setLogSettings(Logger flowLogger, String logFileChuckSize, int numLogBackup ) {
this.flowLogger = flowLogger;
this.jobLogChunkSize = logFileChuckSize;
this.jobLogBackupIndex = numLogBackup;
}
public Props getProps() {
return props;
}
// Level 1 blocks on this job's counterpart; level 2 also on its out-nodes.
public void setPipeline(FlowWatcher watcher, int pipelineLevel) {
this.watcher = watcher;
this.pipelineLevel = pipelineLevel;
if (this.pipelineLevel == 1) {
pipelineJobs.add(node.getJobId());
}
else if (this.pipelineLevel == 2) {
pipelineJobs.add(node.getJobId());
pipelineJobs.addAll(node.getOutNodes());
}
}
public void setDelayStart(long delayMS) {
delayStartMs = delayMS;
}
public long getDelayStart() {
return delayStartMs;
}
public ExecutableNode getNode() {
return node;
}
public String getLogFilePath() {
return logFile == null ? null : logFile.getPath();
}
// Builds the per-job logger with a rolling file appender in workingDir.
// On failure jobAppender stays null and the error goes to the flow logger.
private void createLogger() {
// Create logger
synchronized (logCreatorLock) {
String loggerName = System.currentTimeMillis() + "." + executionId + "." + node.getJobId();
logger = Logger.getLogger(loggerName);
// Create file appender
String logName = createLogFileName(node.getExecutionId(), node.getJobId(), node.getAttempt());
logFile = new File(workingDir, logName);
String absolutePath = logFile.getAbsolutePath();
jobAppender = null;
try {
RollingFileAppender fileAppender = new RollingFileAppender(loggerLayout, absolutePath, true);
fileAppender.setMaxBackupIndex(jobLogBackupIndex);
fileAppender.setMaxFileSize(jobLogChunkSize);
jobAppender = fileAppender;
logger.addAppender(jobAppender);
logger.setAdditivity(false);
} catch (IOException e) {
flowLogger.error("Could not open log file in " + workingDir + " for job " + node.getJobId(), e);
}
}
}
// Detaches and closes the job's file appender.
private void closeLogger() {
if (jobAppender != null) {
logger.removeAppender(jobAppender);
jobAppender.close();
}
}
// Persists the node's current state (with fresh update time) to the DB.
private void writeStatus() {
try {
node.setUpdateTime(System.currentTimeMillis());
loader.updateExecutableNode(node);
} catch (ExecutorManagerException e) {
flowLogger.error("Could not update job properties in db for " + node.getJobId(), e);
}
}
// Main execution path. Terminal cases (DISABLED, cancelled, FAILED/KILLED)
// stamp times and fire events; otherwise the job is prepared and run.
@Override
public void run() {
Thread.currentThread().setName("JobRunner-" + node.getJobId() + "-" + executionId);
if (node.getStatus() == Status.DISABLED) {
node.setStartTime(System.currentTimeMillis());
fireEvent(Event.create(this, Type.JOB_STARTED, null, false));
node.setStatus(Status.SKIPPED);
node.setEndTime(System.currentTimeMillis());
fireEvent(Event.create(this, Type.JOB_FINISHED));
return;
} else if (this.cancelled) {
node.setStartTime(System.currentTimeMillis());
fireEvent(Event.create(this, Type.JOB_STARTED, null, false));
node.setStatus(Status.FAILED);
node.setEndTime(System.currentTimeMillis());
fireEvent(Event.create(this, Type.JOB_FINISHED));
// NOTE(review): unlike the surrounding branches this one does not return,
// so execution falls through to the trailing fireEvent(JOB_FINISHED) at
// the bottom of run() and the event is fired twice — likely a bug; verify.
} else if (node.getStatus() == Status.FAILED || node.getStatus() == Status.KILLED) {
node.setStartTime(System.currentTimeMillis());
fireEvent(Event.create(this, Type.JOB_STARTED, null, false));
node.setEndTime(System.currentTimeMillis());
fireEvent(Event.create(this, Type.JOB_FINISHED));
return;
}
else {
createLogger();
node.setUpdateTime(System.currentTimeMillis());
// For pipelining of jobs. Will watch other jobs.
if (!pipelineJobs.isEmpty()) {
String blockedList = "";
ArrayList<BlockingStatus> blockingStatus = new ArrayList<BlockingStatus>();
for (String waitingJobId : pipelineJobs) {
Status status = watcher.peekStatus(waitingJobId);
if (status != null && !Status.isStatusFinished(status)) {
BlockingStatus block = watcher.getBlockingStatus(waitingJobId);
blockingStatus.add(block);
blockedList += waitingJobId + ",";
}
}
if (!blockingStatus.isEmpty()) {
logger.info("Pipeline job " + node.getJobId() + " waiting on " + blockedList + " in execution " + watcher.getExecId());
for(BlockingStatus bStatus: blockingStatus) {
logger.info("Waiting on pipelined job " + bStatus.getJobId());
currentBlockStatus = bStatus;
bStatus.blockOnFinishedStatus();
logger.info("Pipelined job " + bStatus.getJobId() + " finished.");
if (watcher.isWatchCancelled()) {
break;
}
}
writeStatus();
fireEvent(Event.create(this, Type.JOB_STATUS_CHANGED));
}
if (watcher.isWatchCancelled()) {
logger.info("Job was cancelled while waiting on pipeline. Quiting.");
node.setStartTime(System.currentTimeMillis());
node.setEndTime(System.currentTimeMillis());
node.setStatus(Status.FAILED);
fireEvent(Event.create(this, Type.JOB_FINISHED));
return;
}
}
currentBlockStatus = null;
long currentTime = System.currentTimeMillis();
if (delayStartMs > 0) {
logger.info("Delaying start of execution for " + delayStartMs + " milliseconds.");
synchronized(this) {
try {
// cancel() notifies 'this' to break out of the delay early.
this.wait(delayStartMs);
logger.info("Execution has been delayed for " + delayStartMs + " ms. Continuing with execution.");
} catch (InterruptedException e) {
logger.error("Job " + node.getJobId() + " was to be delayed for " + delayStartMs + ". Interrupted after " + (System.currentTimeMillis() - currentTime));
}
}
if (cancelled) {
logger.info("Job was cancelled while in delay. Quiting.");
node.setStartTime(System.currentTimeMillis());
node.setEndTime(System.currentTimeMillis());
fireEvent(Event.create(this, Type.JOB_FINISHED));
return;
}
}
node.setStartTime(System.currentTimeMillis());
fireEvent(Event.create(this, Type.JOB_STARTED, null, false));
try {
loader.uploadExecutableNode(node, props);
} catch (ExecutorManagerException e1) {
logger.error("Error writing initial node properties");
}
if (prepareJob()) {
writeStatus();
fireEvent(Event.create(this, Type.JOB_STATUS_CHANGED), false);
runJob();
}
else {
node.setStatus(Status.FAILED);
logError("Job run failed!");
}
node.setEndTime(System.currentTimeMillis());
logInfo("Finishing job " + node.getJobId() + " at " + node.getEndTime());
closeLogger();
writeStatus();
// Upload every rolled chunk of the job log (sorted newest first).
if (logFile != null) {
try {
File[] files = logFile.getParentFile().listFiles(new FilenameFilter() {
@Override
public boolean accept(File dir, String name) {
return name.startsWith(logFile.getName());
}
}
);
Arrays.sort(files, Collections.reverseOrder());
loader.uploadLogFile(executionId, node.getJobId(), node.getAttempt(), files);
} catch (ExecutorManagerException e) {
flowLogger.error("Error writing out logs for job " + node.getJobId(), e);
}
}
else {
flowLogger.info("Log file for job " + node.getJobId() + " is null");
}
}
fireEvent(Event.create(this, Type.JOB_FINISHED));
}
private void fireEvent(Event event) {
fireEvent(event, true);
}
// Fires an event to listeners, optionally stamping the node update time.
private void fireEvent(Event event, boolean updateTime) {
if (updateTime) {
node.setUpdateTime(System.currentTimeMillis());
}
this.fireEventListeners(event);
}
// Validates preconditions, records attempt metadata, enforces proxy-user
// permission and builds the Job executor; false means the job must not run.
private boolean prepareJob() throws RuntimeException {
// Check pre conditions
if (props == null || cancelled) {
logError("Failing job. The job properties don't exist");
return false;
}
synchronized(syncObject) {
// Re-check under the lock: cancel() may have raced us here.
if (node.getStatus() == Status.FAILED || cancelled) {
return false;
}
if (node.getAttempt() > 0) {
logInfo("Starting job " + node.getJobId() + " attempt " + node.getAttempt() + " at " + node.getStartTime());
}
else {
logInfo("Starting job " + node.getJobId() + " at " + node.getStartTime());
}
props.put(CommonJobProperties.JOB_ATTEMPT, node.getAttempt());
props.put(CommonJobProperties.JOB_METADATA_FILE, createMetaDataFileName(executionId, node.getJobId(), node.getAttempt()));
node.setStatus(Status.RUNNING);
// Ability to specify working directory
if (!props.containsKey(AbstractProcessJob.WORKING_DIR)) {
props.put(AbstractProcessJob.WORKING_DIR, workingDir.getAbsolutePath());
}
if(props.containsKey("user.to.proxy")) {
String jobProxyUser = props.getString("user.to.proxy");
if(proxyUsers != null && !proxyUsers.contains(jobProxyUser)) {
logger.error("User " + jobProxyUser + " has no permission to execute this job " + node.getJobId() + "!");
return false;
}
}
//job = JobWrappingFactory.getJobWrappingFactory().buildJobExecutor(node.getJobId(), props, logger);
try {
job = jobtypeManager.buildJobExecutor(node.getJobId(), props, logger);
}
catch (JobTypeManagerException e) {
logger.error("Failed to build job type, skipping this job");
return false;
}
}
return true;
}
// Runs the prepared job; exception -> FAILED, otherwise SUCCEEDED plus
// capture of any job-generated output properties.
private void runJob() {
try {
job.run();
} catch (Exception e) {
e.printStackTrace();
node.setStatus(Status.FAILED);
logError("Job run failed!");
logError(e.getMessage() + e.getCause());
return;
}
node.setStatus(Status.SUCCEEDED);
if (job != null) {
outputProps = job.getJobGeneratedProperties();
node.setOutputProps(outputProps);
}
}
// Requests cancellation: unblocks pipeline waits, wakes a delayed start,
// and cancels the underlying job if already built.
public void cancel() {
synchronized (syncObject) {
logError("Cancel has been called.");
this.cancelled = true;
BlockingStatus status = currentBlockStatus;
if (status != null) {
status.unblock();
}
// Cancel code here
if (job == null) {
logError("Job hasn't started yet.");
// Just in case we're waiting on the delay
synchronized(this) {
this.notify();
}
return;
}
try {
job.cancel();
} catch (Exception e) {
logError(e.getMessage());
logError("Failed trying to cancel job. Maybe it hasn't started running yet or just finished.");
}
}
}
public boolean isCancelled() {
return cancelled;
}
public Status getStatus() {
return node.getStatus();
}
public Props getOutputProps() {
return outputProps;
}
// Both log helpers tolerate a null logger (before createLogger() ran).
private void logError(String message) {
if (logger != null) {
logger.error(message);
}
}
private void logInfo(String message) {
if (logger != null) {
logger.info(message);
}
}
public File getLogFile() {
return logFile;
}
public int getRetries() {
return props.getInt("retries", 0);
}
public long getRetryBackoff() {
return props.getLong("retry.backoff", 0);
}
// Builds the job log file name; attempt 0 omits the attempt component.
public static String createLogFileName(int executionId, String jobId, int attempt) {
return attempt > 0 ? "_job." + executionId + "." + attempt + "." + jobId + ".log" : "_job." + executionId + "." + jobId + ".log";
}
// Builds the job metadata file name; attempt 0 omits the attempt component.
public static String createMetaDataFileName(int executionId, String jobId, int attempt) {
return attempt > 0 ? "_job." + executionId + "." + attempt + "." + jobId + ".meta" : "_job." + executionId + "." + jobId + ".meta";
}
}
|
logs from different jobs may appear in the same log file due to Layout thread safety issue.
Using EnhancedPatternLayout for each and every job should fix this.
|
src/java/azkaban/execapp/JobRunner.java
|
logs from different jobs may appear in the same log file due to Layout thread safety issue. Using EnhancedPatternLayout for each and every job should fix this.
|
|
Java
|
apache-2.0
|
31f0e1d2c94adbd789f27a17058266032051e16d
| 0
|
pedrofvteixeira/pentaho-kettle,hudak/pentaho-kettle,tkafalas/pentaho-kettle,skofra0/pentaho-kettle,Advent51/pentaho-kettle,TatsianaKasiankova/pentaho-kettle,bmorrise/pentaho-kettle,Advent51/pentaho-kettle,pedrofvteixeira/pentaho-kettle,emartin-pentaho/pentaho-kettle,lgrill-pentaho/pentaho-kettle,aminmkhan/pentaho-kettle,matthewtckr/pentaho-kettle,hudak/pentaho-kettle,hudak/pentaho-kettle,AlexanderBuloichik/pentaho-kettle,DFieldFL/pentaho-kettle,graimundo/pentaho-kettle,DFieldFL/pentaho-kettle,roboguy/pentaho-kettle,pminutillo/pentaho-kettle,skofra0/pentaho-kettle,flbrino/pentaho-kettle,pminutillo/pentaho-kettle,kurtwalker/pentaho-kettle,HiromuHota/pentaho-kettle,AliaksandrShuhayeu/pentaho-kettle,e-cuellar/pentaho-kettle,pentaho/pentaho-kettle,cjsonger/pentaho-kettle,mbatchelor/pentaho-kettle,TatsianaKasiankova/pentaho-kettle,hudak/pentaho-kettle,cjsonger/pentaho-kettle,ccaspanello/pentaho-kettle,aminmkhan/pentaho-kettle,alina-ipatina/pentaho-kettle,marcoslarsen/pentaho-kettle,bmorrise/pentaho-kettle,pavel-sakun/pentaho-kettle,zlcnju/kettle,tkafalas/pentaho-kettle,bmorrise/pentaho-kettle,kurtwalker/pentaho-kettle,e-cuellar/pentaho-kettle,lgrill-pentaho/pentaho-kettle,roboguy/pentaho-kettle,emartin-pentaho/pentaho-kettle,zlcnju/kettle,bmorrise/pentaho-kettle,TatsianaKasiankova/pentaho-kettle,mdamour1976/pentaho-kettle,alina-ipatina/pentaho-kettle,HiromuHota/pentaho-kettle,DFieldFL/pentaho-kettle,mkambol/pentaho-kettle,ccaspanello/pentaho-kettle,SergeyTravin/pentaho-kettle,dkincade/pentaho-kettle,mbatchelor/pentaho-kettle,pavel-sakun/pentaho-kettle,mdamour1976/pentaho-kettle,marcoslarsen/pentaho-kettle,mdamour1976/pentaho-kettle,AliaksandrShuhayeu/pentaho-kettle,ViswesvarSekar/pentaho-kettle,zlcnju/kettle,DFieldFL/pentaho-kettle,AliaksandrShuhayeu/pentaho-kettle,zlcnju/kettle,mbatchelor/pentaho-kettle,ViswesvarSekar/pentaho-kettle,lgrill-pentaho/pentaho-kettle,rmansoor/pentaho-kettle,wseyler/pentaho-kettle,mdamour1976/pentaho-kettle,HiromuHota/pentaho-kettle,wseyler/pe
ntaho-kettle,aminmkhan/pentaho-kettle,ccaspanello/pentaho-kettle,mbatchelor/pentaho-kettle,wseyler/pentaho-kettle,Advent51/pentaho-kettle,skofra0/pentaho-kettle,dkincade/pentaho-kettle,matthewtckr/pentaho-kettle,flbrino/pentaho-kettle,tmcsantos/pentaho-kettle,mkambol/pentaho-kettle,stepanovdg/pentaho-kettle,matthewtckr/pentaho-kettle,aminmkhan/pentaho-kettle,skofra0/pentaho-kettle,pentaho/pentaho-kettle,pedrofvteixeira/pentaho-kettle,stepanovdg/pentaho-kettle,HiromuHota/pentaho-kettle,cjsonger/pentaho-kettle,SergeyTravin/pentaho-kettle,emartin-pentaho/pentaho-kettle,graimundo/pentaho-kettle,tmcsantos/pentaho-kettle,tmcsantos/pentaho-kettle,flbrino/pentaho-kettle,graimundo/pentaho-kettle,mkambol/pentaho-kettle,pedrofvteixeira/pentaho-kettle,tkafalas/pentaho-kettle,flbrino/pentaho-kettle,ccaspanello/pentaho-kettle,dkincade/pentaho-kettle,e-cuellar/pentaho-kettle,e-cuellar/pentaho-kettle,ViswesvarSekar/pentaho-kettle,rmansoor/pentaho-kettle,SergeyTravin/pentaho-kettle,rmansoor/pentaho-kettle,SergeyTravin/pentaho-kettle,pminutillo/pentaho-kettle,ddiroma/pentaho-kettle,tmcsantos/pentaho-kettle,mkambol/pentaho-kettle,AlexanderBuloichik/pentaho-kettle,emartin-pentaho/pentaho-kettle,pavel-sakun/pentaho-kettle,tkafalas/pentaho-kettle,AlexanderBuloichik/pentaho-kettle,alina-ipatina/pentaho-kettle,alina-ipatina/pentaho-kettle,roboguy/pentaho-kettle,pentaho/pentaho-kettle,kurtwalker/pentaho-kettle,marcoslarsen/pentaho-kettle,stepanovdg/pentaho-kettle,stepanovdg/pentaho-kettle,dkincade/pentaho-kettle,ddiroma/pentaho-kettle,ddiroma/pentaho-kettle,pentaho/pentaho-kettle,kurtwalker/pentaho-kettle,TatsianaKasiankova/pentaho-kettle,pminutillo/pentaho-kettle,rmansoor/pentaho-kettle,matthewtckr/pentaho-kettle,ddiroma/pentaho-kettle,pavel-sakun/pentaho-kettle,graimundo/pentaho-kettle,AliaksandrShuhayeu/pentaho-kettle,cjsonger/pentaho-kettle,ViswesvarSekar/pentaho-kettle,wseyler/pentaho-kettle,roboguy/pentaho-kettle,lgrill-pentaho/pentaho-kettle,AlexanderBuloichik/pentaho-kettle,Advent51
/pentaho-kettle,marcoslarsen/pentaho-kettle
|
// CHECKSTYLE:FileLength:OFF
/*! ******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2017 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.ui.core.widget;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashSet;
import java.util.Hashtable;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.eclipse.swt.SWT;
import org.eclipse.swt.SWTException;
import org.eclipse.swt.custom.CCombo;
import org.eclipse.swt.custom.TableEditor;
import org.eclipse.swt.dnd.Clipboard;
import org.eclipse.swt.dnd.DND;
import org.eclipse.swt.dnd.DragSource;
import org.eclipse.swt.dnd.DragSourceEvent;
import org.eclipse.swt.dnd.DragSourceListener;
import org.eclipse.swt.dnd.TextTransfer;
import org.eclipse.swt.dnd.Transfer;
import org.eclipse.swt.events.DisposeEvent;
import org.eclipse.swt.events.DisposeListener;
import org.eclipse.swt.events.FocusAdapter;
import org.eclipse.swt.events.FocusEvent;
import org.eclipse.swt.events.KeyAdapter;
import org.eclipse.swt.events.KeyEvent;
import org.eclipse.swt.events.KeyListener;
import org.eclipse.swt.events.ModifyEvent;
import org.eclipse.swt.events.ModifyListener;
import org.eclipse.swt.events.MouseAdapter;
import org.eclipse.swt.events.MouseEvent;
import org.eclipse.swt.events.SelectionAdapter;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.events.SelectionListener;
import org.eclipse.swt.events.TraverseEvent;
import org.eclipse.swt.events.TraverseListener;
import org.eclipse.swt.graphics.Color;
import org.eclipse.swt.graphics.Font;
import org.eclipse.swt.graphics.GC;
import org.eclipse.swt.graphics.Image;
import org.eclipse.swt.graphics.Point;
import org.eclipse.swt.graphics.Rectangle;
import org.eclipse.swt.layout.FormAttachment;
import org.eclipse.swt.layout.FormData;
import org.eclipse.swt.layout.FormLayout;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Control;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Event;
import org.eclipse.swt.widgets.Listener;
import org.eclipse.swt.widgets.Menu;
import org.eclipse.swt.widgets.MenuItem;
import org.eclipse.swt.widgets.MessageBox;
import org.eclipse.swt.widgets.ScrollBar;
import org.eclipse.swt.widgets.Table;
import org.eclipse.swt.widgets.TableColumn;
import org.eclipse.swt.widgets.TableItem;
import org.eclipse.swt.widgets.Text;
import org.pentaho.di.core.Condition;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.Props;
import org.pentaho.di.core.RowMetaAndData;
import org.pentaho.di.core.exception.KettleValueException;
import org.pentaho.di.core.row.RowMeta;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.core.row.ValueMetaInterface;
import org.pentaho.di.core.row.value.ValueMetaFactory;
import org.pentaho.di.core.row.value.ValueMetaInteger;
import org.pentaho.di.core.row.value.ValueMetaString;
import org.pentaho.di.core.undo.TransAction;
import org.pentaho.di.core.variables.VariableSpace;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.ui.core.PropsUI;
import org.pentaho.di.ui.core.dialog.EnterConditionDialog;
import org.pentaho.di.ui.core.dialog.ErrorDialog;
import org.pentaho.di.ui.core.gui.GUIResource;
/**
* Widget to display or modify data, displayed in a Table format.
*
* @author Matt
* @since 27-05-2003
*/
public class TableView extends Composite {
private static Class<?> PKG = TableView.class; // for i18n purposes, needed by Translator2!!
private Composite parent;
private ColumnInfo[] columns;
private int rows;
private boolean readonly;
private int buttonRownr;
private int buttonColnr;
private String buttonContent;
private boolean previousShift;
private int selectionStart;
public Table table;
private TableEditor editor;
private TableColumn[] tablecolumn;
private PropsUI props;
private Control text;
private CCombo combo;
private Button button;
private TableItem activeTableItem;
private int activeTableColumn;
private int activeTableRow;
private KeyListener lsKeyText, lsKeyCombo;
private FocusAdapter lsFocusText, lsFocusCombo;
private ModifyListener lsModCombo;
private TraverseListener lsTraverse;
private int sortfield;
private int sortfieldLast;
private boolean sortingDescending;
private Boolean sortingDescendingLast;
private boolean sortable;
private int lastRowCount;
private boolean fieldChanged;
private Menu mRow;
private ModifyListener lsMod, lsUndo, lsContent;
private Clipboard clipboard;
// The following Image and Graphics Context are used for font metrics. We only
// want them created once.
private static Image dummyImage;
private static GC dummyGC;
private Font gridFont;
// private int last_carret_position;
private ArrayList<TransAction> undo;
private int undoPosition;
private String[] beforeEdit;
private MenuItem miEditUndo, miEditRedo;
private static final String CLIPBOARD_DELIMITER = "\t";
private Condition condition;
private Color defaultBackgroundColor;
private Map<String, Color> usedColors;
private ColumnInfo numberColumn;
protected int textWidgetCaretPosition;
private VariableSpace variables;
private boolean showingBlueNullValues;
private boolean showingConversionErrorsInline;
private boolean isTextButton = false;
private boolean addIndexColumn = true;
  /**
   * Convenience constructor for an editable (non-read-only) table with the
   * row-number column shown.
   *
   * @param space      variable space used to resolve variables in variable-aware cells
   * @param parent     parent composite
   * @param style      SWT style flags for the embedded table
   * @param columnInfo description of the data columns
   * @param nrRows     initial number of (empty) rows
   * @param lsm        modify listener notified when the content changes
   * @param pr         UI properties (fonts, look &amp; feel)
   */
  public TableView( VariableSpace space, Composite parent, int style, ColumnInfo[] columnInfo, int nrRows,
    ModifyListener lsm, PropsUI pr ) {
    this( space, parent, style, columnInfo, nrRows, false, lsm, pr );
  }
public TableView( VariableSpace space, Composite parent, int style, ColumnInfo[] columnInfo, int nrRows,
boolean readOnly, ModifyListener lsm, PropsUI pr ) {
this( space, parent, style, columnInfo, nrRows, false, lsm, pr, true );
}
  /**
   * Full constructor. Builds the complete widget: the SWT {@link Table} with its
   * columns, the right-click row context menu, the in-place cell editors
   * (text / combo / text-button), keyboard navigation and shortcuts, column
   * sorting, undo bookkeeping, clipboard support and drag &amp; drop.
   *
   * @param space          variable space used to resolve variables in variable-aware cells
   * @param parent         parent composite
   * @param style          SWT style flags, OR-ed with SWT.MULTI for the embedded table
   * @param columnInfo     description of the data columns (the '#' index column is added automatically)
   * @param nrRows         initial number of (empty) rows; at least one row is always created
   * @param readOnly       when true, all modifying context-menu actions and shortcuts are disabled
   * @param lsm            modify listener notified when the content changes
   * @param pr             UI properties (fonts, look &amp; feel)
   * @param addIndexColumn whether the leading row-number column is visible (width 0 when false)
   */
  public TableView( VariableSpace space, Composite parent, int style, ColumnInfo[] columnInfo, int nrRows,
    boolean readOnly, ModifyListener lsm, PropsUI pr, final boolean addIndexColumn ) {
    super( parent, SWT.NO_BACKGROUND | SWT.NO_FOCUS | SWT.NO_MERGE_PAINTS | SWT.NO_RADIO_GROUP );
    // Capture the constructor arguments and reset all editing/sorting/selection state.
    this.parent = parent;
    this.columns = columnInfo;
    this.rows = nrRows;
    this.props = pr;
    this.readonly = readOnly;
    this.clipboard = null;
    this.variables = space;
    this.addIndexColumn = addIndexColumn;
    sortfield = 0;
    sortfieldLast = -1;
    sortingDescending = false;
    sortingDescendingLast = null;
    sortable = true;
    selectionStart = -1;
    previousShift = false;
    usedColors = new Hashtable<String, Color>();
    condition = null;
    lsMod = lsm;
    clearUndo();
    // The leading '#' column shows the 1-based row number, rendered as an integer.
    numberColumn = new ColumnInfo( "#", ColumnInfo.COLUMN_TYPE_TEXT, true, true );
    ValueMetaInterface numberColumnValueMeta = new ValueMetaInteger( "#" );
    numberColumnValueMeta.setConversionMask( "####0" );
    numberColumn.setValueMeta( numberColumnValueMeta );
    // Any change made through a cell editor flags the table content as modified.
    lsUndo = new ModifyListener() {
      @Override
      public void modifyText( ModifyEvent arg0 ) {
        fieldChanged = true;
      }
    };
    // Lazily create the shared (static) 1x1 image + GC used only for font metrics.
    if ( TableView.dummyGC == null ) {
      Display disp = parent.getDisplay();
      TableView.dummyImage = new Image( disp, 1, 1 );
      TableView.dummyGC = new GC( TableView.dummyImage );
      gridFont = new Font( disp, props.getGridFont() );
      TableView.dummyGC.setFont( gridFont );
    }
    FormLayout controlLayout = new FormLayout();
    controlLayout.marginLeft = 0;
    controlLayout.marginRight = 0;
    controlLayout.marginTop = 0;
    controlLayout.marginBottom = 0;
    setLayout( controlLayout );
    // setLayout(new GridLayout());
    // Create table, add columns & rows...
    table = new Table( this, style | SWT.MULTI );
    props.setLook( table, Props.WIDGET_STYLE_TABLE );
    table.setLinesVisible( true );
    // table.setLayout(new FormLayout());
    // table.setLayoutData(new GridData(GridData.FILL_BOTH));
    // The table fills this composite completely.
    FormData fdTable = new FormData();
    fdTable.left = new FormAttachment( 0, 0 );
    fdTable.right = new FormAttachment( 100, 0 );
    fdTable.top = new FormAttachment( 0, 0 );
    fdTable.bottom = new FormAttachment( 100, 0 );
    table.setLayoutData( fdTable );
    // Column 0 is the row-number column; setting width 0 effectively hides it.
    tablecolumn = new TableColumn[columns.length + 1];
    tablecolumn[0] = new TableColumn( table, SWT.RIGHT );
    tablecolumn[0].setResizable( true );
    tablecolumn[0].setText( "#" );
    tablecolumn[0].setWidth( addIndexColumn ? 25 : 0 );
    tablecolumn[0].setAlignment( SWT.RIGHT );
    // Create one TableColumn per described data column (offset by 1 for '#').
    for ( int i = 0; i < columns.length; i++ ) {
      int allignment = columns[i].getAllignement();
      tablecolumn[i + 1] = new TableColumn( table, allignment );
      tablecolumn[i + 1].setResizable( true );
      if ( columns[i].getName() != null ) {
        tablecolumn[i + 1].setText( columns[i].getName() );
      }
      if ( columns[i].getToolTip() != null ) {
        tablecolumn[i + 1].setToolTipText( ( columns[i].getToolTip() ) );
      }
      ValueMetaInterface valueMeta = columns[i].getValueMeta();
      if ( valueMeta != null && valueMeta.isNumeric() ) {
        tablecolumn[i + 1].setAlignment( SWT.RIGHT );
      }
      tablecolumn[i + 1].pack();
    }
    table.setHeaderVisible( true );
    table.setLinesVisible( true );
    // Set the default values...
    if ( rows > 0 ) {
      table.setItemCount( rows );
    } else {
      table.setItemCount( 1 );
    }
    // Get the background color of item 0, before anything happened with it,
    // that's the default color.
    defaultBackgroundColor = table.getItem( 0 ).getBackground();
    setRowNums();
    // Set the sort sign on the first column. (0)
    table.setSortColumn( table.getColumn( sortfield ) );
    table.setSortDirection( sortingDescending ? SWT.DOWN : SWT.UP );
    // create a ControlEditor field to edit the contents of a cell
    editor = new TableEditor( table );
    editor.grabHorizontal = true;
    editor.grabVertical = true;
    // Build the right-click row context menu.
    mRow = new Menu( table );
    MenuItem miRowInsBef = new MenuItem( mRow, SWT.NONE );
    miRowInsBef.setText( OsHelper.customizeMenuitemText( BaseMessages.getString(
      PKG, "TableView.menu.InsertBeforeRow" ) ) );
    MenuItem miRowInsAft = new MenuItem( mRow, SWT.NONE );
    miRowInsAft.setText( OsHelper.customizeMenuitemText( BaseMessages.getString(
      PKG, "TableView.menu.InsertAfterRow" ) ) );
    new MenuItem( mRow, SWT.SEPARATOR );
    MenuItem miRowUp = new MenuItem( mRow, SWT.NONE );
    miRowUp.setText( OsHelper.customizeMenuitemText( BaseMessages.getString( PKG, "TableView.menu.MoveUp" ) ) );
    MenuItem miRowDown = new MenuItem( mRow, SWT.NONE );
    miRowDown.setText( OsHelper.customizeMenuitemText( BaseMessages.getString( PKG, "TableView.menu.MoveDown" ) ) );
    MenuItem miCol1 = new MenuItem( mRow, SWT.NONE );
    miCol1.setText( OsHelper.customizeMenuitemText( BaseMessages.getString(
      PKG, "TableView.menu.OptimalSizeWithHeader" ) ) );
    MenuItem miCol2 = new MenuItem( mRow, SWT.NONE );
    miCol2.setText( OsHelper.customizeMenuitemText( BaseMessages.getString(
      PKG, "TableView.menu.OptimalSizeWithoutHeader" ) ) );
    new MenuItem( mRow, SWT.SEPARATOR );
    MenuItem miClear = new MenuItem( mRow, SWT.NONE );
    miClear.setText( OsHelper.customizeMenuitemText( BaseMessages.getString( PKG, "TableView.menu.ClearAll" ) ) );
    new MenuItem( mRow, SWT.SEPARATOR );
    MenuItem miSelAll = new MenuItem( mRow, SWT.NONE );
    miSelAll.setText( OsHelper.customizeMenuitemText( BaseMessages.getString( PKG, "TableView.menu.SelectAll" ) ) );
    MenuItem miUnselAll = new MenuItem( mRow, SWT.NONE );
    miUnselAll.setText( OsHelper.customizeMenuitemText( BaseMessages.getString(
      PKG, "TableView.menu.ClearSelection" ) ) );
    MenuItem miFilter = new MenuItem( mRow, SWT.NONE );
    miFilter.setText( OsHelper.customizeMenuitemText( BaseMessages.getString(
      PKG, "TableView.menu.FilteredSelection" ) ) );
    new MenuItem( mRow, SWT.SEPARATOR );
    MenuItem miClipAll = new MenuItem( mRow, SWT.NONE );
    miClipAll.setText( OsHelper.customizeMenuitemText( BaseMessages.getString(
      PKG, "TableView.menu.CopyToClipboard" ) ) );
    MenuItem miPasteAll = new MenuItem( mRow, SWT.NONE );
    miPasteAll.setText( OsHelper.customizeMenuitemText( BaseMessages.getString(
      PKG, "TableView.menu.PasteFromClipboard" ) ) );
    MenuItem miCutAll = new MenuItem( mRow, SWT.NONE );
    miCutAll
      .setText( OsHelper.customizeMenuitemText( BaseMessages.getString( PKG, "TableView.menu.CutSelected" ) ) );
    MenuItem miDelAll = new MenuItem( mRow, SWT.NONE );
    miDelAll.setText( OsHelper.customizeMenuitemText( BaseMessages
      .getString( PKG, "TableView.menu.DeleteSelected" ) ) );
    MenuItem miKeep = new MenuItem( mRow, SWT.NONE );
    miKeep
      .setText( OsHelper.customizeMenuitemText( BaseMessages.getString( PKG, "TableView.menu.KeepSelected" ) ) );
    new MenuItem( mRow, SWT.SEPARATOR );
    MenuItem miCopyToAll = new MenuItem( mRow, SWT.NONE );
    miCopyToAll.setText( OsHelper.customizeMenuitemText( BaseMessages.getString(
      PKG, "TableView.menu.CopyFieldToAllRows" ) ) );
    new MenuItem( mRow, SWT.SEPARATOR );
    miEditUndo = new MenuItem( mRow, SWT.NONE );
    miEditRedo = new MenuItem( mRow, SWT.NONE );
    setUndoMenu();
    // A read-only table keeps navigation/copy but disables every mutating action.
    if ( readonly ) {
      miRowInsBef.setEnabled( false );
      miRowInsAft.setEnabled( false );
      miRowUp.setEnabled( false );
      miRowDown.setEnabled( false );
      miClear.setEnabled( false );
      miCopyToAll.setEnabled( false );
      miPasteAll.setEnabled( false );
      miDelAll.setEnabled( false );
      miCutAll.setEnabled( false );
      miKeep.setEnabled( false );
    }
    // Wire each context-menu entry to the corresponding table operation.
    SelectionAdapter lsRowInsBef = new SelectionAdapter() {
      @Override
      public void widgetSelected( SelectionEvent e ) {
        insertRowBefore();
      }
    };
    SelectionAdapter lsRowInsAft = new SelectionAdapter() {
      @Override
      public void widgetSelected( SelectionEvent e ) {
        insertRowAfter();
      }
    };
    SelectionAdapter lsCol1 = new SelectionAdapter() {
      @Override
      public void widgetSelected( SelectionEvent e ) {
        optWidth( true );
      }
    };
    SelectionAdapter lsCol2 = new SelectionAdapter() {
      @Override
      public void widgetSelected( SelectionEvent e ) {
        optWidth( false );
      }
    };
    SelectionAdapter lsRowUp = new SelectionAdapter() {
      @Override
      public void widgetSelected( SelectionEvent e ) {
        moveRows( -1 );
      }
    };
    SelectionAdapter lsRowDown = new SelectionAdapter() {
      @Override
      public void widgetSelected( SelectionEvent e ) {
        moveRows( +1 );
      }
    };
    SelectionAdapter lsClear = new SelectionAdapter() {
      @Override
      public void widgetSelected( SelectionEvent e ) {
        clearAll( true );
      }
    };
    SelectionAdapter lsClipAll = new SelectionAdapter() {
      @Override
      public void widgetSelected( SelectionEvent e ) {
        clipSelected();
      }
    };
    SelectionAdapter lsCopyToAll = new SelectionAdapter() {
      @Override
      public void widgetSelected( SelectionEvent e ) {
        copyToAll();
      }
    };
    SelectionAdapter lsSelAll = new SelectionAdapter() {
      @Override
      public void widgetSelected( SelectionEvent e ) {
        selectAll();
      }
    };
    SelectionAdapter lsUnselAll = new SelectionAdapter() {
      @Override
      public void widgetSelected( SelectionEvent e ) {
        unselectAll();
      }
    };
    SelectionAdapter lsPasteAll = new SelectionAdapter() {
      @Override
      public void widgetSelected( SelectionEvent e ) {
        pasteSelected();
      }
    };
    SelectionAdapter lsCutAll = new SelectionAdapter() {
      @Override
      public void widgetSelected( SelectionEvent e ) {
        cutSelected();
      }
    };
    SelectionAdapter lsDelAll = new SelectionAdapter() {
      @Override
      public void widgetSelected( SelectionEvent e ) {
        delSelected();
      }
    };
    SelectionAdapter lsKeep = new SelectionAdapter() {
      @Override
      public void widgetSelected( SelectionEvent e ) {
        keepSelected();
      }
    };
    SelectionAdapter lsFilter = new SelectionAdapter() {
      @Override
      public void widgetSelected( SelectionEvent e ) {
        setFilter();
      }
    };
    SelectionAdapter lsEditUndo = new SelectionAdapter() {
      @Override
      public void widgetSelected( SelectionEvent e ) {
        undoAction();
      }
    };
    SelectionAdapter lsEditRedo = new SelectionAdapter() {
      @Override
      public void widgetSelected( SelectionEvent e ) {
        redoAction();
      }
    };
    miRowInsBef.addSelectionListener( lsRowInsBef );
    miRowInsAft.addSelectionListener( lsRowInsAft );
    miCol1.addSelectionListener( lsCol1 );
    miCol2.addSelectionListener( lsCol2 );
    miRowUp.addSelectionListener( lsRowUp );
    miRowDown.addSelectionListener( lsRowDown );
    miClear.addSelectionListener( lsClear );
    miClipAll.addSelectionListener( lsClipAll );
    miCopyToAll.addSelectionListener( lsCopyToAll );
    miSelAll.addSelectionListener( lsSelAll );
    miUnselAll.addSelectionListener( lsUnselAll );
    miPasteAll.addSelectionListener( lsPasteAll );
    miCutAll.addSelectionListener( lsCutAll );
    miDelAll.addSelectionListener( lsDelAll );
    miKeep.addSelectionListener( lsKeep );
    miFilter.addSelectionListener( lsFilter );
    miEditUndo.addSelectionListener( lsEditUndo );
    miEditRedo.addSelectionListener( lsEditRedo );
    table.setMenu( mRow );
    // Text editor: when it loses focus, push its value into the cell and record the
    // change for undo. TEXT_BUTTON cells delay the commit on a background thread so
    // that a click on the embedded button is not destroyed by the editor teardown.
    lsFocusText = new FocusAdapter() {
      @Override
      public void focusLost( FocusEvent e ) {
        final Display d = Display.getCurrent();
        if ( table.isDisposed() ) {
          return;
        }
        final TableItem row = activeTableItem;
        if ( row == null ) {
          return;
        }
        final int colnr = activeTableColumn;
        final int rownr = table.indexOf( row );
        final Control ftext = text;
        final String[] fBeforeEdit = beforeEdit;
        // Save the position of the caret for the focus-dropping popup-dialogs
        // The content is then in contentDestination
        textWidgetCaretPosition = getTextWidgetCaretPosition( colnr );
        final String value = getTextWidgetValue( colnr );
        final Runnable worker = new Runnable() {
          @Override
          public void run() {
            try {
              if ( row.isDisposed() ) {
                return;
              }
              row.setText( colnr, value );
              ftext.dispose();
              String[] afterEdit = getItemText( row );
              checkChanged( new String[][]{ fBeforeEdit }, new String[][]{ afterEdit }, new int[]{ rownr } );
            } catch ( Exception ignored ) {
              // widget is disposed, ignore
            }
          }
        };
        // force the immediate update
        if ( !row.isDisposed() ) {
          row.setText( colnr, value );
        }
        if ( columns[colnr - 1].getType() == ColumnInfo.COLUMN_TYPE_TEXT_BUTTON ) {
          try {
            Thread.sleep( 500 );
          } catch ( InterruptedException ignored ) {
          }
          Runnable r = new Runnable() {
            @Override
            public void run() {
              d.asyncExec( worker );
            }
          };
          Thread t = new Thread( r );
          t.start();
        } else {
          worker.run();
        }
      }
    };
    // Combo editor: commit its value to the cell on focus loss, then dispose it.
    lsFocusCombo = new FocusAdapter() {
      @Override
      public void focusLost( FocusEvent e ) {
        TableItem row = activeTableItem;
        if ( row == null ) {
          return;
        }
        int colnr = activeTableColumn;
        int rownr = table.indexOf( row );
        if ( colnr > 0 ) {
          try {
            row.setText( colnr, combo.getText() );
          } catch ( Exception exc ) {
            // Eat widget disposed error
          }
          String[] afterEdit = getItemText( row );
          if ( afterEdit != null ) {
            checkChanged( new String[][]{ beforeEdit }, new String[][]{ afterEdit }, new int[]{ rownr } );
          }
        }
        combo.dispose();
      }
    };
    // Combo editor: propagate every modification into the table immediately so
    // change listeners fire while the user is still editing.
    lsModCombo = new ModifyListener() {
      @Override
      public void modifyText( ModifyEvent e ) {
        TableItem row = activeTableItem;
        if ( row == null ) {
          return;
        }
        int colnr = activeTableColumn;
        int rownr = table.indexOf( row );
        row.setText( colnr, combo.getText() );
        String[] afterEdit = getItemText( row );
        checkChanged( new String[][]{ beforeEdit }, new String[][]{ afterEdit }, new int[]{ rownr } );
      }
    };
    // Catch the keys pressed when editing a Text-field...
    lsKeyText = new KeyAdapter() {
      @Override
      public void keyPressed( KeyEvent e ) {
        boolean right = false;
        boolean left = false;
        /*
         * left = e.keyCode == SWT.ARROW_LEFT && last_carret_position==0;
         *
         * if (text!=null && !text.isDisposed()) right = e.keyCode == SWT.ARROW_RIGHT &&
         * last_carret_position==text.getText().length();
         */
        // "ENTER": close the text editor and copy the data over
        // We edit the data after moving to another cell, only if editNextCell =
        // true;
        if ( e.character == SWT.CR
          || e.keyCode == SWT.ARROW_DOWN || e.keyCode == SWT.ARROW_UP || e.keyCode == SWT.TAB || left || right ) {
          if ( activeTableItem == null ) {
            return;
          }
          applyTextChange( activeTableItem, activeTableRow, activeTableColumn );
          int maxcols = table.getColumnCount();
          int maxrows = table.getItemCount();
          boolean editNextCell = false;
          if ( e.keyCode == SWT.ARROW_DOWN && activeTableRow < maxrows - 1 ) {
            activeTableRow++;
            editNextCell = true;
          }
          if ( e.keyCode == SWT.ARROW_UP && activeTableRow > 0 ) {
            activeTableRow--;
            editNextCell = true;
          }
          // TAB
          if ( ( e.keyCode == SWT.TAB && ( ( e.stateMask & SWT.SHIFT ) == 0 ) ) || right ) {
            activeTableColumn++;
            editNextCell = true;
          }
          // Shift Tab
          if ( ( e.keyCode == SWT.TAB && ( ( e.stateMask & SWT.SHIFT ) != 0 ) ) || left ) {
            activeTableColumn--;
            editNextCell = true;
          }
          if ( activeTableColumn < 1 ) { // from SHIFT-TAB
            activeTableColumn = maxcols - 1;
            if ( activeTableRow > 0 ) {
              activeTableRow--;
            }
          }
          if ( activeTableColumn >= maxcols ) { // from TAB
            activeTableColumn = 1;
            activeTableRow++;
          }
          // Tab beyond last line: add a line to table!
          if ( activeTableRow >= maxrows ) {
            TableItem item = new TableItem( table, SWT.NONE, activeTableRow );
            item.setText( 1, "" );
            setRowNums();
          }
          activeTableItem = table.getItem( activeTableRow ); // just to make sure!
          if ( editNextCell ) {
            edit( activeTableRow, activeTableColumn );
          } else {
            if ( e.keyCode == SWT.ARROW_DOWN && activeTableRow == maxrows - 1 ) {
              insertRowAfter();
            }
          }
        } else if ( e.keyCode == SWT.ESC ) {
          text.dispose();
          // setFocus();
          table.setFocus();
        }
        // last_carret_position = text.isDisposed()?-1:text.getCaretPosition();
      }
    };
    // Catch the keys pressed when editing a Combo field
    lsKeyCombo = new KeyAdapter() {
      @Override
      public void keyPressed( KeyEvent e ) {
        boolean ctrl = ( ( e.stateMask & SWT.MOD1 ) != 0 );
        // CTRL-V --> Paste selected infomation...
        if ( e.keyCode == 'v' && ctrl ) {
          e.doit = false;
          if ( clipboard != null ) {
            clipboard.dispose();
            clipboard = null;
          }
          clipboard = new Clipboard( getDisplay() );
          TextTransfer tran = TextTransfer.getInstance();
          String text = (String) clipboard.getContents( tran );
          combo.setText( text );
          return;
        }
        boolean right = false;
        boolean left = false;
        // "ENTER": close the text editor and copy the data over
        if ( e.keyCode == SWT.CR || e.keyCode == SWT.TAB || left || right ) {
          if ( activeTableItem == null ) {
            return;
          }
          applyComboChange( activeTableItem, activeTableRow, activeTableColumn );
          String[] afterEdit = getItemText( activeTableItem );
          checkChanged(
            new String[][]{ beforeEdit }, new String[][]{ afterEdit }, new int[]{ activeTableRow } );
          int maxcols = table.getColumnCount();
          int maxrows = table.getItemCount();
          boolean sel = false;
          // TAB
          if ( ( e.keyCode == SWT.TAB && ( ( e.stateMask & SWT.SHIFT ) == 0 ) ) || right ) {
            activeTableColumn++;
            sel = true;
          }
          // Shift Tab
          // NOTE(review): the matching text-editor handler (lsKeyText) uses 'left'
          // here; '|| right' looks like a copy/paste slip. Both flags are hard-coded
          // false in this handler, so behavior is currently unaffected either way.
          if ( ( e.keyCode == SWT.TAB && ( ( e.stateMask & SWT.SHIFT ) != 0 ) ) || right ) {
            activeTableColumn--;
            sel = true;
          }
          if ( activeTableColumn < 1 ) { // from SHIFT-TAB
            activeTableColumn = maxcols - 1;
            if ( activeTableRow > 0 ) {
              activeTableRow--;
            }
          }
          if ( activeTableColumn >= maxcols ) { // from TAB
            activeTableColumn = 1;
            activeTableRow++;
          }
          // Tab beyond last line: add a line to table!
          if ( activeTableRow >= maxrows ) {
            TableItem item = new TableItem( table, SWT.NONE, activeTableRow );
            item.setText( 1, "" );
            setRowNums();
          }
          if ( sel ) {
            edit( activeTableRow, activeTableColumn );
          }
          table.setFocus();
        } else if ( e.keyCode == SWT.ESC ) {
          if ( activeTableItem != null ) {
            activeTableItem.setText( activeTableColumn, beforeEdit[activeTableColumn - 1] );
          }
          combo.dispose();
          table.setFocus();
          e.doit = false;
        }
        // last_carret_position = combo.isDisposed()?-1:0;
      }
    };
    /*
     * It seems there is an other keyListener active to help control the cursor. There is support for keys like
     * LEFT/RIGHT/UP/DOWN/HOME/END/etc It presents us with a problem because we only get the position of the row/column
     * AFTER the other listener did it's job. Therefor we added global variables prev_rownr and prev_colnr
     */
    KeyListener lsKeyTable = new KeyAdapter() {
      @Override
      public void keyPressed( KeyEvent e ) {
        if ( activeTableItem == null ) {
          return;
        }
        int maxcols = table.getColumnCount();
        int maxrows = table.getItemCount();
        boolean shift = ( e.stateMask & SWT.SHIFT ) != 0;
        if ( !previousShift && shift || selectionStart < 0 ) {
          // Shift is pressed down: reset start of selection
          // No start of selection known? reset as well.
          selectionStart = activeTableRow;
        }
        previousShift = shift;
        boolean ctrl = ( ( e.stateMask & SWT.MOD1 ) != 0 );
        // Move rows up or down shortcuts...
        if ( !readonly && e.keyCode == SWT.ARROW_DOWN && ctrl ) {
          moveRows( +1 );
          e.doit = false;
          return;
        }
        if ( !readonly && e.keyCode == SWT.ARROW_UP && ctrl ) {
          moveRows( -1 );
          e.doit = false;
          return;
        }
        // Select extra row down
        if ( e.keyCode == SWT.ARROW_DOWN && shift ) {
          activeTableRow++;
          if ( activeTableRow >= maxrows ) {
            activeTableRow = maxrows - 1;
          }
          selectRows( selectionStart, activeTableRow );
          // activeTableItem = table.getItem(activeTableRow);
          table.showItem( table.getItem( activeTableRow ) );
          e.doit = false;
          return;
        }
        // Select extra row up
        if ( e.keyCode == SWT.ARROW_UP && shift ) {
          activeTableRow--;
          if ( activeTableRow < 0 ) {
            activeTableRow = 0;
          }
          selectRows( activeTableRow, selectionStart );
          // activeTableItem = table.getItem(activeTableRow);
          table.showItem( table.getItem( activeTableRow ) );
          e.doit = false;
          return;
        }
        // Select all rows until end
        if ( e.keyCode == SWT.HOME && shift ) {
          activeTableRow = 0;
          // Select all indeces from "from_selection" to "row"
          selectRows( selectionStart, activeTableRow );
          table.showItem( activeTableItem );
          e.doit = false;
          return;
        }
        // Select extra row up
        if ( e.keyCode == SWT.END && shift ) {
          activeTableRow = maxrows;
          selectRows( selectionStart, activeTableRow );
          table.showItem( activeTableItem );
          e.doit = false;
          return;
        }
        // Move cursor: set selection on the row in question.
        if ( ( e.keyCode == SWT.ARROW_DOWN && !shift )
          || ( e.keyCode == SWT.ARROW_UP && !shift ) || ( e.keyCode == SWT.HOME && !shift )
          || ( e.keyCode == SWT.END && !shift ) ) {
          switch ( e.keyCode ) {
            case SWT.ARROW_DOWN:
              activeTableRow++;
              if ( activeTableRow >= maxrows ) {
                if ( !readonly ) {
                  insertRowAfter();
                } else {
                  activeTableRow = maxrows - 1;
                }
              }
              break;
            case SWT.ARROW_UP:
              activeTableRow--;
              if ( activeTableRow < 0 ) {
                activeTableRow = 0;
              }
              break;
            case SWT.HOME:
              activeTableRow = 0;
              break;
            case SWT.END:
              activeTableRow = maxrows - 1;
              break;
            default:
              break;
          }
          setPosition( activeTableRow, activeTableColumn );
          table.deselectAll();
          table.select( activeTableRow );
          table.showItem( table.getItem( activeTableRow ) );
          e.doit = false;
          return;
        }
        // CTRL-A --> Select All lines
        if ( e.keyCode == 'a' && ctrl ) {
          e.doit = false;
          selectAll();
          return;
        }
        // ESC --> unselect all
        if ( e.keyCode == SWT.ESC ) {
          e.doit = false;
          unselectAll();
          selectRows( activeTableRow, activeTableRow );
          setFocus();
          // table.setFocus();
          return;
        }
        // CTRL-C --> Copy selected lines to clipboard
        if ( e.keyCode == 'c' && ctrl ) {
          e.doit = false;
          clipSelected();
          return;
        }
        // CTRL-K --> keep only selected lines
        if ( !readonly && e.keyCode == 'k' && ctrl ) {
          e.doit = false;
          keepSelected();
          return;
        }
        // CTRL-X --> Cut selected infomation...
        if ( !readonly && e.keyCode == 'x' && ctrl ) {
          e.doit = false;
          cutSelected();
          return;
        }
        // CTRL-V --> Paste selected infomation...
        if ( !readonly && e.keyCode == 'v' && ctrl ) {
          e.doit = false;
          pasteSelected();
          return;
        }
        // F3 --> optimal width including headers
        if ( e.keyCode == SWT.F3 ) {
          e.doit = false;
          optWidth( true );
          return;
        }
        // DEL --> delete selected lines
        if ( !readonly && e.keyCode == SWT.DEL ) {
          e.doit = false;
          delSelected();
          return;
        }
        // F4 --> optimal width excluding headers
        if ( e.keyCode == SWT.F4 ) {
          e.doit = false;
          optWidth( false );
          return;
        }
        // CTRL-Y --> redo action
        if ( e.keyCode == 'y' && ctrl ) {
          e.doit = false;
          redoAction();
          return;
        }
        // CTRL-Z --> undo action
        if ( e.keyCode == 'z' && ctrl ) {
          e.doit = false;
          undoAction();
          return;
        }
        // Return: edit the first field in the row.
        if ( e.keyCode == SWT.CR || e.keyCode == SWT.ARROW_RIGHT || e.keyCode == SWT.TAB ) {
          activeTableColumn = 1;
          edit( activeTableRow, activeTableColumn );
          e.doit = false;
          return;
        }
        if ( activeTableColumn > 0 ) {
          boolean textChar =
            ( e.character >= 'a' && e.character <= 'z' )
              || ( e.character >= 'A' && e.character <= 'Z' ) || ( e.character >= '0' && e.character <= '9' )
              || ( e.character == ' ' ) || ( e.character == '_' ) || ( e.character == ',' )
              || ( e.character == '.' ) || ( e.character == '+' ) || ( e.character == '-' )
              || ( e.character == '*' ) || ( e.character == '/' ) || ( e.character == ';' );
          // setSelection(row, rownr, colnr);
          // character a-z, A-Z, 0-9: start typing...
          if ( e.character == SWT.CR || e.keyCode == SWT.F2 || textChar ) {
            boolean selectText = true;
            char extraChar = 0;
            if ( textChar ) {
              extraChar = e.character;
              selectText = false;
            }
            e.doit = false;
            edit( activeTableRow, activeTableColumn, selectText, extraChar );
          }
          if ( e.character == SWT.TAB ) {
            // TAB
            if ( e.keyCode == SWT.TAB && ( ( e.stateMask & SWT.SHIFT ) == 0 ) ) {
              activeTableColumn++;
            }
            // Shift Tab
            if ( e.keyCode == SWT.TAB && ( ( e.stateMask & SWT.SHIFT ) != 0 ) ) {
              activeTableColumn--;
            }
            if ( activeTableColumn < 1 ) { // from SHIFT-TAB
              activeTableColumn = maxcols - 1;
              if ( activeTableRow > 0 ) {
                activeTableRow--;
              }
            }
            if ( activeTableColumn >= maxcols ) { // from TAB
              activeTableColumn = 1;
              activeTableRow++;
            }
            // Tab beyond last line: add a line to table!
            if ( activeTableRow >= maxrows ) {
              TableItem item = new TableItem( table, SWT.NONE, activeTableRow );
              item.setText( 1, "" );
              setRowNums();
            }
            // row = table.getItem(rownr);
            e.doit = false;
            edit( activeTableRow, activeTableColumn );
          }
        }
        setFocus();
        table.setFocus();
      }
    };
    table.addKeyListener( lsKeyTable );
    // Table listens to the mouse:
    // Mouse click: commit any open editor, then locate the clicked cell and start
    // editing it (left click); a right click only positions, so the menu applies.
    MouseAdapter lsMouseT = new MouseAdapter() {
      @Override
      public void mouseDown( MouseEvent event ) {
        if ( activeTableItem != null
          && editor != null
          && editor.getEditor() != null
          && !editor.getEditor().isDisposed() ) {
          if ( activeTableColumn > 0 ) {
            switch ( columns[activeTableColumn - 1].getType() ) {
              case ColumnInfo.COLUMN_TYPE_TEXT:
                applyTextChange( activeTableItem, activeTableRow, activeTableColumn );
                break;
              case ColumnInfo.COLUMN_TYPE_CCOMBO:
                applyComboChange( activeTableItem, activeTableRow, activeTableColumn );
                break;
            }
          }
        }
        //if ( event.button == 1 ) {
        boolean rightClick = event.button == 3;
        if ( event.button == 1 || rightClick ) {
          boolean shift = ( event.stateMask & SWT.SHIFT ) != 0;
          boolean control = ( event.stateMask & SWT.MOD1 ) != 0;
          if ( !shift && !control ) {
            Rectangle clientArea = table.getClientArea();
            Point pt = new Point( event.x, event.y );
            int index = table.getTopIndex();
            while ( index < table.getItemCount() ) {
              boolean visible = false;
              final TableItem item = table.getItem( index );
              for ( int i = 0; i < table.getColumnCount(); i++ ) {
                Rectangle rect = item.getBounds( i );
                if ( rect.contains( pt ) ) {
                  activeTableItem = item;
                  activeTableColumn = i;
                  activeTableRow = index;
                  if ( !rightClick ) {
                    editSelected();
                  }
                  return;
                } else {
                  if ( i == table.getColumnCount() - 1 && // last column
                    pt.x > rect.x + rect.width && // to the right
                    pt.y >= rect.y && pt.y <= rect.y + rect.height // same
                    // height
                    // as this
                    // visible
                    // item
                  ) {
                    return; // don't do anything when clicking to the right of
                    // the grid.
                  }
                }
                if ( !visible && rect.intersects( clientArea ) ) {
                  visible = true;
                }
              }
              if ( !visible ) {
                return;
              }
              index++;
            }
            if ( rightClick ) {
              return;
            }
            // OK, so they clicked in the table and we did not go into the
            // invisible: below the last line!
            // Position on last row, 1st column and add a new line...
            setPosition( table.getItemCount() - 1, 1 );
            insertRowAfter();
          }
        }
      }
    };
    table.addMouseListener( lsMouseT );
    // Add support for sorted columns!
    //
    final int nrcols = tablecolumn.length;
    for ( int i = 0; i < nrcols; i++ ) {
      final int colnr = i;
      Listener lsSort = new Listener() {
        @Override
        public void handleEvent( Event e ) {
          // Sorting means: clear undo information!
          clearUndo();
          sortTable( colnr );
        }
      };
      tablecolumn[i].addListener( SWT.Selection, lsSort );
    }
    // Swallow traverse events so TAB stays inside the table's own navigation.
    lsTraverse = new TraverseListener() {
      @Override
      public void keyTraversed( TraverseEvent e ) {
        e.doit = false;
      }
    };
    table.addTraverseListener( lsTraverse );
    // cursor.addTraverseListener(lsTraverse);
    // Clean up the clipboard
    addDisposeListener( new DisposeListener() {
      @Override
      public void widgetDisposed( DisposeEvent e ) {
        if ( clipboard != null ) {
          clipboard.dispose();
          clipboard = null;
        }
        if ( gridFont != null ) {
          gridFont.dispose();
        }
      }
    } );
    // Drag & drop source!
    // Drag & Drop for table-viewer
    Transfer[] ttypes = new Transfer[]{ TextTransfer.getInstance() };
    DragSource ddSource = new DragSource( table, DND.DROP_MOVE | DND.DROP_COPY );
    ddSource.setTransfer( ttypes );
    ddSource.addDragListener( new DragSourceListener() {
      @Override
      public void dragStart( DragSourceEvent event ) {
      }
      @Override
      public void dragSetData( DragSourceEvent event ) {
        event.data = "TableView" + Const.CR + getSelectedText();
      }
      @Override
      public void dragFinished( DragSourceEvent event ) {
      }
    } );
    // Initial sizing: pack the columns to their optimal width including headers.
    table.layout();
    table.pack();
    optWidth( true );
    layout();
    pack();
  }
/**
 * Returns the current text of the active cell editor.
 *
 * @param colNr the 1-based table column being edited
 * @return the editor widget's text
 */
protected String getTextWidgetValue( int colNr ) {
  // The editor is a TextVar when the column supports variable
  // substitution, otherwise a plain SWT Text widget.
  if ( columns[colNr - 1].isUsingVariables() ) {
    return ( (TextVar) text ).getText();
  }
  return ( (Text) text ).getText();
}
/**
 * Returns the caret position inside the active cell editor.
 *
 * @param colNr the 1-based table column being edited
 * @return the caret offset within the editor's text
 */
protected int getTextWidgetCaretPosition( int colNr ) {
  // Variable-aware columns wrap the text widget in a TextVar.
  if ( columns[colNr - 1].isUsingVariables() ) {
    return ( (TextVar) text ).getTextWidget().getCaretPosition();
  }
  return ( (Text) text ).getCaretPosition();
}
/**
 * Sorts the table on the given column. Selecting the same column twice
 * in a row toggles between ascending and descending order; selecting a
 * new column starts ascending. Does nothing when sorting is disabled.
 *
 * @param colnr the column index to sort on
 */
public void sortTable( int colnr ) {
  if ( !sortable ) {
    return;
  }
  if ( sortfield != colnr ) {
    // New sort column: start ascending.
    sortfield = colnr;
    sortingDescending = false;
  } else {
    // Same column again: flip the direction.
    sortingDescending = !sortingDescending;
  }
  sortTable( sortfield, sortingDescending );
}
/**
 * Selects the given row indexes in the underlying table.
 *
 * @param selectedItems the row indexes to select
 */
public void setSelection( int[] selectedItems ) {
  table.select( selectedItems );
}
/**
 * Sorts the table rows on the given column, ascending or descending. Cell
 * texts are converted through each column's value metadata so numbers,
 * dates etc. sort by value rather than lexically. Row background colors
 * and blue &lt;null&gt; markers are preserved across the sort. Skips the
 * work entirely when neither the sort settings nor the row count changed.
 *
 * @param sortField         the table column index to sort on
 * @param sortingDescending true to sort descending, false for ascending
 */
public void sortTable( int sortField, boolean sortingDescending ) {
  boolean shouldRefresh = false;
  if ( this.sortfieldLast == -1 && this.sortingDescendingLast == null ) {
    // first time through, so update
    shouldRefresh = true;
    this.sortfieldLast = this.sortfield;
    // Boolean.valueOf instead of the deprecated Boolean constructor.
    this.sortingDescendingLast = Boolean.valueOf( this.sortingDescending );
    this.sortfield = sortField;
    this.sortingDescending = sortingDescending;
  }
  if ( sortfieldLast != this.sortfield ) {
    this.sortfieldLast = this.sortfield;
    this.sortfield = sortField;
    shouldRefresh = true;
  }
  // NOTE(review): this comparison unboxes sortingDescendingLast; it is only
  // null before the first call, where the branch above initializes it.
  if ( sortingDescendingLast != this.sortingDescending ) {
    this.sortingDescendingLast = this.sortingDescending;
    this.sortingDescending = sortingDescending;
    shouldRefresh = true;
  }
  if ( !shouldRefresh && table.getItemCount() == lastRowCount ) {
    return;
  }
  try {
    // First, get all info and put it in a Vector of Rows...
    TableItem[] items = table.getItems();
    List<Object[]> v = new ArrayList<Object[]>();
    // First create the row metadata for the grid
    //
    final RowMetaInterface rowMeta = new RowMeta();
    // First values are the color name + value!
    rowMeta.addValueMeta( new ValueMetaString( "colorname" ) );
    rowMeta.addValueMeta( new ValueMetaInteger( "color" ) );
    for ( int j = 0; j < table.getColumnCount(); j++ ) {
      ColumnInfo colInfo;
      if ( j > 0 ) {
        colInfo = columns[j - 1];
      } else {
        colInfo = numberColumn;
      }
      ValueMetaInterface valueMeta = colInfo.getValueMeta();
      if ( j == sortField ) {
        valueMeta.setSortedDescending( sortingDescending );
      }
      rowMeta.addValueMeta( valueMeta );
    }
    final RowMetaInterface sourceRowMeta = rowMeta.cloneToType( ValueMetaInterface.TYPE_STRING );
    final RowMetaInterface conversionRowMeta = rowMeta.clone();
    // Set it all to string...
    // Also set the storage value metadata: this will allow us to convert back
    // and forth without a problem.
    //
    for ( int i = 0; i < sourceRowMeta.size(); i++ ) {
      ValueMetaInterface sourceValueMeta = sourceRowMeta.getValueMeta( i );
      sourceValueMeta.setStorageType( ValueMetaInterface.STORAGE_TYPE_NORMAL );
      ValueMetaInterface conversionMetaData = conversionRowMeta.getValueMeta( i );
      conversionMetaData.setStorageType( ValueMetaInterface.STORAGE_TYPE_NORMAL );
      // Meaning: this string comes from an Integer/Number/Date/etc.
      //
      sourceRowMeta.getValueMeta( i ).setConversionMetadata( conversionMetaData );
    }
    // Now populate a list of data rows...
    //
    for ( int i = 0; i < items.length; i++ ) {
      TableItem item = items[i];
      Object[] r = new Object[table.getColumnCount() + 2];
      // First values are the color name + value!
      Color bg = item.getBackground();
      if ( !bg.equals( defaultBackgroundColor ) ) {
        String colorName = "bg " + bg.toString();
        r[0] = colorName;
        // Long.valueOf instead of the deprecated Long constructor; packs
        // the RGB components into a single integer value.
        r[1] = Long.valueOf( ( bg.getRed() << 16 ) + ( bg.getGreen() << 8 ) + ( bg.getBlue() ) );
        // Save it in the used colors map!
        usedColors.put( colorName, bg );
      }
      for ( int j = 0; j < table.getColumnCount(); j++ ) {
        String data = item.getText( j );
        // Blue foreground marks a displayed <null>: store it as real null.
        if ( GUIResource.getInstance().getColorBlue().equals( item.getForeground( j ) ) ) {
          data = null;
        }
        ValueMetaInterface sourceValueMeta = sourceRowMeta.getValueMeta( j + 2 );
        try {
          r[j + 2] = sourceValueMeta.convertDataUsingConversionMetaData( data );
        } catch ( Exception e ) {
          if ( isShowingConversionErrorsInline() ) {
            r[j + 2] = Const.getStackTracker( e );
          } else {
            throw e;
          }
        }
      }
      v.add( r );
    }
    final int[] sortIndex = new int[]{ sortField + 2 };
    // Sort the vector!
    Collections.sort( v, new Comparator<Object[]>() {
      @Override
      public int compare( Object[] r1, Object[] r2 ) {
        try {
          return conversionRowMeta.compare( r1, r2, sortIndex );
        } catch ( KettleValueException e ) {
          throw new RuntimeException( "Error comparing rows", e );
        }
      }
    } );
    // Clear the table
    table.removeAll();
    // Refill the table
    for ( int i = 0; i < v.size(); i++ ) {
      Object[] r = v.get( i );
      TableItem item = new TableItem( table, SWT.NONE );
      String colorName = (String) r[0];
      Long colorValue = (Long) r[1];
      if ( colorValue != null ) {
        // Get it from the map
        //
        Color bg = usedColors.get( colorName );
        if ( bg != null ) {
          item.setBackground( bg );
        }
      }
      for ( int j = 2; j < r.length; j++ ) {
        String string = conversionRowMeta.getString( r, j );
        if ( showingBlueNullValues && string == null ) {
          string = "<null>";
          item.setForeground( j - 2, GUIResource.getInstance().getColorBlue() );
        } else {
          item.setForeground( j - 2, GUIResource.getInstance().getColorBlack() );
        }
        if ( string != null ) {
          item.setText( j - 2, string );
        }
      }
    }
    table.setSortColumn( table.getColumn( sortfield ) );
    table.setSortDirection( sortingDescending ? SWT.DOWN : SWT.UP );
    lastRowCount = table.getItemCount();
  } catch ( Exception e ) {
    new ErrorDialog( this.getShell(), BaseMessages.getString( PKG, "TableView.ErrorDialog.title" ), BaseMessages
      .getString( PKG, "TableView.ErrorDialog.description" ), e );
  }
}
/**
 * Replaces the current selection with the inclusive row range
 * [from, to]; the bounds may be given in either order.
 *
 * @param from one end of the row range
 * @param to   the other end of the row range
 */
private void selectRows( int from, int to ) {
  table.deselectAll();
  int lo = Math.min( from, to );
  int hi = Math.max( from, to );
  if ( lo == hi ) {
    table.select( lo );
  } else {
    table.select( lo, hi );
  }
}
/**
 * Commits the text editor's content into the given cell, disposes the
 * editor, records undo information and notifies the content listener.
 *
 * @param row   the table row being edited
 * @param rownr the row index (for undo and listener notification)
 * @param colnr the column index of the edited cell
 */
private void applyTextChange( TableItem row, int rownr, int colnr ) {
  String textData = getTextWidgetValue( colnr );
  row.setText( colnr, textData );
  text.dispose();
  table.setFocus();
  // Compare pre-edit snapshot (beforeEdit) against the new row content
  // to decide whether an undo entry is needed.
  String[] afterEdit = getItemText( row );
  checkChanged( new String[][]{ beforeEdit }, new String[][]{ afterEdit }, new int[]{ rownr } );
  selectionStart = -1;
  fireContentChangedListener( rownr, colnr, textData );
}
/**
 * Inform the content listener (if any) that a cell's content changed.
 * The row and column indexes are carried in the event's x and y fields,
 * the new text in the event's data field.
 *
 * @param rownr    the row index of the changed cell
 * @param colnr    the column index of the changed cell
 * @param textData the new cell content
 */
private void fireContentChangedListener( int rownr, int colnr, String textData ) {
  if ( lsContent != null ) {
    Event event = new Event();
    event.data = textData;
    event.widget = table;
    event.x = rownr;
    event.y = colnr;
    lsContent.modifyText( new ModifyEvent( event ) );
  }
}
/**
 * Commits the combo editor's selection into the given cell, disposes the
 * combo, records undo information and notifies the content listener.
 *
 * @param row   the table row being edited
 * @param rownr the row index (for undo and listener notification)
 * @param colnr the column index of the edited cell
 */
private void applyComboChange( TableItem row, int rownr, int colnr ) {
  String textData = combo.getText();
  row.setText( colnr, textData );
  combo.dispose();
  String[] afterEdit = getItemText( row );
  checkChanged( new String[][]{ beforeEdit }, new String[][]{ afterEdit }, new int[]{ rownr } );
  selectionStart = -1;
  fireContentChangedListener( rownr, colnr, textData );
}
/**
 * Registers the modify listener that is notified when the table content
 * changes. Note: only a single listener is kept; a second call replaces
 * the first.
 *
 * @param ls the listener to notify on modification
 */
public void addModifyListener( ModifyListener ls ) {
  lsMod = ls;
}
/**
 * Replaces the column metadata at the given index.
 *
 * @param idx zero-based index into the column-info array
 * @param col the new column information
 */
public void setColumnInfo( int idx, ColumnInfo col ) {
  columns[idx] = col;
}
/**
 * Sets the header text of the given table column.
 *
 * @param idx  the table column index
 * @param text the new header text
 */
public void setColumnText( int idx, String text ) {
  TableColumn col = table.getColumn( idx );
  col.setText( text );
}
/**
 * Sets the tooltip of the column metadata at the given index.
 *
 * @param idx  zero-based index into the column-info array
 * @param text the tooltip text
 */
public void setColumnToolTip( int idx, String text ) {
  columns[idx].setToolTip( text );
}
/**
 * Starts editing the currently active cell. Activating a cell in
 * column 0 (the row-number column) selects the row instead of opening
 * an editor. Does nothing when no cell is active.
 */
private void editSelected() {
  if ( activeTableItem == null ) {
    return;
  }
  if ( activeTableColumn <= 0 ) {
    // Row-number column: just select the row.
    selectRows( activeTableRow, activeTableRow );
    return;
  }
  edit( activeTableRow, activeTableColumn );
}
/**
 * Records an undo entry for a change, but only when the fieldChanged
 * flag indicates the content actually changed during the edit.
 *
 * @param before the row contents before the change
 * @param after  the row contents after the change
 * @param index  the affected row indexes
 */
private void checkChanged( String[][] before, String[][] after, int[] index ) {
  // Did we change anything: if so, add undo information
  if ( fieldChanged ) {
    TransAction ta = new TransAction();
    ta.setChanged( before, after, index );
    addUndo( ta );
  }
}
/**
 * Notifies the registered modify listener (if any) that this widget's
 * content has been modified.
 */
private void setModified() {
  if ( lsMod != null ) {
    Event e = new Event();
    e.widget = this;
    lsMod.modifyText( new ModifyEvent( e ) );
  }
}
/**
 * Inserts a new empty row directly before the active row, records undo
 * information, renumbers the rows and starts editing the new row's
 * first data cell. No-op when the table is read-only or no row is
 * active.
 */
private void insertRowBefore() {
  if ( readonly ) {
    return;
  }
  TableItem row = activeTableItem;
  if ( row == null ) {
    return;
  }
  int rownr = table.indexOf( row );
  // Creating a TableItem with an index inserts it at that position.
  TableItem item = new TableItem( table, SWT.NONE, rownr );
  item.setText( 1, "" );
  // Add undo information
  TransAction ta = new TransAction();
  String[] str = getItemText( item );
  ta.setNew( new String[][]{ str }, new int[]{ rownr } );
  addUndo( ta );
  setRowNums();
  edit( rownr, 1 );
}
/**
 * Inserts a new empty row directly after the active row, records undo
 * information, renumbers the rows and starts editing the new row's
 * first data cell. No-op when the table is read-only or no row is
 * active.
 */
private void insertRowAfter() {
  if ( readonly ) {
    return;
  }
  TableItem row = activeTableItem;
  if ( row == null ) {
    return;
  }
  int rownr = table.indexOf( row );
  // Creating a TableItem with an index inserts it at that position.
  TableItem item = new TableItem( table, SWT.NONE, rownr + 1 );
  item.setText( 1, "" );
  // Add undo information
  TransAction ta = new TransAction();
  String[] str = getItemText( item );
  ta.setNew( new String[][]{ str }, new int[]{ rownr + 1 } );
  addUndo( ta );
  setRowNums();
  edit( rownr + 1, 1 );
}
/**
 * Clears all rows from the table without asking for confirmation.
 */
public void clearAll() {
  clearAll( false );
}
/**
 * Removes all rows from the table, optionally asking the user for
 * confirmation first, and leaves a single blank row behind. Editing of
 * the blank row starts immediately unless the table is read-only.
 *
 * @param ask true to show a yes/no confirmation dialog before clearing
 */
public void clearAll( boolean ask ) {
  int id = SWT.YES;
  if ( ask ) {
    MessageBox mb = new MessageBox( parent.getShell(), SWT.YES | SWT.NO | SWT.ICON_QUESTION );
    mb.setMessage( BaseMessages.getString( PKG, "TableView.MessageBox.ClearTable.message" ) );
    mb.setText( BaseMessages.getString( PKG, "TableView.MessageBox.ClearTable.title" ) );
    id = mb.open();
  }
  if ( id == SWT.YES ) {
    table.removeAll();
    new TableItem( table, SWT.NONE );
    if ( !readonly ) {
      edit( 0, 1 );
    }
    this.setModified(); // timh
  }
}
/**
 * Moves the selected rows one position up (offset -1) or down
 * (offset +1), recording undo information for each moved row, and keeps
 * the selection on the moved rows. No-op for any other offset, when
 * nothing is selected, or when the move would push rows past either end
 * of the table.
 *
 * @param offset -1 to move up, +1 to move down
 */
private void moveRows( int offset ) {
  if ( ( offset != 1 ) && ( offset != -1 ) ) {
    return;
  }
  int[] selectionIndicies = table.getSelectionIndices();
  int selectedIndex = table.getSelectionIndex();
  // FIX: getSelectionIndices() returns an empty array when nothing is
  // selected; indexing it below would throw ArrayIndexOutOfBoundsException.
  if ( selectionIndicies.length == 0 ) {
    return;
  }
  // selectionIndicies is not guaranteed to be in any order so must sort
  // before using
  Arrays.sort( selectionIndicies );
  if ( offset == 1 ) {
    if ( selectionIndicies[selectionIndicies.length - 1] >= table.getItemCount() - 1 ) {
      // If the last row in the table is selected then don't move any rows
      // down
      return;
    }
    selectionIndicies = moveRowsDown( selectionIndicies );
  } else {
    if ( selectionIndicies[0] == 0 ) {
      // If the first row in the table is selected then don't move any rows up
      return;
    }
    selectionIndicies = moveRowsUp( selectionIndicies );
  }
  activeTableRow = selectedIndex + offset;
  table.setSelection( activeTableRow );
  table.setSelection( selectionIndicies );
  activeTableItem = table.getItem( activeTableRow );
}
/**
 * Moves each selected row one position down, bottom-most first so every
 * swap has a free slot below it, and records an undo entry per move.
 *
 * @param selectionIndicies sorted row indexes to move; updated in place
 * @return the updated (shifted) selection indexes
 */
private int[] moveRowsDown( int[] selectionIndicies ) {
  for ( int i = selectionIndicies.length - 1; i >= 0; i-- ) {
    int sourceRow = selectionIndicies[i];
    int targetRow = sourceRow + 1;
    moveRow( sourceRow, targetRow );
    TransAction action = new TransAction();
    action.setItemMove( new int[]{ sourceRow }, new int[]{ targetRow } );
    addUndo( action );
    selectionIndicies[i] = targetRow;
  }
  return selectionIndicies;
}
/**
 * Moves each selected row one position up, top-most first so every swap
 * has a free slot above it, and records an undo entry per move.
 *
 * @param selectionIndicies sorted row indexes to move; updated in place
 * @return the updated (shifted) selection indexes
 */
private int[] moveRowsUp( int[] selectionIndicies ) {
  for ( int i = 0; i < selectionIndicies.length; i++ ) {
    int sourceRow = selectionIndicies[i];
    int targetRow = sourceRow - 1;
    moveRow( sourceRow, targetRow );
    TransAction action = new TransAction();
    action.setItemMove( new int[]{ sourceRow }, new int[]{ targetRow } );
    addUndo( action );
    selectionIndicies[i] = targetRow;
  }
  return selectionIndicies;
}
/**
 * Swaps the data cells of two rows and flags the table as modified.
 * Column 0 (the row number) is left untouched.
 *
 * @param from index of the first row
 * @param to   index of the second row
 */
private void moveRow( int from, int to ) {
  TableItem source = table.getItem( from );
  TableItem target = table.getItem( to );
  // Snapshot both rows' cell texts, then write them back crosswise.
  String[] sourceText = getItemText( source );
  String[] targetText = getItemText( target );
  for ( int col = 0; col < sourceText.length; col++ ) {
    source.setText( col + 1, targetText[col] );
    target.setText( col + 1, sourceText[col] );
  }
  setModified();
}
/**
 * Copies the active cell's value into the same column of every row in
 * the table, recording a single undo entry covering all rows. No-op
 * when there is no active cell or the active cell is in the row-number
 * column.
 */
private void copyToAll() {
  TableItem row = activeTableItem;
  if ( row == null || row.isDisposed() ) {
    return;
  }
  int colnr = activeTableColumn;
  if ( colnr == 0 ) {
    return;
  }
  String str = row.getText( colnr );
  // Get undo information: all columns
  int size = table.getItemCount();
  String[][] before = new String[size][];
  String[][] after = new String[size][];
  int[] index = new int[size];
  for ( int i = 0; i < table.getItemCount(); i++ ) {
    TableItem item = table.getItem( i );
    index[i] = i;
    before[i] = getItemText( item );
    item.setText( colnr, str );
    after[i] = getItemText( item );
  }
  // Add the undo information!
  TransAction ta = new TransAction();
  ta.setChanged( before, after, index );
  addUndo( ta );
}
/** Selects every row in the table. */
private void selectAll() {
  table.selectAll();
}
/** Clears the table's row selection. */
private void unselectAll() {
  table.deselectAll();
}
/**
 * Copies the selected rows (as delimited text produced by
 * getSelectedText()) to the system clipboard. Any previously-held
 * clipboard instance is disposed first. No-op when nothing is selected.
 */
private void clipSelected() {
  if ( clipboard != null ) {
    clipboard.dispose();
    clipboard = null;
  }
  clipboard = new Clipboard( getDisplay() );
  TextTransfer tran = TextTransfer.getInstance();
  String clip = getSelectedText();
  if ( clip == null ) {
    return;
  }
  clipboard.setContents( new String[]{ clip }, new Transfer[]{ tran } );
}
/**
 * Builds a textual representation of the current selection: a header
 * line with the column names followed by one line per selected row,
 * cells separated by CLIPBOARD_DELIMITER and lines by Const.CR. The
 * row-number column (column 0) is excluded.
 *
 * @return the delimited text, or null when no rows are selected
 */
private String getSelectedText() {
  TableItem[] items = table.getSelection();
  // Checking the selection first avoids building the header for nothing.
  if ( items.length == 0 ) {
    return null;
  }
  // StringBuilder instead of repeated String concatenation: the old
  // code was O(n^2) in the size of the selection.
  StringBuilder selection = new StringBuilder();
  for ( int c = 1; c < table.getColumnCount(); c++ ) {
    if ( c > 1 ) {
      selection.append( CLIPBOARD_DELIMITER );
    }
    selection.append( table.getColumn( c ).getText() );
  }
  selection.append( Const.CR );
  for ( int r = 0; r < items.length; r++ ) {
    TableItem ti = items[r];
    for ( int c = 1; c < table.getColumnCount(); c++ ) {
      if ( c > 1 ) {
        selection.append( CLIPBOARD_DELIMITER );
      }
      selection.append( ti.getText( c ) );
    }
    selection.append( Const.CR );
  }
  return selection.toString();
}
/*
* Example: ----------------------------------------------------------------- Field in stream;Dimension field
* TIME;TIME DATA_TYPE;DATA_TYPE MAP_TYPE;MAP_TYPE RESOLUTION;RESOLUTION START_TIME;START_TIME
* -----------------------------------------------------------------
*
* !! Paste at the end of the table! --> Create new table item for every line
*/
/**
 * Determines the index of the active row, falling back to 0 when the
 * table has at most one row, no row is active, or the active row is no
 * longer part of the table.
 *
 * @return the active row index, or 0 as a safe default
 */
private int getCurrentRownr() {
  if ( table.getItemCount() <= 1 || activeTableItem == null ) {
    return 0;
  }
  // indexOf returns -1 for items no longer in the table.
  int rownr = table.indexOf( activeTableItem );
  return Math.max( rownr, 0 );
}
/**
 * Pastes tab/CR delimited text from the system clipboard into the table
 * as new rows after the current row, recording undo information. The
 * first line of the clipboard text is skipped (it is treated as a
 * header line; see the format example in the comment above). An empty
 * initial row at position 0 is removed after pasting.
 */
private void pasteSelected() {
  int rownr = getCurrentRownr();
  if ( clipboard != null ) {
    clipboard.dispose();
    clipboard = null;
  }
  clipboard = new Clipboard( getDisplay() );
  TextTransfer tran = TextTransfer.getInstance();
  // NOTE: this local shadows the 'text' editor-widget field.
  String text = (String) clipboard.getContents( tran );
  if ( text != null ) {
    String[] lines = text.split( Const.CR );
    if ( lines.length > 1 ) {
      // Allocate complete paste grid!
      String[][] grid = new String[lines.length - 1][];
      int[] idx = new int[lines.length - 1];
      // Start at 1: the first line is the header and is not pasted.
      for ( int i = 1; i < lines.length; i++ ) {
        grid[i - 1] = lines[i].split( "\t" );
        idx[i - 1] = rownr + i;
        addItem( idx[i - 1], grid[i - 1] );
      }
      TransAction ta = new TransAction();
      ta.setNew( grid, idx );
      addUndo( ta );
    }
    if ( rownr == 0 && table.getItemCount() > rownr + 1 ) {
      // Empty row at rownr?
      // Remove it!
      if ( isEmpty( rownr, -1 ) ) {
        table.remove( rownr );
      }
    }
    setRowNums();
    unEdit();
    setModified();
  }
}
/**
 * Inserts a new row at the given position, fills its data cells with
 * the supplied values and flags the table as modified. Cell 0 (the row
 * number) is left for setRowNums() to fill.
 *
 * @param pos the insert position
 * @param str the cell values for columns 1..n
 */
private void addItem( int pos, String[] str ) {
  TableItem item = new TableItem( table, SWT.NONE, pos );
  int col = 1;
  for ( String value : str ) {
    item.setText( col++, value );
  }
  setModified();
}
/**
 * Cuts the selected rows: copies them to the clipboard, then deletes
 * them from the table.
 */
private void cutSelected() {
  clipSelected(); // copy selected lines to clipboard
  delSelected();
}
/**
 * Deletes the selected rows, recording undo information, re-inserting a
 * single blank row when the table becomes empty, and repositioning the
 * cursor near where it was. No-op when the table has no non-empty rows
 * or nothing is selected.
 */
private void delSelected() {
  if ( nrNonEmpty() == 0 ) {
    return;
  }
  // Which items do we delete?
  int[] items = table.getSelectionIndices();
  if ( items.length == 0 ) {
    return;
  }
  // Save undo information
  String[][] before = new String[items.length][];
  for ( int i = 0; i < items.length; i++ ) {
    TableItem ti = table.getItem( items[i] );
    before[i] = getItemText( ti );
  }
  TransAction ta = new TransAction();
  ta.setDelete( before, items );
  addUndo( ta );
  TableItem row = activeTableItem;
  if ( row == null ) {
    return;
  }
  // Remember where the cursor was so it can be restored after deletion.
  int rowbefore = table.indexOf( row );
  // Delete selected items.
  table.remove( items );
  if ( table.getItemCount() == 0 ) {
    TableItem item = new TableItem( table, SWT.NONE );
    // Save undo information!
    String[] stritem = getItemText( item );
    ta = new TransAction();
    ta.setNew( new String[][]{ stritem }, new int[]{ 0 } );
    addUndo( ta );
  }
  // If the last row is gone, put the selection back on last-1!
  if ( rowbefore >= table.getItemCount() ) {
    rowbefore = table.getItemCount() - 1;
  }
  // After the delete, we put the cursor on the same row as before (if we can)
  if ( rowbefore < table.getItemCount() && table.getItemCount() > 0 ) {
    setPosition( rowbefore, 1 );
    table.setSelection( rowbefore );
    activeTableRow = rowbefore;
  }
  setRowNums();
  setModified();
}
/**
 * Deletes every row that is NOT selected ("keep only selection"),
 * recording undo information and re-inserting a single blank row when
 * the table becomes empty. No-op when everything is selected.
 */
private void keepSelected() {
  // Which items are selected?
  int[] sels = table.getSelectionIndices();
  int size = table.getItemCount();
  // Which items do we delete?
  int[] items = new int[size - sels.length];
  if ( items.length == 0 ) {
    return; // everything is selected: keep everything, do nothing.
  }
  // O(1) membership test instead of scanning sels once per row
  // (the old nested loop was O(rows * selection)).
  Set<Integer> selected = new HashSet<Integer>();
  for ( int sel : sels ) {
    selected.add( sel );
  }
  // Set the item-indices to delete...
  int nr = 0;
  for ( int i = 0; i < table.getItemCount(); i++ ) {
    if ( !selected.contains( i ) ) {
      items[nr] = i;
      nr++;
    }
  }
  // Save undo information
  String[][] before = new String[items.length][];
  for ( int i = 0; i < items.length; i++ ) {
    TableItem ti = table.getItem( items[i] );
    before[i] = getItemText( ti );
  }
  TransAction ta = new TransAction();
  ta.setDelete( before, items );
  addUndo( ta );
  // Delete selected items.
  table.remove( items );
  if ( table.getItemCount() == 0 ) {
    TableItem item = new TableItem( table, SWT.NONE );
    // Save undo information!
    String[] stritem = getItemText( item );
    ta = new TransAction();
    ta.setNew( new String[][]{ stritem }, new int[]{ 0 } );
    addUndo( ta );
  }
  /*
   * try { table.getRow(); } catch(Exception e) // Index is too high: lower to last available value {
   * setPosition(table.getItemCount()-1, 1); }
   */
  setRowNums();
  setModified();
}
/**
 * Updates the active-cell bookkeeping (row, column and TableItem) to
 * the given position. A negative row leaves activeTableItem unchanged.
 *
 * @param rownr the new active row index
 * @param colnr the new active column index
 */
private void setPosition( int rownr, int colnr ) {
  activeTableColumn = colnr;
  activeTableRow = rownr;
  if ( rownr >= 0 ) {
    activeTableItem = table.getItem( rownr );
  }
}
/**
 * Starts editing the given cell with the current content pre-selected.
 *
 * @param rownr the row to edit
 * @param colnr the column to edit
 */
public void edit( int rownr, int colnr ) {
  setPosition( rownr, colnr );
  edit( rownr, colnr, true, (char) 0 );
}
/**
 * Opens the appropriate cell editor (text, combo or button, depending
 * on the column type) on the given cell, after disposing any editor
 * that is still open.
 *
 * @param rownr      the row to edit
 * @param colnr      the column to edit
 * @param selectText true to pre-select the cell's current text
 * @param extra      an extra character to append to the content
 *                   ((char) 0 for none), e.g. the key that triggered
 *                   the edit
 */
private void edit( int rownr, int colnr, boolean selectText, char extra ) {
  selectionStart = -1;
  TableItem row = table.getItem( rownr );
  // Dispose any previously-open editor widget before creating a new one.
  Control oldEditor = editor.getEditor();
  if ( oldEditor != null && !oldEditor.isDisposed() ) {
    try {
      oldEditor.dispose();
    } catch ( SWTException swte ) {
      // Eat "Widget Is Disposed Exception" : did you ever!!!
    }
  }
  activeTableItem = table.getItem( activeTableRow ); // just to make sure, clean
  // up afterwards.
  table.showItem( row );
  table.setSelection( new TableItem[]{ row } );
  if ( columns.length == 0 ) {
    return;
  }
  // Dispatch on the column type (colnr is 1-based; columns is 0-based).
  switch ( columns[colnr - 1].getType() ) {
    case ColumnInfo.COLUMN_TYPE_TEXT:
      isTextButton = false;
      editText( row, rownr, colnr, selectText, extra, columns[colnr - 1] );
      break;
    case ColumnInfo.COLUMN_TYPE_CCOMBO:
    case ColumnInfo.COLUMN_TYPE_FORMAT:
      editCombo( row, rownr, colnr );
      break;
    case ColumnInfo.COLUMN_TYPE_BUTTON:
      editButton( row, rownr, colnr );
      break;
    case ColumnInfo.COLUMN_TYPE_TEXT_BUTTON:
      if ( columns[colnr - 1].shouldRenderTextVarButton() ) {
        isTextButton = true;
      } else {
        isTextButton = false;
      }
      editText( row, rownr, colnr, selectText, extra, columns[colnr - 1] );
      break;
    default:
      break;
  }
}
/**
 * Returns the data-cell texts of a row, excluding column 0 (the row
 * number).
 *
 * @param row the table row to read
 * @return the cell texts of columns 1..n, or null when the row is
 *         disposed
 */
private String[] getItemText( TableItem row ) {
  if ( row.isDisposed() ) {
    return null;
  }
  int nrFields = table.getColumnCount() - 1;
  String[] result = new String[nrFields];
  for ( int col = 0; col < nrFields; col++ ) {
    result[col] = row.getText( col + 1 );
  }
  return result;
}
/**
 * Opens an in-place text editor on the given cell. Depending on the
 * column configuration this is a TextVar (variable substitution,
 * optionally a password field or a TextVar-with-button) or a plain SWT
 * Text widget. Skips read-only and disabled columns; if the column has
 * a selection adapter, that adapter is invoked instead of opening an
 * editor.
 *
 * @param row        the table row being edited
 * @param rownr      the row index
 * @param colnr      the column index (1-based)
 * @param selectText true to pre-select the cell content in the editor
 * @param extra      extra character appended to the content ((char) 0
 *                   for none)
 * @param columnInfo the column's metadata
 */
private void editText( TableItem row, final int rownr, final int colnr, boolean selectText, char extra,
  ColumnInfo columnInfo ) {
  // Snapshot the row for undo comparison in applyTextChange().
  beforeEdit = getItemText( row );
  fieldChanged = false;
  ColumnInfo colinfo = columns[colnr - 1];
  if ( colinfo.isReadOnly() ) {
    return;
  }
  if ( colinfo.getDisabledListener() != null ) {
    boolean disabled = colinfo.getDisabledListener().isFieldDisabled( rownr );
    if ( disabled ) {
      return;
    }
  }
  if ( text != null && !text.isDisposed() ) {
    text.dispose();
  }
  // A selection adapter replaces in-place editing entirely (e.g. opens
  // a dialog); the cell position travels in the event's x/y fields.
  if ( colinfo.getSelectionAdapter() != null ) {
    Event e = new Event();
    e.widget = this;
    e.x = colnr;
    e.y = rownr;
    columns[colnr - 1].getSelectionAdapter().widgetSelected( new SelectionEvent( e ) );
    return;
  }
  String content = row.getText( colnr ) + ( extra != 0 ? "" + extra : "" );
  String tooltip = columns[colnr - 1].getToolTip();
  final boolean useVariables = columns[colnr - 1].isUsingVariables();
  final boolean passwordField = columns[colnr - 1].isPasswordField();
  // Grow the column as the user types so the text stays visible.
  final ModifyListener modifyListener = new ModifyListener() {
    @Override
    public void modifyText( ModifyEvent me ) {
      setColumnWidthBasedOnTextField( colnr, useVariables );
    }
  };
  if ( useVariables ) {
    GetCaretPositionInterface getCaretPositionInterface = new GetCaretPositionInterface() {
      @Override
      public int getCaretPosition() {
        return ( (TextVar) text ).getTextWidget().getCaretPosition();
      }
    };
    // The text widget will be disposed when we get here
    // So we need to write to the table row
    //
    InsertTextInterface insertTextInterface = new InsertTextInterface() {
      @Override
      public void insertText( String string, int position ) {
        StringBuilder buffer = new StringBuilder( table.getItem( rownr ).getText( colnr ) );
        buffer.insert( position, string );
        table.getItem( rownr ).setText( colnr, buffer.toString() );
        int newPosition = position + string.length();
        edit( rownr, colnr );
        ( (TextVar) text ).setSelection( newPosition );
        ( (TextVar) text ).showSelection();
        setColumnWidthBasedOnTextField( colnr, useVariables );
      }
    };
    // Pick the TextVar flavor matching the column configuration.
    final TextVar textWidget;
    if ( passwordField ) {
      textWidget = new PasswordTextVar( variables, table, SWT.NONE, getCaretPositionInterface, insertTextInterface );
    } else if ( isTextButton ) {
      textWidget =
        new TextVarButton( variables, table, SWT.NONE, getCaretPositionInterface, insertTextInterface,
          columnInfo.getTextVarButtonSelectionListener() );
    } else {
      textWidget = new TextVar( variables, table, SWT.NONE, getCaretPositionInterface, insertTextInterface );
    }
    text = textWidget;
    textWidget.setText( content );
    if ( lsMod != null ) {
      textWidget.addModifyListener( lsMod );
    }
    textWidget.addModifyListener( lsUndo );
    textWidget.setSelection( content.length() );
    // last_carret_position = content.length();
    textWidget.addKeyListener( lsKeyText );
    // Make the column larger so we can still see the string we're entering...
    textWidget.addModifyListener( modifyListener );
    if ( selectText ) {
      textWidget.selectAll();
    }
    if ( tooltip != null ) {
      textWidget.setToolTipText( tooltip );
    } else {
      textWidget.setToolTipText( "" );
    }
    textWidget.addTraverseListener( lsTraverse );
    textWidget.addFocusListener( lsFocusText );
  } else {
    // Plain Text widget: same wiring as the TextVar branch above.
    Text textWidget = new Text( table, SWT.NONE );
    text = textWidget;
    textWidget.setText( content );
    if ( lsMod != null ) {
      textWidget.addModifyListener( lsMod );
    }
    textWidget.addModifyListener( lsUndo );
    textWidget.setSelection( content.length() );
    // last_carret_position = content.length();
    textWidget.addKeyListener( lsKeyText );
    // Make the column larger so we can still see the string we're entering...
    textWidget.addModifyListener( modifyListener );
    if ( selectText ) {
      textWidget.selectAll();
    }
    if ( tooltip != null ) {
      textWidget.setToolTipText( tooltip );
    } else {
      textWidget.setToolTipText( "" );
    }
    textWidget.addTraverseListener( lsTraverse );
    textWidget.addFocusListener( lsFocusText );
  }
  props.setLook( text, Props.WIDGET_STYLE_TABLE );
  int width = tablecolumn[colnr].getWidth();
  int height = 30;
  editor.horizontalAlignment = SWT.LEFT;
  editor.grabHorizontal = true;
  // Open the text editor in the correct column of the selected row.
  editor.setEditor( text, row, colnr );
  text.setFocus();
  text.setSize( width, height );
  editor.layout();
}
/**
 * Widens the column being edited when the editor's text no longer fits,
 * then restores the editor's caret/selection, which is disturbed by the
 * resize on some platforms.
 *
 * @param colnr        the column being edited
 * @param useVariables true when the editor is a TextVar rather than a
 *                     plain Text widget
 */
private void setColumnWidthBasedOnTextField( final int colnr, final boolean useVariables ) {
  String str = getTextWidgetValue( colnr );
  // Measure the text plus some slack using the shared measuring GC.
  int strmax = TableView.dummyGC.textExtent( str, SWT.DRAW_TAB | SWT.DRAW_DELIMITER ).x + 20;
  int colmax = tablecolumn[colnr].getWidth();
  if ( strmax > colmax ) {
    if ( Const.isOSX() || Const.isLinux() ) {
      strmax *= 1.4;
    }
    tablecolumn[colnr].setWidth( strmax + 30 );
    // On linux, this causes the text to select everything...
    // This is because the focus is lost and re-gained. Nothing we can do
    // about it now.
    if ( useVariables ) {
      TextVar widget = (TextVar) text;
      // Remember and restore the caret position around the
      // select/deselect dance that forces the selection to reset.
      int idx = widget.getTextWidget().getCaretPosition();
      widget.selectAll();
      widget.showSelection();
      widget.setSelection( 0 );
      widget.showSelection();
      widget.setSelection( idx );
    } else {
      Text widget = (Text) text;
      int idx = widget.getCaretPosition();
      widget.selectAll();
      widget.showSelection();
      widget.setSelection( 0 );
      widget.showSelection();
      widget.setSelection( idx );
    }
  }
}
/**
 * Determines the options to show in a combo editor. FORMAT-type columns
 * derive their options from the value type named in the companion
 * field-type column of the same row; all other columns use their
 * configured combo values.
 *
 * @param row     the row being edited
 * @param colinfo the column's metadata
 * @return the combo option strings
 */
private String[] getComboValues( TableItem row, ColumnInfo colinfo ) {
  if ( colinfo.getType() != ColumnInfo.COLUMN_TYPE_FORMAT ) {
    return colinfo.getComboValues();
  }
  int type = ValueMetaFactory.getIdForValueMeta( row.getText( colinfo.getFieldTypeColumn() ) );
  switch ( type ) {
    case ValueMetaInterface.TYPE_DATE:
      return Const.getDateFormats();
    case ValueMetaInterface.TYPE_INTEGER:
    case ValueMetaInterface.TYPE_BIGNUMBER:
    case ValueMetaInterface.TYPE_NUMBER:
      return Const.getNumberFormats();
    case ValueMetaInterface.TYPE_STRING:
      return Const.getConversionFormats();
    default:
      return new String[0];
  }
}
/**
 * Opens an in-place CCombo editor on the given cell, populated via
 * getComboValues() or the column's ComboValuesSelectionListener. Skips
 * disabled columns and read-only columns that have a selection adapter.
 *
 * @param row   the table row being edited
 * @param rownr the row index
 * @param colnr the column index (1-based)
 */
private void editCombo( TableItem row, int rownr, int colnr ) {
  // Snapshot the row for undo comparison in applyComboChange().
  beforeEdit = getItemText( row );
  fieldChanged = false;
  ColumnInfo colinfo = columns[colnr - 1];
  if ( colinfo.isReadOnly() && colinfo.getSelectionAdapter() != null ) {
    return;
  }
  if ( colinfo.getDisabledListener() != null ) {
    boolean disabled = colinfo.getDisabledListener().isFieldDisabled( rownr );
    if ( disabled ) {
      return;
    }
  }
  combo = new CCombo( table, colinfo.isReadOnly() ? SWT.READ_ONLY : SWT.NONE );
  props.setLook( combo, Props.WIDGET_STYLE_TABLE );
  combo.addTraverseListener( lsTraverse );
  combo.addModifyListener( lsModCombo );
  combo.addFocusListener( lsFocusCombo );
  String[] opt = getComboValues( row, colinfo );
  // A per-row listener can override the static combo values.
  if ( colinfo.getComboValuesSelectionListener() != null ) {
    opt = colinfo.getComboValuesSelectionListener().getComboValues( row, rownr, colnr );
  }
  combo.setItems( opt );
  combo.setVisibleItemCount( opt.length );
  combo.setText( row.getText( colnr ) );
  if ( lsMod != null ) {
    combo.addModifyListener( lsMod );
  }
  combo.addModifyListener( lsUndo );
  combo.setToolTipText( colinfo.getToolTip() == null ? "" : colinfo.getToolTip() );
  combo.setVisible( true );
  combo.addKeyListener( lsKeyCombo );
  if ( colinfo.getSelectionAdapter() != null ) {
    combo.addSelectionListener( columns[colnr - 1].getSelectionAdapter() );
  }
  editor.horizontalAlignment = SWT.LEFT;
  editor.layout();
  // Open the text editor in the correct column of the selected row.
  editor.setEditor( combo, row, colnr );
  combo.setFocus();
  combo.layout();
}
/**
 * Opens an in-place push-button editor on the given cell, wired to the
 * column's selection adapter. Skips read-only and disabled columns. The
 * cell position is remembered in buttonRownr/buttonColnr and the cell's
 * content in buttonContent for use by the button's handlers.
 *
 * @param row   the table row being edited
 * @param rownr the row index
 * @param colnr the column index (1-based)
 */
private void editButton( TableItem row, int rownr, int colnr ) {
  // Snapshot the row for undo comparison.
  beforeEdit = getItemText( row );
  fieldChanged = false;
  ColumnInfo colinfo = columns[colnr - 1];
  if ( colinfo.isReadOnly() ) {
    return;
  }
  if ( colinfo.getDisabledListener() != null ) {
    boolean disabled = colinfo.getDisabledListener().isFieldDisabled( rownr );
    if ( disabled ) {
      return;
    }
  }
  button = new Button( table, SWT.PUSH );
  props.setLook( button, Props.WIDGET_STYLE_TABLE );
  String buttonText = columns[colnr - 1].getButtonText();
  if ( buttonText != null ) {
    button.setText( buttonText );
  }
  button.setImage( GUIResource.getInstance().getImage( "ui/images/edittext.svg" ) );
  SelectionListener selAdpt = colinfo.getSelectionAdapter();
  if ( selAdpt != null ) {
    button.addSelectionListener( selAdpt );
  }
  buttonRownr = rownr;
  buttonColnr = colnr;
  // button.addTraverseListener(lsTraverse);
  buttonContent = row.getText( colnr );
  String tooltip = columns[colnr - 1].getToolTip();
  if ( tooltip != null ) {
    button.setToolTipText( tooltip );
  } else {
    button.setToolTipText( "" );
  }
  button.addTraverseListener( lsTraverse ); // hop to next field
  // Close the button editor when the user tabs away from it.
  button.addTraverseListener( new TraverseListener() {
    @Override
    public void keyTraversed( TraverseEvent arg0 ) {
      closeActiveButton();
    }
  } );
  editor.horizontalAlignment = SWT.LEFT;
  editor.verticalAlignment = SWT.TOP;
  editor.grabHorizontal = false;
  editor.grabVertical = false;
  Point size = button.computeSize( SWT.DEFAULT, SWT.DEFAULT );
  editor.minimumWidth = size.x;
  editor.minimumHeight = size.y - 2;
  // setRowNums();
  editor.layout();
  // Open the text editor in the correct column of the selected row.
  editor.setEditor( button );
  button.setFocus();
  // if the button loses focus, destroy it...
  /*
   * button.addFocusListener(new FocusAdapter() { public void focusLost(FocusEvent e) { button.dispose(); } } );
   */
}
/**
 * Renumbers column 0 of every row with 1-based row numbers. A cell is
 * only rewritten when its text actually differs, avoiding needless
 * redraws.
 */
public void setRowNums() {
  int itemCount = table.getItemCount();
  for ( int rownr = 0; rownr < itemCount; rownr++ ) {
    TableItem item = table.getItem( rownr );
    if ( item == null ) {
      continue;
    }
    String num = Integer.toString( rownr + 1 );
    if ( !num.equals( item.getText( 0 ) ) ) {
      item.setText( 0, num );
    }
  }
}
/**
 * Optimizes the column widths taking all rows into account.
 *
 * @param header true to also take the column header text into account
 */
public void optWidth( boolean header ) {
  optWidth( header, 0 );
}
/**
 * Optimizes each column's width to fit its content: the widest of the
 * header text (optional), the combo/button option texts, and the cell
 * texts of up to nrLines rows. Columns with an explicitly configured
 * width keep that width. Closes any open cell editor afterwards.
 *
 * @param header  true to take the column header text into account
 * @param nrLines the maximum number of rows to measure; 0 or negative
 *                measures all rows
 */
public void optWidth( boolean header, int nrLines ) {
  for ( int c = 0; c < table.getColumnCount(); c++ ) {
    TableColumn tc = table.getColumn( c );
    int max = 0;
    if ( header ) {
      max = TableView.dummyGC.textExtent( tc.getText(), SWT.DRAW_TAB | SWT.DRAW_DELIMITER ).x;
      // Check if the column has a sorted mark set. In that case, we need the
      // header to be a bit wider...
      //
      if ( c == sortfield && sortable ) {
        max += 15;
      }
    }
    // Collect the distinct strings whose widths determine the column width.
    Set<String> columnStrings = new HashSet<String>();
    boolean haveToGetTexts = false;
    if ( c > 0 ) {
      final ColumnInfo column = columns[c - 1];
      if ( column != null ) {
        switch ( column.getType() ) {
          case ColumnInfo.COLUMN_TYPE_TEXT:
            haveToGetTexts = true;
            break;
          case ColumnInfo.COLUMN_TYPE_CCOMBO:
          case ColumnInfo.COLUMN_TYPE_FORMAT:
            haveToGetTexts = true;
            if ( column.getComboValues() != null ) {
              for ( String comboValue : columns[c - 1].getComboValues() ) {
                columnStrings.add( comboValue );
              }
            }
            break;
          case ColumnInfo.COLUMN_TYPE_BUTTON:
            columnStrings.add( column.getButtonText() );
            break;
          default:
            break;
        }
      }
    } else {
      haveToGetTexts = true;
    }
    if ( haveToGetTexts ) {
      for ( int r = 0; r < table.getItemCount() && ( r < nrLines || nrLines <= 0 ); r++ ) {
        TableItem ti = table.getItem( r );
        if ( ti != null ) {
          columnStrings.add( ti.getText( c ) );
        }
      }
    }
    for ( String str : columnStrings ) {
      int len = TableView.dummyGC.textExtent( str == null ? "" : str, SWT.DRAW_TAB | SWT.DRAW_DELIMITER ).x;
      if ( len > max ) {
        max = len;
      }
    }
    try {
      int extra = 15;
      if ( Const.isWindows() ) {
        extra += 15;
      } else if ( Const.isLinux() ) {
        extra += 15;
      }
      // Platform specific code not needed any more with current version SWT
      // if (Const.isOSX() || Const.isLinux()) max*=1.25;
      if ( tc.getWidth() != max + extra ) {
        if ( c > 0 ) {
          if ( columns[c - 1].getWidth() == -1 ) {
            tc.setWidth( max + extra );
          } else {
            // Explicitly configured width wins over the measured one.
            tc.setWidth( columns[c - 1].getWidth() );
          }
        } else if ( addIndexColumn ) {
          // FIX: the index column (c == 0) has no ColumnInfo; the old code
          // evaluated columns[-1] here and relied on the catch below to
          // swallow the ArrayIndexOutOfBoundsException, so the index
          // column's width was never actually set.
          tc.setWidth( max + extra );
        }
      }
    } catch ( Exception e ) {
      // Ignore errors
    }
  }
  unEdit();
}
/**
 * Remove empty rows in the table. A row is empty when all of its data
 * cells are empty; see isEmpty(int, int) with colnr == -1.
 */
public void removeEmptyRows() {
  removeEmptyRows( -1 );
}
/**
 * Checks whether a row (or one cell of it) is empty.
 *
 * @param rownr the row to inspect
 * @param colnr the cell to check, or a negative value to require every
 *              data cell (columns 1..n) to be empty
 * @return true when the requested cell(s) are empty; false when the row
 *         does not exist
 */
private boolean isEmpty( int rownr, int colnr ) {
  TableItem item = table.getItem( rownr );
  if ( item == null ) {
    return false;
  }
  if ( colnr >= 0 ) {
    // Single-cell check.
    String str = item.getText( colnr );
    return str == null || str.length() == 0;
  }
  // Whole-row check: any non-empty data cell makes the row non-empty.
  for ( int col = 1; col < table.getColumnCount(); col++ ) {
    String str = item.getText( col );
    if ( str != null && str.length() > 0 ) {
      return false;
    }
  }
  return true;
}
/**
 * Removes every row that is empty in the given column (or entirely
 * empty when column is negative), keeping at least one blank row in the
 * table.
 *
 * @param column the column to test, or a negative value to test the
 *               whole row
 */
public void removeEmptyRows( int column ) {
  // Iterate backwards so removals don't shift the indexes still to visit.
  for ( int rownr = table.getItemCount() - 1; rownr >= 0; rownr-- ) {
    if ( isEmpty( rownr, column ) ) {
      table.remove( rownr );
    }
  }
  // The table always keeps at least one (empty) row.
  if ( table.getItemCount() == 0 ) {
    new TableItem( table, SWT.NONE );
  }
}
private List<Integer> nonEmptyIndexes;
/**
 * Counts the non-empty rows in the table and caches their indexes.
 * IMPORTANT: always call this method before calling getNonEmpty(int selnr):
 * for performance reasons we cache the row indexes.
 *
 * @return the number of rows/table-items that are not empty
 */
public int nrNonEmpty() {
  List<Integer> indexes = new ArrayList<Integer>();
  int itemCount = table.getItemCount();
  for ( int row = 0; row < itemCount; row++ ) {
    if ( !isEmpty( row, -1 ) ) {
      indexes.add( row );
    }
  }
  nonEmptyIndexes = indexes;
  return indexes.size();
}
/**
 * Returns the non-empty row/table-item at the given position within the cache
 * built by nrNonEmpty(). IMPORTANT: the indexes of the non-empty rows are
 * populated by nrNonEmpty() — make sure to call that first.
 *
 * @param index the index of the non-empty row/table-item
 * @return the requested non-empty row/table-item
 */
public TableItem getNonEmpty( int index ) {
  return table.getItem( nonEmptyIndexes.get( index ) );
}
/**
 * Searches one column of the non-empty rows for a value (case-insensitively).
 *
 * @param str    the value to look for
 * @param column the column to search in
 * @return the index within the non-empty rows, or -1 when not found
 */
public int indexOfString( String str, int column ) {
  int nonEmptyCount = nrNonEmpty();
  for ( int row = 0; row < nonEmptyCount; row++ ) {
    if ( str.equalsIgnoreCase( getNonEmpty( row ).getText( column ) ) ) {
      return row;
    }
  }
  return -1;
}
/** @return the horizontal scrollbar of the embedded table widget */
@Override
public ScrollBar getHorizontalBar() {
  return table.getHorizontalBar();
}

/** @return the vertical scrollbar of the embedded table widget */
@Override
public ScrollBar getVerticalBar() {
  return table.getVerticalBar();
}
/**
 * Records a new undoable action. Any redo history beyond the current undo
 * position is discarded first, and the total history is then trimmed to the
 * configured maximum number of undo levels.
 *
 * @param ta the action to record
 */
private void addUndo( TransAction ta ) {
  // A new action invalidates everything that could still be redone.
  while ( undo.size() > 0 && undo.size() > undoPosition + 1 ) {
    undo.remove( undo.size() - 1 );
  }
  undo.add( ta );
  undoPosition++;
  // Enforce the maximum history length, dropping the oldest entries first.
  while ( undo.size() > props.getMaxUndo() ) {
    undo.remove( 0 );
    undoPosition--;
  }
  setUndoMenu();
}
/**
 * Reverts the action at the current undo position and moves the position
 * back. Afterwards the cursor row is clamped to a valid index and
 * re-selected. Does nothing when there is no action left to undo.
 */
private void undoAction() {
  TransAction ta = previousUndo();
  if ( ta == null ) {
    return;
  }
  // Get the current cursor position
  int rownr = getCurrentRownr();
  setUndoMenu(); // something changed: change the menu
  // NOTE: the local 'idx' is declared in the first case and reused by the
  // later cases — all cases of a Java switch share one scope.
  switch ( ta.getType() ) {
    //
    // NEW
    //
    // We created a table item: undo this...
    case TransAction.TYPE_ACTION_NEW_TABLEITEM:
      int[] idx = ta.getCurrentIndex();
      table.remove( idx );
      for ( int i = 0; i < idx.length; i++ ) {
        if ( idx[i] < rownr ) {
          rownr--; // shift with the rest.
        }
      }
      // See if the table is empty, if so : undo again!!
      if ( table.getItemCount() == 0 ) {
        undoAction();
      }
      setRowNums();
      break;
    //
    // DELETE
    //
    // un-Delete the rows at correct location: re-insert
    case TransAction.TYPE_ACTION_DELETE_TABLEITEM:
      idx = ta.getCurrentIndex();
      String[][] str = (String[][]) ta.getCurrent();
      for ( int i = 0; i < idx.length; i++ ) {
        addItem( idx[i], str[i] );
        if ( idx[i] <= rownr ) {
          rownr++;
        }
      }
      setRowNums();
      break;
    //
    // CHANGE
    //
    // Change the item back to the original row-value.
    case TransAction.TYPE_ACTION_CHANGE_TABLEITEM:
      idx = ta.getCurrentIndex();
      String[][] prev = (String[][]) ta.getPrevious();
      for ( int x = 0; x < idx.length; x++ ) {
        TableItem item = table.getItem( idx[x] );
        for ( int i = 0; i < prev[x].length; i++ ) {
          // Column 0 is the row-number column, so cell values start at column 1.
          item.setText( i + 1, prev[x][i] );
        }
      }
      break;
    //
    // POSITION
    //
    // The position of a row has changed...
    case TransAction.TYPE_ACTION_POSITION_TABLEITEM:
      int[] curr = ta.getCurrentIndex();
      int[] prevIdx = ta.getPreviousIndex();
      for ( int i = 0; i < curr.length; i++ ) {
        moveRow( prevIdx[i], curr[i] );
      }
      setRowNums();
      break;
    default:
      break;
  }
  // Clamp the cursor to the valid row range before re-selecting.
  if ( rownr >= table.getItemCount() ) {
    rownr = table.getItemCount() - 1;
  }
  if ( rownr < 0 ) {
    rownr = 0;
  }
  // cursor.setSelection(rownr, 0);
  selectRows( rownr, rownr );
}
/**
 * Re-applies the most recently undone action (the inverse of undoAction())
 * and advances the undo position. Afterwards the cursor row is clamped to a
 * valid index and re-selected. Does nothing when there is nothing to redo.
 */
private void redoAction() {
  TransAction ta = nextUndo();
  if ( ta == null ) {
    return;
  }
  // Get the current cursor position
  int rownr = getCurrentRownr();
  setUndoMenu(); // something changed: change the menu
  // NOTE: the local 'idx' is declared in the first case and reused by the
  // later cases — all cases of a Java switch share one scope.
  switch ( ta.getType() ) {
    //
    // NEW
    //
    // Re-insert the previously created rows at their recorded indexes.
    case TransAction.TYPE_ACTION_NEW_TABLEITEM:
      int[] idx = ta.getCurrentIndex();
      String[][] str = (String[][]) ta.getCurrent();
      for ( int i = 0; i < idx.length; i++ ) {
        addItem( idx[i], str[i] );
        if ( idx[i] <= rownr ) {
          rownr++; // Shift cursor position with the new items...
        }
      }
      setRowNums();
      break;
    //
    // DELETE
    //
    // Re-delete the rows that the preceding undo restored.
    case TransAction.TYPE_ACTION_DELETE_TABLEITEM:
      idx = ta.getCurrentIndex();
      table.remove( idx );
      for ( int i = 0; i < idx.length; i++ ) {
        if ( idx[i] < rownr ) {
          rownr--; // shift with the rest.
        }
      }
      // See if the table is empty, if so : undo again!!
      if ( table.getItemCount() == 0 ) {
        undoAction();
      }
      setRowNums();
      break;
    //
    // CHANGE
    //
    // Re-apply the "current" (post-change) cell values.
    case TransAction.TYPE_ACTION_CHANGE_TABLEITEM:
      idx = ta.getCurrentIndex();
      String[][] curr = (String[][]) ta.getCurrent();
      for ( int x = 0; x < idx.length; x++ ) {
        TableItem item = table.getItem( idx[x] );
        for ( int i = 0; i < curr[x].length; i++ ) {
          // Column 0 is the row-number column, so cell values start at column 1.
          item.setText( i + 1, curr[x][i] );
        }
      }
      break;
    //
    // CHANGE POSITION
    //
    // Move the rows back to their recorded positions.
    case TransAction.TYPE_ACTION_POSITION_TABLEITEM:
      int[] currIdx = ta.getCurrentIndex();
      int[] prev = ta.getPreviousIndex();
      for ( int i = 0; i < currIdx.length; i++ ) {
        moveRow( currIdx[i], prev[i] );
      }
      setRowNums();
      break;
    default:
      break;
  }
  // Clamp the cursor to the valid row range before re-selecting.
  if ( rownr >= table.getItemCount() ) {
    rownr = table.getItemCount() - 1;
  }
  if ( rownr < 0 ) {
    rownr = 0;
  }
  // cursor.setSelection(rownr, 0);
  selectRows( rownr, rownr );
}
/**
 * Refreshes the text and enabled state of the undo/redo menu items so they
 * describe the actions currently available at the undo position.
 */
private void setUndoMenu() {
  TransAction prev = viewPreviousUndo();
  TransAction next = viewNextUndo();
  // The menu items may already be gone when the widget is being torn down.
  if ( miEditUndo.isDisposed() || miEditRedo.isDisposed() ) {
    return;
  }
  if ( prev == null ) {
    miEditUndo.setEnabled( false );
    miEditUndo.setText( OsHelper.customizeMenuitemText( BaseMessages.getString(
      PKG, "TableView.menu.UndoNotAvailable" ) ) );
  } else {
    miEditUndo.setEnabled( true );
    miEditUndo.setText( OsHelper.customizeMenuitemText( BaseMessages.getString( PKG, "TableView.menu.Undo", prev
      .toString() ) ) );
  }
  if ( next == null ) {
    miEditRedo.setEnabled( false );
    miEditRedo.setText( OsHelper.customizeMenuitemText( BaseMessages.getString(
      PKG, "TableView.menu.RedoNotAvailable" ) ) );
  } else {
    miEditRedo.setEnabled( true );
    miEditRedo.setText( OsHelper.customizeMenuitemText( BaseMessages.getString( PKG, "TableView.menu.Redo", next
      .toString() ) ) );
  }
}
// Returns the action at the current undo position and steps the position
// back by one; null when there is nothing left to undo.
private TransAction previousUndo() {
  if ( undoPosition < 0 || undo.isEmpty() ) {
    return null; // No undo left!
  }
  return undo.get( undoPosition-- );
}
// Peeks at the action that would be undone next, without moving the position.
private TransAction viewPreviousUndo() {
  if ( undoPosition < 0 || undo.isEmpty() ) {
    return null; // No undo left!
  }
  return undo.get( undoPosition );
}
// Steps the undo position forward and returns the action to redo;
// null when there is nothing left to redo.
private TransAction nextUndo() {
  int size = undo.size();
  if ( size == 0 || undoPosition >= size - 1 ) {
    return null; // no redo left...
  }
  return undo.get( ++undoPosition );
}
// Peeks at the action that would be redone next, without moving the position.
private TransAction viewNextUndo() {
  int size = undo.size();
  if ( size == 0 || undoPosition >= size - 1 ) {
    return null; // no redo left...
  }
  return undo.get( undoPosition + 1 );
}
// Resets the undo history to the empty state.
private void clearUndo() {
  undoPosition = -1;
  undo = new ArrayList<TransAction>();
}
// Position of the cell hosting the active button editor: x = column, y = row.
private Point getButtonPosition() {
  return new Point( buttonColnr, buttonRownr );
}
/** @return the text content associated with the active button cell */
public String getButtonString() {
  return buttonContent;
}
/**
 * Writes the given text into the cell that hosts the active button editor.
 *
 * @param str the new cell text
 */
public void setButtonString( String str ) {
  Point position = getButtonPosition();
  // Point.x holds the column, Point.y holds the row.
  table.getItem( position.y ).setText( position.x, str );
}
/** Disposes the active cell-button editor, if one is still alive. */
public void closeActiveButton() {
  if ( button == null || button.isDisposed() ) {
    return;
  }
  button.dispose();
}
/** Closes any open inline cell editor (text or combo). */
public void unEdit() {
  disposeEditor( text );
  disposeEditor( combo );
}

// Disposes an inline editor control when it exists and is not disposed yet.
private void disposeEditor( Control editorControl ) {
  if ( editorControl != null && !editorControl.isDisposed() ) {
    editorControl.dispose();
  }
}
// Filtering...
/**
 * Asks the user for a filter condition via a dialog and selects every row
 * that matches it. The condition object is kept so it can be edited again on
 * the next invocation.
 */
public void setFilter() {
  if ( condition == null ) {
    condition = new Condition();
  }
  RowMetaInterface fields = getRowWithoutValues();
  EnterConditionDialog dialog = new EnterConditionDialog( parent.getShell(), SWT.NONE, fields, condition );
  Condition cond = dialog.open();
  if ( cond == null ) {
    return; // dialog was cancelled
  }
  // Evaluate the condition against every row (scanning from the bottom up,
  // matching the original order of the collected indexes).
  List<Integer> matches = new ArrayList<Integer>();
  for ( int i = table.getItemCount() - 1; i >= 0; i-- ) {
    RowMetaAndData row = getRow( i );
    if ( cond.evaluate( row.getRowMeta(), row.getData() ) ) {
      matches.add( Integer.valueOf( i ) );
    }
  }
  int[] selection = new int[matches.size()];
  for ( int i = 0; i < selection.length; i++ ) {
    selection[i] = matches.get( i ).intValue();
  }
  table.setSelection( selection );
}
/**
 * Builds the row metadata describing this table: a leading integer "#"
 * column followed by one string-typed entry per configured column.
 *
 * @return the row metadata, without any data
 */
public RowMetaInterface getRowWithoutValues() {
  RowMetaInterface meta = new RowMeta();
  meta.addValueMeta( new ValueMetaInteger( "#" ) );
  for ( ColumnInfo column : columns ) {
    meta.addValueMeta( new ValueMetaString( column.getName() ) );
  }
  return meta;
}
/**
 * Materializes one table row as metadata plus data: element 0 is the row
 * number, the remaining elements are the cell texts (as Strings).
 *
 * @param nr the row index
 * @return the row metadata and data for the given table item
 */
public RowMetaAndData getRow( int nr ) {
  TableItem ti = table.getItem( nr );
  RowMetaInterface rowMeta = getRowWithoutValues();
  Object[] rowData = new Object[rowMeta.size()];
  // Long.valueOf instead of the deprecated new Long(...) constructor.
  rowData[0] = Long.valueOf( nr );
  for ( int i = 1; i < rowMeta.size(); i++ ) {
    rowData[i] = ti.getText( i );
  }
  return new RowMetaAndData( rowMeta, rowData );
}
/** @return the row indexes currently selected in the embedded table */
public int[] getSelectionIndices() {
  return table.getSelectionIndices();
}

/** @return the selected row index, as reported by Table#getSelectionIndex() */
public int getSelectionIndex() {
  return table.getSelectionIndex();
}
/**
 * Removes the row at the given index, keeping at least one (blank) row.
 *
 * @param index the row to remove
 */
public void remove( int index ) {
  table.remove( index );
  ensureAtLeastOneRow();
}

/**
 * Removes the rows at the given indexes, keeping at least one (blank) row.
 *
 * @param index the rows to remove
 */
public void remove( int[] index ) {
  table.remove( index );
  ensureAtLeastOneRow();
}

// The table is never shown completely empty: append one blank row if needed.
private void ensureAtLeastOneRow() {
  if ( table.getItemCount() == 0 ) {
    new TableItem( table, SWT.NONE );
  }
}
/**
 * Returns the text of one cell.
 *
 * @param rownr the row index
 * @param colnr the column index
 * @return the cell text, or null when there is no item at that row
 */
public String getItem( int rownr, int colnr ) {
  TableItem item = table.getItem( rownr );
  return item == null ? null : item.getText( colnr );
}
/**
 * Appends a new row filled with the given cell values. Column 0 is the
 * row-number column and is skipped; null values leave their cell blank.
 *
 * @param string the cell values for the new row
 */
public void add( String... string ) {
  TableItem item = new TableItem( table, SWT.NONE );
  // Don't write past either the supplied values or the available data columns.
  int cells = Math.min( string.length, table.getColumnCount() - 1 );
  for ( int i = 0; i < cells; i++ ) {
    if ( string[i] != null ) {
      item.setText( i + 1, string[i] );
    }
  }
}
/**
 * Returns all cell texts of one row.
 *
 * @param rownr the row index
 * @return the cell texts, or null when there is no item at that row
 */
public String[] getItem( int rownr ) {
  TableItem item = table.getItem( rownr );
  return item == null ? null : getItemText( item );
}
/**
 * Get all the strings from a certain column as an array.
 *
 * @param colnr The column to return
 * @return the column values as a string array.
 */
public String[] getItems( int colnr ) {
  int count = table.getItemCount();
  String[] values = new String[count];
  for ( int row = 0; row < count; row++ ) {
    // +1 skips the leading row-number column.
    values[row] = table.getItem( row ).getText( colnr + 1 );
  }
  return values;
}
/** Removes every row, then re-adds a single blank row so the table is never empty. */
public void removeAll() {
  table.removeAll();
  if ( table.getItemCount() == 0 ) {
    new TableItem( table, SWT.NONE );
  }
}
/** @return the total number of rows (table items), including empty ones */
public int getItemCount() {
  return table.getItemCount();
}
/**
 * Sets the text of a single cell.
 *
 * @param text  the new cell text
 * @param colnr the column index
 * @param rownr the row index
 */
public void setText( String text, int colnr, int rownr ) {
  TableItem item = table.getItem( rownr );
  item.setText( colnr, text );
}
/**
 * @return whether this table view is read-only
 */
public boolean isReadonly() {
  return readonly;
}

/**
 * @param readonly true to make this table view read-only
 */
public void setReadonly( boolean readonly ) {
  this.readonly = readonly;
}
/**
 * @return whether clicking column headers sorts the table
 */
public boolean isSortable() {
  return sortable;
}

/**
 * Enables or disables sorting and updates the table's sort-indicator column
 * accordingly.
 *
 * @param sortable the sortable to set
 */
public void setSortable( boolean sortable ) {
  this.sortable = sortable;
  // Show the sort indicator only while sorting is enabled.
  table.setSortColumn( sortable ? table.getColumn( sortfield ) : null );
}
/**
 * Starts the cell editor on the first editable (non read-only) column of the
 * first row, if any column is editable.
 */
public void setFocusOnFirstEditableField() {
  final int rownr = 0;
  for ( int colnr = 0; colnr < columns.length; colnr++ ) {
    if ( columns[colnr].isReadOnly() ) {
      continue;
    }
    // Found the first editable column: start editing it and stop looking.
    activeTableItem = table.getItem( rownr );
    activeTableColumn = colnr + 1;
    edit( rownr, colnr + 1 );
    break;
  }
}
/**
 * @return the index of the column the table is currently sorted on
 */
public int getSortField() {
  return sortfield;
}
/**
 * @return whether the current sort order is descending
 */
public boolean isSortingDescending() {
  return sortingDescending;
}

/**
 * @param sortingDescending true to mark the current sort order as descending
 */
public void setSortingDescending( boolean sortingDescending ) {
  this.sortingDescending = sortingDescending;
}
/** @return the underlying SWT table widget */
public Table getTable() {
  return table;
}
/**
 * @return the column descriptor used for the leading row-number ("#") column
 */
public ColumnInfo getNumberColumn() {
  return numberColumn;
}

/**
 * @param numberColumn the column descriptor to use for the row-number column
 */
public void setNumberColumn( ColumnInfo numberColumn ) {
  this.numberColumn = numberColumn;
}
/** @return the SWT TableEditor hosting the inline cell editors */
public TableEditor getEditor() {
  return editor;
}

/** @param editor the SWT TableEditor to host the inline cell editors */
public void setEditor( TableEditor editor ) {
  this.editor = editor;
}
/**
 * Forces a pending text-editor commit by invoking its focus-lost handler.
 * NOTE(review): the name suggests this works around OSX-specific focus
 * behavior (focus-lost not firing as expected) — confirm against callers.
 */
public void applyOSXChanges() {
  if ( text != null && !text.isDisposed() && lsFocusText != null ) {
    lsFocusText.focusLost( null );
  }
}
/**
 * @return whether null values are rendered with the special (blue) styling
 */
public boolean isShowingBlueNullValues() {
  return showingBlueNullValues;
}

/**
 * @param showingBlueNullValues true to render null values with the special (blue) styling
 */
public void setShowingBlueNullValues( boolean showingBlueNullValues ) {
  this.showingBlueNullValues = showingBlueNullValues;
}
/**
 * @return the listener notified of content modifications (lsContent)
 */
public ModifyListener getContentListener() {
  return lsContent;
}

/**
 * @param lsContent the listener to notify of content modifications
 */
public void setContentListener( ModifyListener lsContent ) {
  this.lsContent = lsContent;
}
/**
 * @return whether value-conversion errors are shown inline in the table
 */
public boolean isShowingConversionErrorsInline() {
  return showingConversionErrorsInline;
}

/**
 * @param showingConversionErrorsInline true to show value-conversion errors inline
 */
public void setShowingConversionErrorsInline( boolean showingConversionErrorsInline ) {
  this.showingConversionErrorsInline = showingConversionErrorsInline;
}
/**
 * Returns a copy of the columns array in order to prevent unintended
 * modifications of the internal state.
 *
 * @return a defensive copy of the column descriptors
 */
public ColumnInfo[] getColumns() {
  // clone() on an array produces the same full-length shallow copy
  // as Arrays.copyOf(columns, columns.length).
  return columns.clone();
}
/** @return the table item (row) whose cell is currently being edited, if any */
public TableItem getActiveTableItem() {
  return activeTableItem;
}

/** @return the column index of the cell currently being edited */
public int getActiveTableColumn() {
  return activeTableColumn;
}
}
|
ui/src/org/pentaho/di/ui/core/widget/TableView.java
|
// CHECKSTYLE:FileLength:OFF
/*! ******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2016 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.ui.core.widget;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashSet;
import java.util.Hashtable;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.eclipse.swt.SWT;
import org.eclipse.swt.SWTException;
import org.eclipse.swt.custom.CCombo;
import org.eclipse.swt.custom.TableEditor;
import org.eclipse.swt.dnd.Clipboard;
import org.eclipse.swt.dnd.DND;
import org.eclipse.swt.dnd.DragSource;
import org.eclipse.swt.dnd.DragSourceEvent;
import org.eclipse.swt.dnd.DragSourceListener;
import org.eclipse.swt.dnd.TextTransfer;
import org.eclipse.swt.dnd.Transfer;
import org.eclipse.swt.events.DisposeEvent;
import org.eclipse.swt.events.DisposeListener;
import org.eclipse.swt.events.FocusAdapter;
import org.eclipse.swt.events.FocusEvent;
import org.eclipse.swt.events.KeyAdapter;
import org.eclipse.swt.events.KeyEvent;
import org.eclipse.swt.events.KeyListener;
import org.eclipse.swt.events.ModifyEvent;
import org.eclipse.swt.events.ModifyListener;
import org.eclipse.swt.events.MouseAdapter;
import org.eclipse.swt.events.MouseEvent;
import org.eclipse.swt.events.SelectionAdapter;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.events.SelectionListener;
import org.eclipse.swt.events.TraverseEvent;
import org.eclipse.swt.events.TraverseListener;
import org.eclipse.swt.graphics.Color;
import org.eclipse.swt.graphics.Font;
import org.eclipse.swt.graphics.GC;
import org.eclipse.swt.graphics.Image;
import org.eclipse.swt.graphics.Point;
import org.eclipse.swt.graphics.Rectangle;
import org.eclipse.swt.layout.FormAttachment;
import org.eclipse.swt.layout.FormData;
import org.eclipse.swt.layout.FormLayout;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Control;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Event;
import org.eclipse.swt.widgets.Listener;
import org.eclipse.swt.widgets.Menu;
import org.eclipse.swt.widgets.MenuItem;
import org.eclipse.swt.widgets.MessageBox;
import org.eclipse.swt.widgets.ScrollBar;
import org.eclipse.swt.widgets.Table;
import org.eclipse.swt.widgets.TableColumn;
import org.eclipse.swt.widgets.TableItem;
import org.eclipse.swt.widgets.Text;
import org.pentaho.di.core.Condition;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.Props;
import org.pentaho.di.core.RowMetaAndData;
import org.pentaho.di.core.exception.KettleValueException;
import org.pentaho.di.core.row.RowMeta;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.core.row.ValueMetaInterface;
import org.pentaho.di.core.row.value.ValueMetaFactory;
import org.pentaho.di.core.row.value.ValueMetaInteger;
import org.pentaho.di.core.row.value.ValueMetaString;
import org.pentaho.di.core.undo.TransAction;
import org.pentaho.di.core.variables.VariableSpace;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.ui.core.PropsUI;
import org.pentaho.di.ui.core.dialog.EnterConditionDialog;
import org.pentaho.di.ui.core.dialog.ErrorDialog;
import org.pentaho.di.ui.core.gui.GUIResource;
/**
* Widget to display or modify data, displayed in a Table format.
*
* @author Matt
* @since 27-05-2003
*/
public class TableView extends Composite {
private static Class<?> PKG = TableView.class; // for i18n purposes, needed by Translator2!!

// Parent widget and table configuration.
private Composite parent;
private ColumnInfo[] columns;
private int rows;
private boolean readonly;

// State of the cell-button editor (position and content).
private int buttonRownr;
private int buttonColnr;
private String buttonContent;

// Shift-click selection state.
private boolean previousShift;
private int selectionStart;

// The actual SWT table and its cell-editing machinery.
public Table table;
private TableEditor editor;
private TableColumn[] tablecolumn;
private PropsUI props;

// Inline cell editors: plain text, combo box, button.
private Control text;
private CCombo combo;
private Button button;

// The cell currently being edited.
private TableItem activeTableItem;
private int activeTableColumn;
private int activeTableRow;

// Listeners wired to the inline editors.
private KeyListener lsKeyText, lsKeyCombo;
private FocusAdapter lsFocusText, lsFocusCombo;
private ModifyListener lsModCombo;
private TraverseListener lsTraverse;

// Sorting state (current and previous sort column/direction).
private int sortfield;
private int sortfieldLast;
private boolean sortingDescending;
private Boolean sortingDescendingLast;
private boolean sortable;
private int lastRowCount;
private boolean fieldChanged;

// Row context menu, modification listeners and clipboard.
private Menu mRow;
private ModifyListener lsMod, lsUndo, lsContent;
private Clipboard clipboard;

// The following Image and Graphics Context are used for font metrics. We only
// want them created once.
private static Image dummyImage;
private static GC dummyGC;
private Font gridFont;

// private int last_carret_position;

// Undo/redo history and its menu items.
private ArrayList<TransAction> undo;
private int undoPosition;
private String[] beforeEdit;
private MenuItem miEditUndo, miEditRedo;

private static final String CLIPBOARD_DELIMITER = "\t";

// Filter condition, colors and miscellaneous display state.
private Condition condition;
private Color defaultBackgroundColor;
private Map<String, Color> usedColors;
private ColumnInfo numberColumn;
protected int textWidgetCaretPosition;
private VariableSpace variables;
private boolean showingBlueNullValues;
private boolean showingConversionErrorsInline;
private boolean isTextButton = false;
private boolean addIndexColumn = true;
/**
 * Creates an editable table view.
 *
 * @param space      variable space used for variable substitution
 * @param parent     the parent composite
 * @param style      SWT style flags for the embedded table
 * @param columnInfo the column definitions
 * @param nrRows     the initial number of (blank) rows
 * @param lsm        listener notified when the content is modified
 * @param pr         UI properties (fonts, look, max undo, ...)
 */
public TableView( VariableSpace space, Composite parent, int style, ColumnInfo[] columnInfo, int nrRows,
  ModifyListener lsm, PropsUI pr ) {
  this( space, parent, style, columnInfo, nrRows, false, lsm, pr );
}
/**
 * Creates a table view that may be read-only.
 *
 * BUG FIX: this constructor previously ignored its readOnly argument and
 * always delegated with a hard-coded "false"; it now forwards the caller's
 * value to the full constructor.
 *
 * @param space      variable space used for variable substitution
 * @param parent     the parent composite
 * @param style      SWT style flags for the embedded table
 * @param columnInfo the column definitions
 * @param nrRows     the initial number of (blank) rows
 * @param readOnly   true to make the table view read-only
 * @param lsm        listener notified when the content is modified
 * @param pr         UI properties (fonts, look, max undo, ...)
 */
public TableView( VariableSpace space, Composite parent, int style, ColumnInfo[] columnInfo, int nrRows,
  boolean readOnly, ModifyListener lsm, PropsUI pr ) {
  this( space, parent, style, columnInfo, nrRows, readOnly, lsm, pr, true );
}
public TableView( VariableSpace space, Composite parent, int style, ColumnInfo[] columnInfo, int nrRows,
boolean readOnly, ModifyListener lsm, PropsUI pr, final boolean addIndexColumn ) {
super( parent, SWT.NO_BACKGROUND | SWT.NO_FOCUS | SWT.NO_MERGE_PAINTS | SWT.NO_RADIO_GROUP );
this.parent = parent;
this.columns = columnInfo;
this.rows = nrRows;
this.props = pr;
this.readonly = readOnly;
this.clipboard = null;
this.variables = space;
this.addIndexColumn = addIndexColumn;
sortfield = 0;
sortfieldLast = -1;
sortingDescending = false;
sortingDescendingLast = null;
sortable = true;
selectionStart = -1;
previousShift = false;
usedColors = new Hashtable<String, Color>();
condition = null;
lsMod = lsm;
clearUndo();
numberColumn = new ColumnInfo( "#", ColumnInfo.COLUMN_TYPE_TEXT, true, true );
ValueMetaInterface numberColumnValueMeta = new ValueMetaInteger( "#" );
numberColumnValueMeta.setConversionMask( "####0" );
numberColumn.setValueMeta( numberColumnValueMeta );
lsUndo = new ModifyListener() {
@Override
public void modifyText( ModifyEvent arg0 ) {
fieldChanged = true;
}
};
if ( TableView.dummyGC == null ) {
Display disp = parent.getDisplay();
TableView.dummyImage = new Image( disp, 1, 1 );
TableView.dummyGC = new GC( TableView.dummyImage );
gridFont = new Font( disp, props.getGridFont() );
TableView.dummyGC.setFont( gridFont );
}
FormLayout controlLayout = new FormLayout();
controlLayout.marginLeft = 0;
controlLayout.marginRight = 0;
controlLayout.marginTop = 0;
controlLayout.marginBottom = 0;
setLayout( controlLayout );
// setLayout(new GridLayout());
// Create table, add columns & rows...
table = new Table( this, style | SWT.MULTI );
props.setLook( table, Props.WIDGET_STYLE_TABLE );
table.setLinesVisible( true );
// table.setLayout(new FormLayout());
// table.setLayoutData(new GridData(GridData.FILL_BOTH));
FormData fdTable = new FormData();
fdTable.left = new FormAttachment( 0, 0 );
fdTable.right = new FormAttachment( 100, 0 );
fdTable.top = new FormAttachment( 0, 0 );
fdTable.bottom = new FormAttachment( 100, 0 );
table.setLayoutData( fdTable );
tablecolumn = new TableColumn[columns.length + 1];
tablecolumn[0] = new TableColumn( table, SWT.RIGHT );
tablecolumn[0].setResizable( true );
tablecolumn[0].setText( "#" );
tablecolumn[0].setWidth( addIndexColumn ? 25 : 0 );
tablecolumn[0].setAlignment( SWT.RIGHT );
for ( int i = 0; i < columns.length; i++ ) {
int allignment = columns[i].getAllignement();
tablecolumn[i + 1] = new TableColumn( table, allignment );
tablecolumn[i + 1].setResizable( true );
if ( columns[i].getName() != null ) {
tablecolumn[i + 1].setText( columns[i].getName() );
}
if ( columns[i].getToolTip() != null ) {
tablecolumn[i + 1].setToolTipText( ( columns[i].getToolTip() ) );
}
ValueMetaInterface valueMeta = columns[i].getValueMeta();
if ( valueMeta != null && valueMeta.isNumeric() ) {
tablecolumn[i + 1].setAlignment( SWT.RIGHT );
}
tablecolumn[i + 1].pack();
}
table.setHeaderVisible( true );
table.setLinesVisible( true );
// Set the default values...
if ( rows > 0 ) {
table.setItemCount( rows );
} else {
table.setItemCount( 1 );
}
// Get the background color of item 0, before anything happened with it,
// that's the default color.
defaultBackgroundColor = table.getItem( 0 ).getBackground();
setRowNums();
// Set the sort sign on the first column. (0)
table.setSortColumn( table.getColumn( sortfield ) );
table.setSortDirection( sortingDescending ? SWT.DOWN : SWT.UP );
// create a ControlEditor field to edit the contents of a cell
editor = new TableEditor( table );
editor.grabHorizontal = true;
editor.grabVertical = true;
mRow = new Menu( table );
MenuItem miRowInsBef = new MenuItem( mRow, SWT.NONE );
miRowInsBef.setText( OsHelper.customizeMenuitemText( BaseMessages.getString(
PKG, "TableView.menu.InsertBeforeRow" ) ) );
MenuItem miRowInsAft = new MenuItem( mRow, SWT.NONE );
miRowInsAft.setText( OsHelper.customizeMenuitemText( BaseMessages.getString(
PKG, "TableView.menu.InsertAfterRow" ) ) );
new MenuItem( mRow, SWT.SEPARATOR );
MenuItem miRowUp = new MenuItem( mRow, SWT.NONE );
miRowUp.setText( OsHelper.customizeMenuitemText( BaseMessages.getString( PKG, "TableView.menu.MoveUp" ) ) );
MenuItem miRowDown = new MenuItem( mRow, SWT.NONE );
miRowDown.setText( OsHelper.customizeMenuitemText( BaseMessages.getString( PKG, "TableView.menu.MoveDown" ) ) );
MenuItem miCol1 = new MenuItem( mRow, SWT.NONE );
miCol1.setText( OsHelper.customizeMenuitemText( BaseMessages.getString(
PKG, "TableView.menu.OptimalSizeWithHeader" ) ) );
MenuItem miCol2 = new MenuItem( mRow, SWT.NONE );
miCol2.setText( OsHelper.customizeMenuitemText( BaseMessages.getString(
PKG, "TableView.menu.OptimalSizeWithoutHeader" ) ) );
new MenuItem( mRow, SWT.SEPARATOR );
MenuItem miClear = new MenuItem( mRow, SWT.NONE );
miClear.setText( OsHelper.customizeMenuitemText( BaseMessages.getString( PKG, "TableView.menu.ClearAll" ) ) );
new MenuItem( mRow, SWT.SEPARATOR );
MenuItem miSelAll = new MenuItem( mRow, SWT.NONE );
miSelAll.setText( OsHelper.customizeMenuitemText( BaseMessages.getString( PKG, "TableView.menu.SelectAll" ) ) );
MenuItem miUnselAll = new MenuItem( mRow, SWT.NONE );
miUnselAll.setText( OsHelper.customizeMenuitemText( BaseMessages.getString(
PKG, "TableView.menu.ClearSelection" ) ) );
MenuItem miFilter = new MenuItem( mRow, SWT.NONE );
miFilter.setText( OsHelper.customizeMenuitemText( BaseMessages.getString(
PKG, "TableView.menu.FilteredSelection" ) ) );
new MenuItem( mRow, SWT.SEPARATOR );
MenuItem miClipAll = new MenuItem( mRow, SWT.NONE );
miClipAll.setText( OsHelper.customizeMenuitemText( BaseMessages.getString(
PKG, "TableView.menu.CopyToClipboard" ) ) );
MenuItem miPasteAll = new MenuItem( mRow, SWT.NONE );
miPasteAll.setText( OsHelper.customizeMenuitemText( BaseMessages.getString(
PKG, "TableView.menu.PasteFromClipboard" ) ) );
MenuItem miCutAll = new MenuItem( mRow, SWT.NONE );
miCutAll
.setText( OsHelper.customizeMenuitemText( BaseMessages.getString( PKG, "TableView.menu.CutSelected" ) ) );
MenuItem miDelAll = new MenuItem( mRow, SWT.NONE );
miDelAll.setText( OsHelper.customizeMenuitemText( BaseMessages
.getString( PKG, "TableView.menu.DeleteSelected" ) ) );
MenuItem miKeep = new MenuItem( mRow, SWT.NONE );
miKeep
.setText( OsHelper.customizeMenuitemText( BaseMessages.getString( PKG, "TableView.menu.KeepSelected" ) ) );
new MenuItem( mRow, SWT.SEPARATOR );
MenuItem miCopyToAll = new MenuItem( mRow, SWT.NONE );
miCopyToAll.setText( OsHelper.customizeMenuitemText( BaseMessages.getString(
PKG, "TableView.menu.CopyFieldToAllRows" ) ) );
new MenuItem( mRow, SWT.SEPARATOR );
miEditUndo = new MenuItem( mRow, SWT.NONE );
miEditRedo = new MenuItem( mRow, SWT.NONE );
setUndoMenu();
if ( readonly ) {
miRowInsBef.setEnabled( false );
miRowInsAft.setEnabled( false );
miRowUp.setEnabled( false );
miRowDown.setEnabled( false );
miClear.setEnabled( false );
miCopyToAll.setEnabled( false );
miPasteAll.setEnabled( false );
miDelAll.setEnabled( false );
miCutAll.setEnabled( false );
miKeep.setEnabled( false );
}
SelectionAdapter lsRowInsBef = new SelectionAdapter() {
@Override
public void widgetSelected( SelectionEvent e ) {
insertRowBefore();
}
};
SelectionAdapter lsRowInsAft = new SelectionAdapter() {
@Override
public void widgetSelected( SelectionEvent e ) {
insertRowAfter();
}
};
SelectionAdapter lsCol1 = new SelectionAdapter() {
@Override
public void widgetSelected( SelectionEvent e ) {
optWidth( true );
}
};
SelectionAdapter lsCol2 = new SelectionAdapter() {
@Override
public void widgetSelected( SelectionEvent e ) {
optWidth( false );
}
};
SelectionAdapter lsRowUp = new SelectionAdapter() {
@Override
public void widgetSelected( SelectionEvent e ) {
moveRows( -1 );
}
};
SelectionAdapter lsRowDown = new SelectionAdapter() {
@Override
public void widgetSelected( SelectionEvent e ) {
moveRows( +1 );
}
};
SelectionAdapter lsClear = new SelectionAdapter() {
@Override
public void widgetSelected( SelectionEvent e ) {
clearAll( true );
}
};
SelectionAdapter lsClipAll = new SelectionAdapter() {
@Override
public void widgetSelected( SelectionEvent e ) {
clipSelected();
}
};
SelectionAdapter lsCopyToAll = new SelectionAdapter() {
@Override
public void widgetSelected( SelectionEvent e ) {
copyToAll();
}
};
SelectionAdapter lsSelAll = new SelectionAdapter() {
@Override
public void widgetSelected( SelectionEvent e ) {
selectAll();
}
};
SelectionAdapter lsUnselAll = new SelectionAdapter() {
@Override
public void widgetSelected( SelectionEvent e ) {
unselectAll();
}
};
SelectionAdapter lsPasteAll = new SelectionAdapter() {
@Override
public void widgetSelected( SelectionEvent e ) {
pasteSelected();
}
};
SelectionAdapter lsCutAll = new SelectionAdapter() {
@Override
public void widgetSelected( SelectionEvent e ) {
cutSelected();
}
};
SelectionAdapter lsDelAll = new SelectionAdapter() {
@Override
public void widgetSelected( SelectionEvent e ) {
delSelected();
}
};
SelectionAdapter lsKeep = new SelectionAdapter() {
@Override
public void widgetSelected( SelectionEvent e ) {
keepSelected();
}
};
SelectionAdapter lsFilter = new SelectionAdapter() {
@Override
public void widgetSelected( SelectionEvent e ) {
setFilter();
}
};
SelectionAdapter lsEditUndo = new SelectionAdapter() {
@Override
public void widgetSelected( SelectionEvent e ) {
undoAction();
}
};
SelectionAdapter lsEditRedo = new SelectionAdapter() {
@Override
public void widgetSelected( SelectionEvent e ) {
redoAction();
}
};
miRowInsBef.addSelectionListener( lsRowInsBef );
miRowInsAft.addSelectionListener( lsRowInsAft );
miCol1.addSelectionListener( lsCol1 );
miCol2.addSelectionListener( lsCol2 );
miRowUp.addSelectionListener( lsRowUp );
miRowDown.addSelectionListener( lsRowDown );
miClear.addSelectionListener( lsClear );
miClipAll.addSelectionListener( lsClipAll );
miCopyToAll.addSelectionListener( lsCopyToAll );
miSelAll.addSelectionListener( lsSelAll );
miUnselAll.addSelectionListener( lsUnselAll );
miPasteAll.addSelectionListener( lsPasteAll );
miCutAll.addSelectionListener( lsCutAll );
miDelAll.addSelectionListener( lsDelAll );
miKeep.addSelectionListener( lsKeep );
miFilter.addSelectionListener( lsFilter );
miEditUndo.addSelectionListener( lsEditUndo );
miEditRedo.addSelectionListener( lsEditRedo );
table.setMenu( mRow );
lsFocusText = new FocusAdapter() {
@Override
public void focusLost( FocusEvent e ) {
final Display d = Display.getCurrent();
if ( table.isDisposed() ) {
return;
}
final TableItem row = activeTableItem;
if ( row == null ) {
return;
}
final int colnr = activeTableColumn;
final int rownr = table.indexOf( row );
final Control ftext = text;
final String[] fBeforeEdit = beforeEdit;
// Save the position of the caret for the focus-dropping popup-dialogs
// The content is then in contentDestination
textWidgetCaretPosition = getTextWidgetCaretPosition( colnr );
final String value = getTextWidgetValue( colnr );
final Runnable worker = new Runnable() {
@Override
public void run() {
try {
if ( row.isDisposed() ) {
return;
}
row.setText( colnr, value );
ftext.dispose();
String[] afterEdit = getItemText( row );
checkChanged( new String[][]{ fBeforeEdit }, new String[][]{ afterEdit }, new int[]{ rownr } );
} catch ( Exception ignored ) {
// widget is disposed, ignore
}
}
};
// force the immediate update
if ( !row.isDisposed() ) {
row.setText( colnr, value );
}
if ( columns[colnr - 1].getType() == ColumnInfo.COLUMN_TYPE_TEXT_BUTTON ) {
try {
Thread.sleep( 500 );
} catch ( InterruptedException ignored ) {
}
Runnable r = new Runnable() {
@Override
public void run() {
d.asyncExec( worker );
}
};
Thread t = new Thread( r );
t.start();
} else {
worker.run();
}
}
};
lsFocusCombo = new FocusAdapter() {
@Override
public void focusLost( FocusEvent e ) {
TableItem row = activeTableItem;
if ( row == null ) {
return;
}
int colnr = activeTableColumn;
int rownr = table.indexOf( row );
if ( colnr > 0 ) {
try {
row.setText( colnr, combo.getText() );
} catch ( Exception exc ) {
// Eat widget disposed error
}
String[] afterEdit = getItemText( row );
if ( afterEdit != null ) {
checkChanged( new String[][]{ beforeEdit }, new String[][]{ afterEdit }, new int[]{ rownr } );
}
}
combo.dispose();
}
};
lsModCombo = new ModifyListener() {
@Override
public void modifyText( ModifyEvent e ) {
TableItem row = activeTableItem;
if ( row == null ) {
return;
}
int colnr = activeTableColumn;
int rownr = table.indexOf( row );
row.setText( colnr, combo.getText() );
String[] afterEdit = getItemText( row );
checkChanged( new String[][]{ beforeEdit }, new String[][]{ afterEdit }, new int[]{ rownr } );
}
};
// Catch the keys pressed when editing a Text-field...
lsKeyText = new KeyAdapter() {
@Override
public void keyPressed( KeyEvent e ) {
boolean right = false;
boolean left = false;
/*
* left = e.keyCode == SWT.ARROW_LEFT && last_carret_position==0;
*
* if (text!=null && !text.isDisposed()) right = e.keyCode == SWT.ARROW_RIGHT &&
* last_carret_position==text.getText().length();
*/
// "ENTER": close the text editor and copy the data over
// We edit the data after moving to another cell, only if editNextCell =
// true;
if ( e.character == SWT.CR
|| e.keyCode == SWT.ARROW_DOWN || e.keyCode == SWT.ARROW_UP || e.keyCode == SWT.TAB || left || right ) {
if ( activeTableItem == null ) {
return;
}
applyTextChange( activeTableItem, activeTableRow, activeTableColumn );
int maxcols = table.getColumnCount();
int maxrows = table.getItemCount();
boolean editNextCell = false;
if ( e.keyCode == SWT.ARROW_DOWN && activeTableRow < maxrows - 1 ) {
activeTableRow++;
editNextCell = true;
}
if ( e.keyCode == SWT.ARROW_UP && activeTableRow > 0 ) {
activeTableRow--;
editNextCell = true;
}
// TAB
if ( ( e.keyCode == SWT.TAB && ( ( e.stateMask & SWT.SHIFT ) == 0 ) ) || right ) {
activeTableColumn++;
editNextCell = true;
}
// Shift Tab
if ( ( e.keyCode == SWT.TAB && ( ( e.stateMask & SWT.SHIFT ) != 0 ) ) || left ) {
activeTableColumn--;
editNextCell = true;
}
if ( activeTableColumn < 1 ) { // from SHIFT-TAB
activeTableColumn = maxcols - 1;
if ( activeTableRow > 0 ) {
activeTableRow--;
}
}
if ( activeTableColumn >= maxcols ) { // from TAB
activeTableColumn = 1;
activeTableRow++;
}
// Tab beyond last line: add a line to table!
if ( activeTableRow >= maxrows ) {
TableItem item = new TableItem( table, SWT.NONE, activeTableRow );
item.setText( 1, "" );
setRowNums();
}
activeTableItem = table.getItem( activeTableRow ); // just to make sure!
if ( editNextCell ) {
edit( activeTableRow, activeTableColumn );
} else {
if ( e.keyCode == SWT.ARROW_DOWN && activeTableRow == maxrows - 1 ) {
insertRowAfter();
}
}
} else if ( e.keyCode == SWT.ESC ) {
text.dispose();
// setFocus();
table.setFocus();
}
// last_carret_position = text.isDisposed()?-1:text.getCaretPosition();
}
};
// Catch the keys pressed when editing a Combo field
lsKeyCombo = new KeyAdapter() {
@Override
public void keyPressed( KeyEvent e ) {
boolean ctrl = ( ( e.stateMask & SWT.MOD1 ) != 0 );
// CTRL-V --> Paste selected infomation...
if ( e.keyCode == 'v' && ctrl ) {
e.doit = false;
if ( clipboard != null ) {
clipboard.dispose();
clipboard = null;
}
clipboard = new Clipboard( getDisplay() );
TextTransfer tran = TextTransfer.getInstance();
String text = (String) clipboard.getContents( tran );
combo.setText( text );
return;
}
boolean right = false;
boolean left = false;
// "ENTER": close the text editor and copy the data over
if ( e.keyCode == SWT.CR || e.keyCode == SWT.TAB || left || right ) {
if ( activeTableItem == null ) {
return;
}
applyComboChange( activeTableItem, activeTableRow, activeTableColumn );
String[] afterEdit = getItemText( activeTableItem );
checkChanged(
new String[][]{ beforeEdit }, new String[][]{ afterEdit }, new int[]{ activeTableRow } );
int maxcols = table.getColumnCount();
int maxrows = table.getItemCount();
boolean sel = false;
// TAB
if ( ( e.keyCode == SWT.TAB && ( ( e.stateMask & SWT.SHIFT ) == 0 ) ) || right ) {
activeTableColumn++;
sel = true;
}
// Shift Tab
if ( ( e.keyCode == SWT.TAB && ( ( e.stateMask & SWT.SHIFT ) != 0 ) ) || right ) {
activeTableColumn--;
sel = true;
}
if ( activeTableColumn < 1 ) { // from SHIFT-TAB
activeTableColumn = maxcols - 1;
if ( activeTableRow > 0 ) {
activeTableRow--;
}
}
if ( activeTableColumn >= maxcols ) { // from TAB
activeTableColumn = 1;
activeTableRow++;
}
// Tab beyond last line: add a line to table!
if ( activeTableRow >= maxrows ) {
TableItem item = new TableItem( table, SWT.NONE, activeTableRow );
item.setText( 1, "" );
setRowNums();
}
if ( sel ) {
edit( activeTableRow, activeTableColumn );
}
table.setFocus();
} else if ( e.keyCode == SWT.ESC ) {
if ( activeTableItem != null ) {
activeTableItem.setText( activeTableColumn, beforeEdit[activeTableColumn - 1] );
}
combo.dispose();
table.setFocus();
e.doit = false;
}
// last_carret_position = combo.isDisposed()?-1:0;
}
};
/*
* It seems there is an other keyListener active to help control the cursor. There is support for keys like
* LEFT/RIGHT/UP/DOWN/HOME/END/etc It presents us with a problem because we only get the position of the row/column
* AFTER the other listener did it's job. Therefor we added global variables prev_rownr and prev_colnr
*/
KeyListener lsKeyTable = new KeyAdapter() {
@Override
public void keyPressed( KeyEvent e ) {
if ( activeTableItem == null ) {
return;
}
int maxcols = table.getColumnCount();
int maxrows = table.getItemCount();
boolean shift = ( e.stateMask & SWT.SHIFT ) != 0;
if ( !previousShift && shift || selectionStart < 0 ) {
// Shift is pressed down: reset start of selection
// No start of selection known? reset as well.
selectionStart = activeTableRow;
}
previousShift = shift;
boolean ctrl = ( ( e.stateMask & SWT.MOD1 ) != 0 );
// Move rows up or down shortcuts...
if ( !readonly && e.keyCode == SWT.ARROW_DOWN && ctrl ) {
moveRows( +1 );
e.doit = false;
return;
}
if ( !readonly && e.keyCode == SWT.ARROW_UP && ctrl ) {
moveRows( -1 );
e.doit = false;
return;
}
// Select extra row down
if ( e.keyCode == SWT.ARROW_DOWN && shift ) {
activeTableRow++;
if ( activeTableRow >= maxrows ) {
activeTableRow = maxrows - 1;
}
selectRows( selectionStart, activeTableRow );
// activeTableItem = table.getItem(activeTableRow);
table.showItem( table.getItem( activeTableRow ) );
e.doit = false;
return;
}
// Select extra row up
if ( e.keyCode == SWT.ARROW_UP && shift ) {
activeTableRow--;
if ( activeTableRow < 0 ) {
activeTableRow = 0;
}
selectRows( activeTableRow, selectionStart );
// activeTableItem = table.getItem(activeTableRow);
table.showItem( table.getItem( activeTableRow ) );
e.doit = false;
return;
}
// Select all rows until end
if ( e.keyCode == SWT.HOME && shift ) {
activeTableRow = 0;
// Select all indeces from "from_selection" to "row"
selectRows( selectionStart, activeTableRow );
table.showItem( activeTableItem );
e.doit = false;
return;
}
// Select extra row up
if ( e.keyCode == SWT.END && shift ) {
activeTableRow = maxrows;
selectRows( selectionStart, activeTableRow );
table.showItem( activeTableItem );
e.doit = false;
return;
}
// Move cursor: set selection on the row in question.
if ( ( e.keyCode == SWT.ARROW_DOWN && !shift )
|| ( e.keyCode == SWT.ARROW_UP && !shift ) || ( e.keyCode == SWT.HOME && !shift )
|| ( e.keyCode == SWT.END && !shift ) ) {
switch ( e.keyCode ) {
case SWT.ARROW_DOWN:
activeTableRow++;
if ( activeTableRow >= maxrows ) {
if ( !readonly ) {
insertRowAfter();
} else {
activeTableRow = maxrows - 1;
}
}
break;
case SWT.ARROW_UP:
activeTableRow--;
if ( activeTableRow < 0 ) {
activeTableRow = 0;
}
break;
case SWT.HOME:
activeTableRow = 0;
break;
case SWT.END:
activeTableRow = maxrows - 1;
break;
default:
break;
}
setPosition( activeTableRow, activeTableColumn );
table.deselectAll();
table.select( activeTableRow );
table.showItem( table.getItem( activeTableRow ) );
e.doit = false;
return;
}
// CTRL-A --> Select All lines
if ( e.keyCode == 'a' && ctrl ) {
e.doit = false;
selectAll();
return;
}
// ESC --> unselect all
if ( e.keyCode == SWT.ESC ) {
e.doit = false;
unselectAll();
selectRows( activeTableRow, activeTableRow );
setFocus();
// table.setFocus();
return;
}
// CTRL-C --> Copy selected lines to clipboard
if ( e.keyCode == 'c' && ctrl ) {
e.doit = false;
clipSelected();
return;
}
// CTRL-K --> keep only selected lines
if ( !readonly && e.keyCode == 'k' && ctrl ) {
e.doit = false;
keepSelected();
return;
}
// CTRL-X --> Cut selected infomation...
if ( !readonly && e.keyCode == 'x' && ctrl ) {
e.doit = false;
cutSelected();
return;
}
// CTRL-V --> Paste selected infomation...
if ( !readonly && e.keyCode == 'v' && ctrl ) {
e.doit = false;
pasteSelected();
return;
}
// F3 --> optimal width including headers
if ( e.keyCode == SWT.F3 ) {
e.doit = false;
optWidth( true );
return;
}
// DEL --> delete selected lines
if ( !readonly && e.keyCode == SWT.DEL ) {
e.doit = false;
delSelected();
return;
}
// F4 --> optimal width excluding headers
if ( e.keyCode == SWT.F4 ) {
e.doit = false;
optWidth( false );
return;
}
// CTRL-Y --> redo action
if ( e.keyCode == 'y' && ctrl ) {
e.doit = false;
redoAction();
return;
}
// CTRL-Z --> undo action
if ( e.keyCode == 'z' && ctrl ) {
e.doit = false;
undoAction();
return;
}
// Return: edit the first field in the row.
if ( e.keyCode == SWT.CR || e.keyCode == SWT.ARROW_RIGHT || e.keyCode == SWT.TAB ) {
activeTableColumn = 1;
edit( activeTableRow, activeTableColumn );
e.doit = false;
return;
}
if ( activeTableColumn > 0 ) {
boolean textChar =
( e.character >= 'a' && e.character <= 'z' )
|| ( e.character >= 'A' && e.character <= 'Z' ) || ( e.character >= '0' && e.character <= '9' )
|| ( e.character == ' ' ) || ( e.character == '_' ) || ( e.character == ',' )
|| ( e.character == '.' ) || ( e.character == '+' ) || ( e.character == '-' )
|| ( e.character == '*' ) || ( e.character == '/' ) || ( e.character == ';' );
// setSelection(row, rownr, colnr);
// character a-z, A-Z, 0-9: start typing...
if ( e.character == SWT.CR || e.keyCode == SWT.F2 || textChar ) {
boolean selectText = true;
char extraChar = 0;
if ( textChar ) {
extraChar = e.character;
selectText = false;
}
e.doit = false;
edit( activeTableRow, activeTableColumn, selectText, extraChar );
}
if ( e.character == SWT.TAB ) {
// TAB
if ( e.keyCode == SWT.TAB && ( ( e.stateMask & SWT.SHIFT ) == 0 ) ) {
activeTableColumn++;
}
// Shift Tab
if ( e.keyCode == SWT.TAB && ( ( e.stateMask & SWT.SHIFT ) != 0 ) ) {
activeTableColumn--;
}
if ( activeTableColumn < 1 ) { // from SHIFT-TAB
activeTableColumn = maxcols - 1;
if ( activeTableRow > 0 ) {
activeTableRow--;
}
}
if ( activeTableColumn >= maxcols ) { // from TAB
activeTableColumn = 1;
activeTableRow++;
}
// Tab beyond last line: add a line to table!
if ( activeTableRow >= maxrows ) {
TableItem item = new TableItem( table, SWT.NONE, activeTableRow );
item.setText( 1, "" );
setRowNums();
}
// row = table.getItem(rownr);
e.doit = false;
edit( activeTableRow, activeTableColumn );
}
}
setFocus();
table.setFocus();
}
};
table.addKeyListener( lsKeyTable );
// Table listens to the mouse:
MouseAdapter lsMouseT = new MouseAdapter() {
@Override
public void mouseDown( MouseEvent event ) {
if ( activeTableItem != null
&& editor != null
&& editor.getEditor() != null
&& !editor.getEditor().isDisposed() ) {
if ( activeTableColumn > 0 ) {
switch ( columns[activeTableColumn - 1].getType() ) {
case ColumnInfo.COLUMN_TYPE_TEXT:
applyTextChange( activeTableItem, activeTableRow, activeTableColumn );
break;
case ColumnInfo.COLUMN_TYPE_CCOMBO:
applyComboChange( activeTableItem, activeTableRow, activeTableColumn );
break;
}
}
}
//if ( event.button == 1 ) {
boolean rightClick = event.button == 3;
if ( event.button == 1 || rightClick ) {
boolean shift = ( event.stateMask & SWT.SHIFT ) != 0;
boolean control = ( event.stateMask & SWT.MOD1 ) != 0;
if ( !shift && !control ) {
Rectangle clientArea = table.getClientArea();
Point pt = new Point( event.x, event.y );
int index = table.getTopIndex();
while ( index < table.getItemCount() ) {
boolean visible = false;
final TableItem item = table.getItem( index );
for ( int i = 0; i < table.getColumnCount(); i++ ) {
Rectangle rect = item.getBounds( i );
if ( i == 0 ) {
rect.width = rect.x;
rect.x = 0;
}
if ( rect.contains( pt ) ) {
activeTableItem = item;
activeTableColumn = i;
activeTableRow = index;
if ( !rightClick ) {
editSelected();
}
return;
} else {
if ( i == table.getColumnCount() - 1 && // last column
pt.x > rect.x + rect.width && // to the right
pt.y >= rect.y && pt.y <= rect.y + rect.height // same
// height
// as this
// visible
// item
) {
return; // don't do anything when clicking to the right of
// the grid.
}
}
if ( !visible && rect.intersects( clientArea ) ) {
visible = true;
}
}
if ( !visible ) {
return;
}
index++;
}
if ( rightClick ) {
return;
}
// OK, so they clicked in the table and we did not go into the
// invisible: below the last line!
// Position on last row, 1st column and add a new line...
setPosition( table.getItemCount() - 1, 1 );
insertRowAfter();
}
}
}
};
table.addMouseListener( lsMouseT );
// Add support for sorted columns!
//
final int nrcols = tablecolumn.length;
for ( int i = 0; i < nrcols; i++ ) {
final int colnr = i;
Listener lsSort = new Listener() {
@Override
public void handleEvent( Event e ) {
// Sorting means: clear undo information!
clearUndo();
sortTable( colnr );
}
};
tablecolumn[i].addListener( SWT.Selection, lsSort );
}
lsTraverse = new TraverseListener() {
@Override
public void keyTraversed( TraverseEvent e ) {
e.doit = false;
}
};
table.addTraverseListener( lsTraverse );
// cursor.addTraverseListener(lsTraverse);
// Clean up the clipboard
addDisposeListener( new DisposeListener() {
@Override
public void widgetDisposed( DisposeEvent e ) {
if ( clipboard != null ) {
clipboard.dispose();
clipboard = null;
}
if ( gridFont != null ) {
gridFont.dispose();
}
}
} );
// Drag & drop source!
// Drag & Drop for table-viewer
Transfer[] ttypes = new Transfer[]{ TextTransfer.getInstance() };
DragSource ddSource = new DragSource( table, DND.DROP_MOVE | DND.DROP_COPY );
ddSource.setTransfer( ttypes );
ddSource.addDragListener( new DragSourceListener() {
@Override
public void dragStart( DragSourceEvent event ) {
}
@Override
public void dragSetData( DragSourceEvent event ) {
event.data = "TableView" + Const.CR + getSelectedText();
}
@Override
public void dragFinished( DragSourceEvent event ) {
}
} );
table.layout();
table.pack();
optWidth( true );
layout();
pack();
}
protected String getTextWidgetValue( int colNr ) {
  // Read the current value out of the active text editor widget. Columns
  // flagged as variable-aware use a TextVar wrapper; plain columns use a
  // bare SWT Text. colNr is 1-based (column 0 holds the row numbers).
  if ( columns[colNr - 1].isUsingVariables() ) {
    return ( (TextVar) text ).getText();
  }
  return ( (Text) text ).getText();
}
protected int getTextWidgetCaretPosition( int colNr ) {
  // Caret position inside the active text editor widget, unwrapping the
  // TextVar composite when the column supports variables.
  if ( columns[colNr - 1].isUsingVariables() ) {
    return ( (TextVar) text ).getTextWidget().getCaretPosition();
  }
  return ( (Text) text ).getCaretPosition();
}
public void sortTable( int colnr ) {
  // Sort on the clicked column: re-clicking the current sort column toggles
  // the direction, a new column starts an ascending sort. No-op when the
  // table is not sortable.
  if ( !sortable ) {
    return;
  }
  if ( sortfield != colnr ) {
    sortfield = colnr;
    sortingDescending = false;
  } else {
    sortingDescending = !sortingDescending;
  }
  sortTable( sortfield, sortingDescending );
}
public void setSelection( int[] selectedItems ) {
  // Select the given row indices in the underlying SWT table.
  table.select( selectedItems );
}
public void sortTable( int sortField, boolean sortingDescending ) {
  // Sort the table rows on the given column in the given direction.
  // Each cell is converted to its column's typed value (via ValueMeta) first,
  // so numbers and dates sort numerically/chronologically, not lexically.
  // Row background colors (and blue "<null>" markers) survive the sort.
  // The sort is skipped when field, direction and row count are unchanged.
  boolean shouldRefresh = false;
  if ( this.sortfieldLast == -1 && this.sortingDescendingLast == null ) {
    // first time through, so update
    // NOTE(review): 'new Boolean(...)' is deprecated; Boolean.valueOf would
    // do — left unchanged here.
    shouldRefresh = true;
    this.sortfieldLast = this.sortfield;
    this.sortingDescendingLast = new Boolean( this.sortingDescending );
    this.sortfield = sortField;
    this.sortingDescending = sortingDescending;
  }
  if ( sortfieldLast != this.sortfield ) {
    this.sortfieldLast = this.sortfield;
    this.sortfield = sortField;
    shouldRefresh = true;
  }
  // NOTE(review): sortingDescendingLast is a Boolean compared to a primitive
  // with '!=', which auto-unboxes; would NPE if it were null here — the
  // first-time branch above is assumed to have initialized it. Verify.
  if ( sortingDescendingLast != this.sortingDescending ) {
    this.sortingDescendingLast = this.sortingDescending;
    this.sortingDescending = sortingDescending;
    shouldRefresh = true;
  }
  if ( !shouldRefresh && table.getItemCount() == lastRowCount ) {
    return;
  }
  try {
    // First, get all info and put it in a Vector of Rows...
    TableItem[] items = table.getItems();
    List<Object[]> v = new ArrayList<Object[]>();
    // First create the row metadata for the grid
    //
    final RowMetaInterface rowMeta = new RowMeta();
    // First values are the color name + value!
    rowMeta.addValueMeta( new ValueMetaString( "colorname" ) );
    rowMeta.addValueMeta( new ValueMetaInteger( "color" ) );
    for ( int j = 0; j < table.getColumnCount(); j++ ) {
      ColumnInfo colInfo;
      if ( j > 0 ) {
        colInfo = columns[j - 1];
      } else {
        colInfo = numberColumn;
      }
      ValueMetaInterface valueMeta = colInfo.getValueMeta();
      if ( j == sortField ) {
        valueMeta.setSortedDescending( sortingDescending );
      }
      rowMeta.addValueMeta( valueMeta );
    }
    // Source meta is all-strings (what the grid holds); conversion meta keeps
    // the real column types so comparisons are type-aware.
    final RowMetaInterface sourceRowMeta = rowMeta.cloneToType( ValueMetaInterface.TYPE_STRING );
    final RowMetaInterface conversionRowMeta = rowMeta.clone();
    // Set it all to string...
    // Also set the storage value metadata: this will allow us to convert back
    // and forth without a problem.
    //
    for ( int i = 0; i < sourceRowMeta.size(); i++ ) {
      ValueMetaInterface sourceValueMeta = sourceRowMeta.getValueMeta( i );
      sourceValueMeta.setStorageType( ValueMetaInterface.STORAGE_TYPE_NORMAL );
      ValueMetaInterface conversionMetaData = conversionRowMeta.getValueMeta( i );
      conversionMetaData.setStorageType( ValueMetaInterface.STORAGE_TYPE_NORMAL );
      // Meaning: this string comes from an Integer/Number/Date/etc.
      //
      sourceRowMeta.getValueMeta( i ).setConversionMetadata( conversionMetaData );
    }
    // Now populate a list of data rows...
    //
    for ( int i = 0; i < items.length; i++ ) {
      TableItem item = items[i];
      Object[] r = new Object[table.getColumnCount() + 2];
      // First values are the color name + value!
      Color bg = item.getBackground();
      if ( !bg.equals( defaultBackgroundColor ) ) {
        String colorName = "bg " + bg.toString();
        r[0] = colorName;
        // Pack RGB into a single long so the color round-trips through the sort.
        r[1] = new Long( ( bg.getRed() << 16 ) + ( bg.getGreen() << 8 ) + ( bg.getBlue() ) );
        // Save it in the used colors map!
        usedColors.put( colorName, bg );
      }
      for ( int j = 0; j < table.getColumnCount(); j++ ) {
        String data = item.getText( j );
        // Blue foreground marks a displayed-as-"<null>" cell; treat it as null data.
        if ( GUIResource.getInstance().getColorBlue().equals( item.getForeground( j ) ) ) {
          data = null;
        }
        ValueMetaInterface sourceValueMeta = sourceRowMeta.getValueMeta( j + 2 );
        try {
          r[j + 2] = sourceValueMeta.convertDataUsingConversionMetaData( data );
        } catch ( Exception e ) {
          if ( isShowingConversionErrorsInline() ) {
            r[j + 2] = Const.getStackTracker( e );
          } else {
            throw e;
          }
        }
      }
      v.add( r );
    }
    // +2 skips the two color bookkeeping slots in front of each row.
    final int[] sortIndex = new int[]{ sortField + 2 };
    // Sort the vector!
    Collections.sort( v, new Comparator<Object[]>() {
      @Override
      public int compare( Object[] r1, Object[] r2 ) {
        try {
          return conversionRowMeta.compare( r1, r2, sortIndex );
        } catch ( KettleValueException e ) {
          throw new RuntimeException( "Error comparing rows", e );
        }
      }
    } );
    // Clear the table
    table.removeAll();
    // Refill the table
    for ( int i = 0; i < v.size(); i++ ) {
      Object[] r = v.get( i );
      TableItem item = new TableItem( table, SWT.NONE );
      String colorName = (String) r[0];
      Long colorValue = (Long) r[1];
      if ( colorValue != null ) {
        // Get it from the map
        //
        Color bg = usedColors.get( colorName );
        if ( bg != null ) {
          item.setBackground( bg );
        }
      }
      for ( int j = 2; j < r.length; j++ ) {
        String string = conversionRowMeta.getString( r, j );
        if ( showingBlueNullValues && string == null ) {
          string = "<null>";
          item.setForeground( j - 2, GUIResource.getInstance().getColorBlue() );
        } else {
          item.setForeground( j - 2, GUIResource.getInstance().getColorBlack() );
        }
        if ( string != null ) {
          item.setText( j - 2, string );
        }
      }
    }
    // Show the sort indicator on the column header.
    table.setSortColumn( table.getColumn( sortfield ) );
    table.setSortDirection( sortingDescending ? SWT.DOWN : SWT.UP );
    lastRowCount = table.getItemCount();
  } catch ( Exception e ) {
    new ErrorDialog( this.getShell(), BaseMessages.getString( PKG, "TableView.ErrorDialog.title" ), BaseMessages
      .getString( PKG, "TableView.ErrorDialog.description" ), e );
  }
}
private void selectRows( int from, int to ) {
  // Replace the current selection with the inclusive row range [from, to];
  // the endpoints may be given in either order.
  table.deselectAll();
  if ( from == to ) {
    table.select( from );
    return;
  }
  int lo = Math.min( from, to );
  int hi = Math.max( from, to );
  table.select( lo, hi );
}
private void applyTextChange( TableItem row, int rownr, int colnr ) {
  // Commit the text editor's value into the given cell, dispose the editor,
  // return focus to the table, record undo information and notify the
  // content-changed listener.
  String textData = getTextWidgetValue( colnr );
  row.setText( colnr, textData );
  text.dispose();
  table.setFocus();
  String[] afterEdit = getItemText( row );
  // beforeEdit was captured when the editor opened; pair it with the new state.
  checkChanged( new String[][]{ beforeEdit }, new String[][]{ afterEdit }, new int[]{ rownr } );
  selectionStart = -1;
  fireContentChangedListener( rownr, colnr, textData );
}
/**
 * Inform the content listener that content changed.
 *
 * @param rownr row index of the changed cell (carried in {@code event.x})
 * @param colnr column index of the changed cell (carried in {@code event.y})
 * @param textData the new cell text (carried in {@code event.data})
 */
private void fireContentChangedListener( int rownr, int colnr, String textData ) {
  if ( lsContent != null ) {
    Event event = new Event();
    event.data = textData;
    event.widget = table;
    event.x = rownr;
    event.y = colnr;
    lsContent.modifyText( new ModifyEvent( event ) );
  }
}
private void applyComboChange( TableItem row, int rownr, int colnr ) {
  // Combo counterpart of applyTextChange: commit the combo's value into the
  // cell, dispose the combo, record undo information and fire the
  // content-changed listener.
  String textData = combo.getText();
  row.setText( colnr, textData );
  combo.dispose();
  String[] afterEdit = getItemText( row );
  checkChanged( new String[][]{ beforeEdit }, new String[][]{ afterEdit }, new int[]{ rownr } );
  selectionStart = -1;
  fireContentChangedListener( rownr, colnr, textData );
}
public void addModifyListener( ModifyListener ls ) {
  // Register the (single) modify listener notified via setModified().
  // Note: this replaces any previously registered listener.
  lsMod = ls;
}
public void setColumnInfo( int idx, ColumnInfo col ) {
  // Replace the column descriptor at the given index (0-based into the
  // columns array, i.e. excluding the row-number column).
  columns[idx] = col;
}
public void setColumnText( int idx, String text ) {
  // Set the header text of the table column at the given SWT column index.
  TableColumn col = table.getColumn( idx );
  col.setText( text );
}
public void setColumnToolTip( int idx, String text ) {
  // Set the tooltip on the column descriptor (0-based, excluding row numbers).
  columns[idx].setToolTip( text );
}
private void editSelected() {
  // Start editing the active cell; when the row-number column (0) is active,
  // just (re)select the active row instead.
  if ( activeTableItem == null ) {
    return;
  }
  if ( activeTableColumn <= 0 ) {
    selectRows( activeTableRow, activeTableRow );
  } else {
    edit( activeTableRow, activeTableColumn );
  }
}
private void checkChanged( String[][] before, String[][] after, int[] index ) {
  // Did we change anything: if so, add undo information
  // NOTE(review): this relies solely on the 'fieldChanged' flag set by the
  // editor widgets' modify listeners; 'before' and 'after' are recorded but
  // never compared here — verify that is intentional.
  if ( fieldChanged ) {
    TransAction ta = new TransAction();
    ta.setChanged( before, after, index );
    addUndo( ta );
  }
}
private void setModified() {
  // Notify the registered modify listener (if any) that the table changed.
  if ( lsMod == null ) {
    return;
  }
  Event event = new Event();
  event.widget = this;
  lsMod.modifyText( new ModifyEvent( event ) );
}
private void insertRowBefore() {
  // Insert a blank row directly above the active row, record undo
  // information, renumber the rows and start editing the new row.
  if ( readonly ) {
    return;
  }
  TableItem activeRow = activeTableItem;
  if ( activeRow == null ) {
    return;
  }
  int rownr = table.indexOf( activeRow );
  TableItem newItem = new TableItem( table, SWT.NONE, rownr );
  newItem.setText( 1, "" );
  // Record the insertion for undo.
  TransAction action = new TransAction();
  action.setNew( new String[][]{ getItemText( newItem ) }, new int[]{ rownr } );
  addUndo( action );
  setRowNums();
  edit( rownr, 1 );
}
private void insertRowAfter() {
  // Insert a blank row directly below the active row, record undo
  // information, renumber the rows and start editing the new row.
  if ( readonly ) {
    return;
  }
  TableItem activeRow = activeTableItem;
  if ( activeRow == null ) {
    return;
  }
  int newRownr = table.indexOf( activeRow ) + 1;
  TableItem newItem = new TableItem( table, SWT.NONE, newRownr );
  newItem.setText( 1, "" );
  // Record the insertion for undo.
  TransAction action = new TransAction();
  action.setNew( new String[][]{ getItemText( newItem ) }, new int[]{ newRownr } );
  addUndo( action );
  setRowNums();
  edit( newRownr, 1 );
}
public void clearAll() {
  // Clear the whole table without asking the user for confirmation.
  clearAll( false );
}
public void clearAll( boolean ask ) {
  // Remove every row (optionally after a yes/no confirmation dialog),
  // leave one empty row behind, begin editing it when the table is
  // editable, and mark the table as modified.
  if ( ask ) {
    MessageBox mb = new MessageBox( parent.getShell(), SWT.YES | SWT.NO | SWT.ICON_QUESTION );
    mb.setMessage( BaseMessages.getString( PKG, "TableView.MessageBox.ClearTable.message" ) );
    mb.setText( BaseMessages.getString( PKG, "TableView.MessageBox.ClearTable.title" ) );
    if ( mb.open() != SWT.YES ) {
      return;
    }
  }
  table.removeAll();
  new TableItem( table, SWT.NONE );
  if ( !readonly ) {
    edit( 0, 1 );
  }
  this.setModified(); // timh
}
private void moveRows( int offset ) {
  // Move the selected rows one position up (offset == -1) or down
  // (offset == +1), recording undo information and keeping the selection
  // on the moved rows. Any other offset is ignored.
  if ( ( offset != 1 ) && ( offset != -1 ) ) {
    return;
  }
  int[] selectionIndicies = table.getSelectionIndices();
  // BUGFIX: with no selection the array is empty and the boundary checks
  // below ([0] / [length-1]) would throw ArrayIndexOutOfBoundsException
  // (e.g. Ctrl+Arrow with nothing selected).
  if ( selectionIndicies.length == 0 ) {
    return;
  }
  int selectedIndex = table.getSelectionIndex();
  // selectionIndicies is not guaranteed to be in any order so must sort
  // before using
  Arrays.sort( selectionIndicies );
  if ( offset == 1 ) {
    if ( selectionIndicies[selectionIndicies.length - 1] >= table.getItemCount() - 1 ) {
      // If the last row in the table is selected then don't move any rows
      // down
      return;
    }
    selectionIndicies = moveRowsDown( selectionIndicies );
  } else {
    if ( selectionIndicies[0] == 0 ) {
      // If the first row in the table is selected then don't move any rows up
      return;
    }
    selectionIndicies = moveRowsUp( selectionIndicies );
  }
  activeTableRow = selectedIndex + offset;
  table.setSelection( activeTableRow );
  table.setSelection( selectionIndicies );
  activeTableItem = table.getItem( activeTableRow );
}
private int[] moveRowsDown( int[] selectionIndicies ) {
  // Shift each selected row one position down, starting from the bottom-most
  // selected row so earlier swaps don't disturb the rows still to move.
  // Each swap gets its own undo entry. Returns the updated indices.
  for ( int i = selectionIndicies.length - 1; i >= 0; i-- ) {
    int sourceRow = selectionIndicies[i];
    int targetRow = sourceRow + 1;
    moveRow( sourceRow, targetRow );
    TransAction action = new TransAction();
    action.setItemMove( new int[]{ sourceRow }, new int[]{ targetRow } );
    addUndo( action );
    selectionIndicies[i] = targetRow;
  }
  return selectionIndicies;
}
private int[] moveRowsUp( int[] selectionIndicies ) {
  // Shift each selected row one position up, starting from the top-most
  // selected row so earlier swaps don't disturb the rows still to move.
  // Each swap gets its own undo entry. Returns the updated indices.
  for ( int i = 0; i < selectionIndicies.length; i++ ) {
    int sourceRow = selectionIndicies[i];
    int targetRow = sourceRow - 1;
    moveRow( sourceRow, targetRow );
    TransAction action = new TransAction();
    action.setItemMove( new int[]{ sourceRow }, new int[]{ targetRow } );
    addUndo( action );
    selectionIndicies[i] = targetRow;
  }
  return selectionIndicies;
}
private void moveRow( int from, int to ) {
  // Swap the cell contents of two rows (the TableItem objects themselves
  // stay in place; only their texts are exchanged). Column 0 is the
  // row-number column, hence the +1 offset. Marks the table modified.
  TableItem rowFrom = table.getItem( from );
  TableItem rowTo = table.getItem( to );
  String[] textsFrom = getItemText( rowFrom );
  String[] textsTo = getItemText( rowTo );
  for ( int i = 0; i < textsFrom.length; i++ ) {
    rowFrom.setText( i + 1, textsTo[i] );
    rowTo.setText( i + 1, textsFrom[i] );
  }
  setModified();
}
private void copyToAll() {
  // Copy the value of the active cell into the same column of every row,
  // recording a single undo action covering the whole change. Does nothing
  // when there is no active cell or the row-number column (0) is active.
  TableItem activeRow = activeTableItem;
  if ( activeRow == null || activeRow.isDisposed() ) {
    return;
  }
  int colnr = activeTableColumn;
  if ( colnr == 0 ) {
    return;
  }
  String value = activeRow.getText( colnr );
  // Capture before/after snapshots of every row for undo.
  int size = table.getItemCount();
  String[][] before = new String[size][];
  String[][] after = new String[size][];
  int[] index = new int[size];
  for ( int i = 0; i < size; i++ ) {
    TableItem item = table.getItem( i );
    index[i] = i;
    before[i] = getItemText( item );
    item.setText( colnr, value );
    after[i] = getItemText( item );
  }
  TransAction action = new TransAction();
  action.setChanged( before, after, index );
  addUndo( action );
}
private void selectAll() {
  // Select every row in the table.
  table.selectAll();
}
private void unselectAll() {
  // Clear the row selection.
  table.deselectAll();
}
private void clipSelected() {
  // Copy the selected rows (preceded by a header line) to the system
  // clipboard as delimited text. Does nothing when no rows are selected.
  if ( clipboard != null ) {
    clipboard.dispose();
    clipboard = null;
  }
  clipboard = new Clipboard( getDisplay() );
  TextTransfer transfer = TextTransfer.getInstance();
  String clip = getSelectedText();
  if ( clip == null ) {
    return;
  }
  clipboard.setContents( new String[]{ clip }, new Transfer[]{ transfer } );
}
private String getSelectedText() {
  // Build a delimited text block for the clipboard / drag-and-drop: one
  // header line with the column titles, then one line per selected row.
  // Column 0 (the row numbers) is skipped. Returns null when no rows are
  // selected.
  TableItem[] items = table.getSelection();
  if ( items.length == 0 ) {
    return null;
  }
  // PERF: use a StringBuilder — the original built the result with repeated
  // String concatenation in nested loops, which is O(n^2) in the output size.
  StringBuilder selection = new StringBuilder();
  for ( int c = 1; c < table.getColumnCount(); c++ ) {
    if ( c > 1 ) {
      selection.append( CLIPBOARD_DELIMITER );
    }
    selection.append( table.getColumn( c ).getText() );
  }
  selection.append( Const.CR );
  for ( TableItem item : items ) {
    for ( int c = 1; c < table.getColumnCount(); c++ ) {
      if ( c > 1 ) {
        selection.append( CLIPBOARD_DELIMITER );
      }
      selection.append( item.getText( c ) );
    }
    selection.append( Const.CR );
  }
  return selection.toString();
}
/*
* Example: ----------------------------------------------------------------- Field in stream;Dimension field
* TIME;TIME DATA_TYPE;DATA_TYPE MAP_TYPE;MAP_TYPE RESOLUTION;RESOLUTION START_TIME;START_TIME
* -----------------------------------------------------------------
*
* !! Paste at the end of the table! --> Create new table item for every line
*/
private int getCurrentRownr() {
  // Index of the active row, or 0 when the table has at most one row,
  // there is no active row, or the active row is no longer in the table.
  if ( table.getItemCount() <= 1 ) {
    return 0;
  }
  TableItem activeRow = activeTableItem;
  if ( activeRow == null ) {
    return 0;
  }
  return Math.max( 0, table.indexOf( activeRow ) );
}
private void pasteSelected() {
  // Paste clipboard text below the current row and record undo information.
  // Expected format: a header line, then one row per line with tab-separated
  // column values (see the format example comment above).
  int rownr = getCurrentRownr();
  if ( clipboard != null ) {
    clipboard.dispose();
    clipboard = null;
  }
  clipboard = new Clipboard( getDisplay() );
  TextTransfer tran = TextTransfer.getInstance();
  String text = (String) clipboard.getContents( tran );
  if ( text != null ) {
    String[] lines = text.split( Const.CR );
    if ( lines.length > 1 ) {
      // Allocate complete paste grid! The first line (index 0) is the
      // header and is skipped.
      String[][] grid = new String[lines.length - 1][];
      int[] idx = new int[lines.length - 1];
      for ( int i = 1; i < lines.length; i++ ) {
        grid[i - 1] = lines[i].split( "\t" );
        idx[i - 1] = rownr + i;
        addItem( idx[i - 1], grid[i - 1] );
      }
      // One undo action covering the whole paste.
      TransAction ta = new TransAction();
      ta.setNew( grid, idx );
      addUndo( ta );
    }
    if ( rownr == 0 && table.getItemCount() > rownr + 1 ) {
      // Empty row at rownr?
      // Remove it!
      if ( isEmpty( rownr, -1 ) ) {
        table.remove( rownr );
      }
    }
    setRowNums();
    unEdit();
    setModified();
  }
}
private void addItem( int pos, String[] str ) {
  // Insert a new row at the given position and fill its cells. Column 0 is
  // the row-number column, so values start at column 1. Marks the table
  // modified.
  TableItem item = new TableItem( table, SWT.NONE, pos );
  int col = 1;
  for ( String value : str ) {
    item.setText( col++, value );
  }
  setModified();
}
private void cutSelected() {
  // Cut = copy to clipboard, then delete.
  clipSelected(); // copy selected lines to clipboard
  delSelected();
}
private void delSelected() {
  // Delete the selected rows, recording undo information, and restore the
  // cursor near its previous position. The table is never left empty: a
  // blank row is inserted (with its own undo entry) when everything is gone.
  if ( nrNonEmpty() == 0 ) {
    return;
  }
  // Which items do we delete?
  int[] items = table.getSelectionIndices();
  if ( items.length == 0 ) {
    return;
  }
  // Save undo information
  String[][] before = new String[items.length][];
  for ( int i = 0; i < items.length; i++ ) {
    TableItem ti = table.getItem( items[i] );
    before[i] = getItemText( ti );
  }
  TransAction ta = new TransAction();
  ta.setDelete( before, items );
  addUndo( ta );
  TableItem row = activeTableItem;
  if ( row == null ) {
    return;
  }
  // Remember where the cursor was so we can restore it after the delete.
  int rowbefore = table.indexOf( row );
  // Delete selected items.
  table.remove( items );
  if ( table.getItemCount() == 0 ) {
    TableItem item = new TableItem( table, SWT.NONE );
    // Save undo information!
    String[] stritem = getItemText( item );
    ta = new TransAction();
    ta.setNew( new String[][]{ stritem }, new int[]{ 0 } );
    addUndo( ta );
  }
  // If the last row is gone, put the selection back on last-1!
  if ( rowbefore >= table.getItemCount() ) {
    rowbefore = table.getItemCount() - 1;
  }
  // After the delete, we put the cursor on the same row as before (if we can)
  if ( rowbefore < table.getItemCount() && table.getItemCount() > 0 ) {
    setPosition( rowbefore, 1 );
    table.setSelection( rowbefore );
    activeTableRow = rowbefore;
  }
  setRowNums();
  setModified();
}
private void keepSelected() {
  // Keep only the selected rows: delete every unselected row, recording a
  // single undo action. When everything is selected there is nothing to do.
  // The table is never left empty: a blank row is inserted if needed.
  int[] sels = table.getSelectionIndices();
  int size = table.getItemCount();
  // Which items do we delete? Exactly the unselected ones.
  int[] items = new int[size - sels.length];
  if ( items.length == 0 ) {
    return; // everything is selected: keep everything, do nothing.
  }
  // PERF: mark the selected indices in a boolean mask so the complement is
  // computed in O(n + m) instead of the original O(n * m) nested scan.
  boolean[] selected = new boolean[size];
  for ( int sel : sels ) {
    if ( sel >= 0 && sel < size ) {
      selected[sel] = true;
    }
  }
  int nr = 0;
  for ( int i = 0; i < size; i++ ) {
    if ( !selected[i] ) {
      items[nr] = i;
      nr++;
    }
  }
  // Save undo information for the rows about to be removed.
  String[][] before = new String[items.length][];
  for ( int i = 0; i < items.length; i++ ) {
    TableItem ti = table.getItem( items[i] );
    before[i] = getItemText( ti );
  }
  TransAction ta = new TransAction();
  ta.setDelete( before, items );
  addUndo( ta );
  // Delete the unselected items.
  table.remove( items );
  if ( table.getItemCount() == 0 ) {
    TableItem item = new TableItem( table, SWT.NONE );
    // Save undo information!
    String[] stritem = getItemText( item );
    ta = new TransAction();
    ta.setNew( new String[][]{ stritem }, new int[]{ 0 } );
    addUndo( ta );
  }
  setRowNums();
  setModified();
}
/**
 * Records the active (cursor) cell position and caches the active table item.
 *
 * @param rownr row index of the active cell (may be negative; then only the
 *              column/row fields are updated, not the cached item)
 * @param colnr column index of the active cell
 */
private void setPosition( int rownr, int colnr ) {
  activeTableColumn = colnr;
  activeTableRow = rownr;
  if ( rownr >= 0 ) {
    activeTableItem = table.getItem( rownr );
  }
}
/**
 * Starts editing the given cell, pre-selecting its current content.
 *
 * @param rownr row index of the cell to edit
 * @param colnr column index of the cell to edit (1-based; column 0 is the row number)
 */
public void edit( int rownr, int colnr ) {
  setPosition( rownr, colnr );
  edit( rownr, colnr, true, (char) 0 );
}
/**
 * Starts editing the cell at (rownr, colnr), picking the editor widget
 * (text, combo or button) based on the column type.
 *
 * @param rownr      row index
 * @param colnr      column index (1-based; column 0 holds the row number)
 * @param selectText whether to pre-select the current cell content
 * @param extra      extra character to append to the content, e.g. the key
 *                   stroke that started the edit (0 for none)
 */
private void edit( int rownr, int colnr, boolean selectText, char extra ) {
  selectionStart = -1;
  TableItem row = table.getItem( rownr );
  // Dispose of any editor widget left over from a previous edit.
  Control oldEditor = editor.getEditor();
  if ( oldEditor != null && !oldEditor.isDisposed() ) {
    try {
      oldEditor.dispose();
    } catch ( SWTException swte ) {
      // Eat "Widget Is Disposed Exception" : did you ever!!!
    }
  }
  activeTableItem = table.getItem( activeTableRow ); // just to make sure, clean
  // up afterwards.
  table.showItem( row );
  table.setSelection( new TableItem[]{ row } );
  if ( columns.length == 0 ) {
    return; // no columns defined: nothing to edit
  }
  // Dispatch to the appropriate editor for the column type.
  switch ( columns[colnr - 1].getType() ) {
    case ColumnInfo.COLUMN_TYPE_TEXT:
      isTextButton = false;
      editText( row, rownr, colnr, selectText, extra, columns[colnr - 1] );
      break;
    case ColumnInfo.COLUMN_TYPE_CCOMBO:
    case ColumnInfo.COLUMN_TYPE_FORMAT:
      editCombo( row, rownr, colnr );
      break;
    case ColumnInfo.COLUMN_TYPE_BUTTON:
      editButton( row, rownr, colnr );
      break;
    case ColumnInfo.COLUMN_TYPE_TEXT_BUTTON:
      if ( columns[colnr - 1].shouldRenderTextVarButton() ) {
        isTextButton = true;
      } else {
        isTextButton = false;
      }
      editText( row, rownr, colnr, selectText, extra, columns[colnr - 1] );
      break;
    default:
      break;
  }
}
/**
 * Returns the data-cell values of a row (skipping column 0, which holds the
 * row number), or null when the row widget has already been disposed.
 *
 * @param row the table row to read
 * @return the cell texts of the data columns, or null for a disposed row
 */
private String[] getItemText( TableItem row ) {
  if ( row.isDisposed() ) {
    return null;
  }
  int nrDataColumns = table.getColumnCount() - 1;
  String[] values = new String[nrDataColumns];
  for ( int col = 0; col < nrDataColumns; col++ ) {
    values[col] = row.getText( col + 1 );
  }
  return values;
}
/**
 * Opens an in-place text editor over the cell at (rownr, colnr). Depending on
 * the column configuration, a plain SWT Text or a variable-aware TextVar
 * (optionally password-masked or carrying an extra button) is used.
 *
 * @param row        the table row being edited
 * @param rownr      row index of the cell
 * @param colnr      column index of the cell (1-based)
 * @param selectText whether to pre-select the existing cell content
 * @param extra      extra character to append to the content (0 for none)
 * @param columnInfo column metadata for the cell
 */
private void editText( TableItem row, final int rownr, final int colnr, boolean selectText, char extra,
  ColumnInfo columnInfo ) {
  beforeEdit = getItemText( row ); // snapshot for undo / change detection
  fieldChanged = false;
  ColumnInfo colinfo = columns[colnr - 1];
  // Read-only or dynamically disabled cells cannot be edited.
  if ( colinfo.isReadOnly() ) {
    return;
  }
  if ( colinfo.getDisabledListener() != null ) {
    boolean disabled = colinfo.getDisabledListener().isFieldDisabled( rownr );
    if ( disabled ) {
      return;
    }
  }
  // Dispose of any previous text editor widget.
  if ( text != null && !text.isDisposed() ) {
    text.dispose();
  }
  // A selection adapter takes over the whole edit (e.g. opens a dialog).
  if ( colinfo.getSelectionAdapter() != null ) {
    Event e = new Event();
    e.widget = this;
    e.x = colnr;
    e.y = rownr;
    columns[colnr - 1].getSelectionAdapter().widgetSelected( new SelectionEvent( e ) );
    return;
  }
  String content = row.getText( colnr ) + ( extra != 0 ? "" + extra : "" );
  String tooltip = columns[colnr - 1].getToolTip();
  final boolean useVariables = columns[colnr - 1].isUsingVariables();
  final boolean passwordField = columns[colnr - 1].isPasswordField();
  // Grow the column while the user types so the text stays visible.
  final ModifyListener modifyListener = new ModifyListener() {
    @Override
    public void modifyText( ModifyEvent me ) {
      setColumnWidthBasedOnTextField( colnr, useVariables );
    }
  };
  if ( useVariables ) {
    GetCaretPositionInterface getCaretPositionInterface = new GetCaretPositionInterface() {
      @Override
      public int getCaretPosition() {
        return ( (TextVar) text ).getTextWidget().getCaretPosition();
      }
    };
    // The text widget will be disposed when we get here
    // So we need to write to the table row
    //
    InsertTextInterface insertTextInterface = new InsertTextInterface() {
      @Override
      public void insertText( String string, int position ) {
        StringBuilder buffer = new StringBuilder( table.getItem( rownr ).getText( colnr ) );
        buffer.insert( position, string );
        table.getItem( rownr ).setText( colnr, buffer.toString() );
        int newPosition = position + string.length();
        // Re-open the editor and restore the caret after the inserted text.
        edit( rownr, colnr );
        ( (TextVar) text ).setSelection( newPosition );
        ( (TextVar) text ).showSelection();
        setColumnWidthBasedOnTextField( colnr, useVariables );
      }
    };
    // Pick the TextVar flavor: password-masked, with button, or plain.
    final TextVar textWidget;
    if ( passwordField ) {
      textWidget = new PasswordTextVar( variables, table, SWT.NONE, getCaretPositionInterface, insertTextInterface );
    } else if ( isTextButton ) {
      textWidget =
        new TextVarButton( variables, table, SWT.NONE, getCaretPositionInterface, insertTextInterface,
          columnInfo.getTextVarButtonSelectionListener() );
    } else {
      textWidget = new TextVar( variables, table, SWT.NONE, getCaretPositionInterface, insertTextInterface );
    }
    text = textWidget;
    textWidget.setText( content );
    if ( lsMod != null ) {
      textWidget.addModifyListener( lsMod );
    }
    textWidget.addModifyListener( lsUndo );
    textWidget.setSelection( content.length() );
    // last_carret_position = content.length();
    textWidget.addKeyListener( lsKeyText );
    // Make the column larger so we can still see the string we're entering...
    textWidget.addModifyListener( modifyListener );
    if ( selectText ) {
      textWidget.selectAll();
    }
    if ( tooltip != null ) {
      textWidget.setToolTipText( tooltip );
    } else {
      textWidget.setToolTipText( "" );
    }
    textWidget.addTraverseListener( lsTraverse );
    textWidget.addFocusListener( lsFocusText );
  } else {
    // Plain text editor (no variable substitution support).
    Text textWidget = new Text( table, SWT.NONE );
    text = textWidget;
    textWidget.setText( content );
    if ( lsMod != null ) {
      textWidget.addModifyListener( lsMod );
    }
    textWidget.addModifyListener( lsUndo );
    textWidget.setSelection( content.length() );
    // last_carret_position = content.length();
    textWidget.addKeyListener( lsKeyText );
    // Make the column larger so we can still see the string we're entering...
    textWidget.addModifyListener( modifyListener );
    if ( selectText ) {
      textWidget.selectAll();
    }
    if ( tooltip != null ) {
      textWidget.setToolTipText( tooltip );
    } else {
      textWidget.setToolTipText( "" );
    }
    textWidget.addTraverseListener( lsTraverse );
    textWidget.addFocusListener( lsFocusText );
  }
  props.setLook( text, Props.WIDGET_STYLE_TABLE );
  int width = tablecolumn[colnr].getWidth();
  int height = 30;
  editor.horizontalAlignment = SWT.LEFT;
  editor.grabHorizontal = true;
  // Open the text editor in the correct column of the selected row.
  editor.setEditor( text, row, colnr );
  text.setFocus();
  text.setSize( width, height );
  editor.layout();
}
/**
 * Widens the column so the text currently typed in the cell editor stays
 * fully visible, then restores the caret position (changing the width causes
 * a focus round-trip that selects all text on some platforms).
 *
 * @param colnr        the column being edited
 * @param useVariables whether the active editor is a TextVar (vs plain Text)
 */
private void setColumnWidthBasedOnTextField( final int colnr, final boolean useVariables ) {
  String str = getTextWidgetValue( colnr );
  // Measured pixel width of the typed text, plus a small margin.
  int strmax = TableView.dummyGC.textExtent( str, SWT.DRAW_TAB | SWT.DRAW_DELIMITER ).x + 20;
  int colmax = tablecolumn[colnr].getWidth();
  if ( strmax > colmax ) {
    if ( Const.isOSX() || Const.isLinux() ) {
      strmax *= 1.4;
    }
    tablecolumn[colnr].setWidth( strmax + 30 );
    // On linux, this causes the text to select everything...
    // This is because the focus is lost and re-gained. Nothing we can do
    // about it now. Restore the caret manually instead:
    if ( useVariables ) {
      TextVar widget = (TextVar) text;
      int idx = widget.getTextWidget().getCaretPosition();
      widget.selectAll();
      widget.showSelection();
      widget.setSelection( 0 );
      widget.showSelection();
      widget.setSelection( idx );
    } else {
      Text widget = (Text) text;
      int idx = widget.getCaretPosition();
      widget.selectAll();
      widget.showSelection();
      widget.setSelection( 0 );
      widget.showSelection();
      widget.setSelection( idx );
    }
  }
}
/**
 * Returns the drop-down values for a combo cell. For FORMAT columns the list
 * depends on the value type found in the row's type column; for all other
 * combo columns the statically configured values are returned.
 *
 * @param row     the row being edited (used to look up the value type)
 * @param colinfo the column metadata
 * @return the combo entries to offer
 */
private String[] getComboValues( TableItem row, ColumnInfo colinfo ) {
  if ( colinfo.getType() != ColumnInfo.COLUMN_TYPE_FORMAT ) {
    return colinfo.getComboValues();
  }
  int type = ValueMetaFactory.getIdForValueMeta( row.getText( colinfo.getFieldTypeColumn() ) );
  switch ( type ) {
    case ValueMetaInterface.TYPE_DATE:
      return Const.getDateFormats();
    case ValueMetaInterface.TYPE_INTEGER:
    case ValueMetaInterface.TYPE_BIGNUMBER:
    case ValueMetaInterface.TYPE_NUMBER:
      return Const.getNumberFormats();
    case ValueMetaInterface.TYPE_STRING:
      return Const.getConversionFormats();
    default:
      return new String[0];
  }
}
/**
 * Opens an in-place combo-box editor over the cell at (rownr, colnr).
 *
 * @param row   the table row being edited
 * @param rownr row index of the cell
 * @param colnr column index of the cell (1-based)
 */
private void editCombo( TableItem row, int rownr, int colnr ) {
  beforeEdit = getItemText( row ); // snapshot for undo / change detection
  fieldChanged = false;
  ColumnInfo colinfo = columns[colnr - 1];
  // A read-only column with a selection adapter handles the edit itself.
  if ( colinfo.isReadOnly() && colinfo.getSelectionAdapter() != null ) {
    return;
  }
  if ( colinfo.getDisabledListener() != null ) {
    boolean disabled = colinfo.getDisabledListener().isFieldDisabled( rownr );
    if ( disabled ) {
      return;
    }
  }
  combo = new CCombo( table, colinfo.isReadOnly() ? SWT.READ_ONLY : SWT.NONE );
  props.setLook( combo, Props.WIDGET_STYLE_TABLE );
  combo.addTraverseListener( lsTraverse );
  combo.addModifyListener( lsModCombo );
  combo.addFocusListener( lsFocusCombo );
  // Populate the drop-down; a dynamic listener overrides the static values.
  String[] opt = getComboValues( row, colinfo );
  if ( colinfo.getComboValuesSelectionListener() != null ) {
    opt = colinfo.getComboValuesSelectionListener().getComboValues( row, rownr, colnr );
  }
  combo.setItems( opt );
  combo.setVisibleItemCount( opt.length );
  combo.setText( row.getText( colnr ) );
  if ( lsMod != null ) {
    combo.addModifyListener( lsMod );
  }
  combo.addModifyListener( lsUndo );
  combo.setToolTipText( colinfo.getToolTip() == null ? "" : colinfo.getToolTip() );
  combo.setVisible( true );
  combo.addKeyListener( lsKeyCombo );
  if ( colinfo.getSelectionAdapter() != null ) {
    combo.addSelectionListener( columns[colnr - 1].getSelectionAdapter() );
  }
  editor.horizontalAlignment = SWT.LEFT;
  editor.layout();
  // Open the text editor in the correct column of the selected row.
  editor.setEditor( combo, row, colnr );
  combo.setFocus();
  combo.layout();
}
/**
 * Opens an in-place push-button editor over the cell at (rownr, colnr); the
 * button triggers the column's selection adapter.
 *
 * @param row   the table row being edited
 * @param rownr row index of the cell
 * @param colnr column index of the cell (1-based)
 */
private void editButton( TableItem row, int rownr, int colnr ) {
  beforeEdit = getItemText( row ); // snapshot for undo / change detection
  fieldChanged = false;
  ColumnInfo colinfo = columns[colnr - 1];
  if ( colinfo.isReadOnly() ) {
    return;
  }
  if ( colinfo.getDisabledListener() != null ) {
    boolean disabled = colinfo.getDisabledListener().isFieldDisabled( rownr );
    if ( disabled ) {
      return;
    }
  }
  button = new Button( table, SWT.PUSH );
  props.setLook( button, Props.WIDGET_STYLE_TABLE );
  String buttonText = columns[colnr - 1].getButtonText();
  if ( buttonText != null ) {
    button.setText( buttonText );
  }
  button.setImage( GUIResource.getInstance().getImage( "ui/images/edittext.svg" ) );
  SelectionListener selAdpt = colinfo.getSelectionAdapter();
  if ( selAdpt != null ) {
    button.addSelectionListener( selAdpt );
  }
  // Remember the cell the button belongs to (see getButtonPosition()).
  buttonRownr = rownr;
  buttonColnr = colnr;
  // button.addTraverseListener(lsTraverse);
  buttonContent = row.getText( colnr );
  String tooltip = columns[colnr - 1].getToolTip();
  if ( tooltip != null ) {
    button.setToolTipText( tooltip );
  } else {
    button.setToolTipText( "" );
  }
  button.addTraverseListener( lsTraverse ); // hop to next field
  button.addTraverseListener( new TraverseListener() {
    @Override
    public void keyTraversed( TraverseEvent arg0 ) {
      closeActiveButton();
    }
  } );
  editor.horizontalAlignment = SWT.LEFT;
  editor.verticalAlignment = SWT.TOP;
  editor.grabHorizontal = false;
  editor.grabVertical = false;
  Point size = button.computeSize( SWT.DEFAULT, SWT.DEFAULT );
  editor.minimumWidth = size.x;
  editor.minimumHeight = size.y - 2;
  // setRowNums();
  editor.layout();
  // Open the text editor in the correct column of the selected row.
  editor.setEditor( button );
  button.setFocus();
  // if the button loses focus, destroy it...
  /*
   * button.addFocusListener(new FocusAdapter() { public void focusLost(FocusEvent e) { button.dispose(); } } );
   */
}
/**
 * Renumbers column 0 of every row with its 1-based position, only writing
 * cells whose displayed number is stale (to avoid needless repaints).
 */
public void setRowNums() {
  int rowCount = table.getItemCount();
  for ( int row = 0; row < rowCount; row++ ) {
    TableItem item = table.getItem( row );
    if ( item == null ) {
      continue;
    }
    String num = "" + ( row + 1 );
    if ( !num.equals( item.getText( 0 ) ) ) {
      item.setText( 0, num );
    }
  }
}
/**
 * Optimizes all column widths, considering every row in the table.
 *
 * @param header whether the header text should also be measured
 */
public void optWidth( boolean header ) {
  optWidth( header, 0 );
}
/**
 * Optimizes each column's width to fit its content (and optionally its
 * header), measuring at most {@code nrLines} rows.
 *
 * @param header  whether the header text should also be measured
 * @param nrLines maximum number of rows to measure; 0 or negative means all rows
 */
public void optWidth( boolean header, int nrLines ) {
  for ( int c = 0; c < table.getColumnCount(); c++ ) {
    TableColumn tc = table.getColumn( c );
    int max = 0;
    if ( header ) {
      max = TableView.dummyGC.textExtent( tc.getText(), SWT.DRAW_TAB | SWT.DRAW_DELIMITER ).x;
      // Check if the column has a sorted mark set. In that case, we need the
      // header to be a bit wider...
      if ( c == sortfield && sortable ) {
        max += 15;
      }
    }
    // Collect the distinct strings that must fit in this column.
    Set<String> columnStrings = new HashSet<String>();
    boolean haveToGetTexts = false;
    if ( c > 0 ) {
      final ColumnInfo column = columns[c - 1];
      if ( column != null ) {
        switch ( column.getType() ) {
          case ColumnInfo.COLUMN_TYPE_TEXT:
            haveToGetTexts = true;
            break;
          case ColumnInfo.COLUMN_TYPE_CCOMBO:
          case ColumnInfo.COLUMN_TYPE_FORMAT:
            haveToGetTexts = true;
            if ( column.getComboValues() != null ) {
              for ( String comboValue : columns[c - 1].getComboValues() ) {
                columnStrings.add( comboValue );
              }
            }
            break;
          case ColumnInfo.COLUMN_TYPE_BUTTON:
            columnStrings.add( column.getButtonText() );
            break;
          default:
            break;
        }
      }
    } else {
      haveToGetTexts = true; // column 0: the row-number column
    }
    if ( haveToGetTexts ) {
      for ( int r = 0; r < table.getItemCount() && ( r < nrLines || nrLines <= 0 ); r++ ) {
        TableItem ti = table.getItem( r );
        if ( ti != null ) {
          columnStrings.add( ti.getText( c ) );
        }
      }
    }
    // Measure the widest string.
    for ( String str : columnStrings ) {
      int len = TableView.dummyGC.textExtent( str == null ? "" : str, SWT.DRAW_TAB | SWT.DRAW_DELIMITER ).x;
      if ( len > max ) {
        max = len;
      }
    }
    try {
      int extra = 15;
      if ( Const.isWindows() || Const.isLinux() ) {
        extra += 15;
      }
      // Platform specific code not needed any more with current version SWT
      // if (Const.isOSX() || Const.isLinux()) max*=1.25;
      if ( tc.getWidth() != max + extra ) {
        if ( c > 0 ) {
          // A fixed column width (>= 0) always wins over the computed width.
          if ( columns[c - 1].getWidth() == -1 ) {
            tc.setWidth( max + extra );
          } else {
            tc.setWidth( columns[c - 1].getWidth() );
          }
        } else if ( addIndexColumn ) {
          // BUGFIX: the old code evaluated columns[c - 1] (= columns[-1]) for
          // c == 0, throwing an ArrayIndexOutOfBoundsException that was
          // silently swallowed below, so the index column was never resized.
          tc.setWidth( max + extra );
        }
      }
    } catch ( Exception e ) {
      // Ignore errors
    }
  }
  unEdit();
}
/*
 * Remove empty rows in the table...
 */
public void removeEmptyRows() {
  // -1: a row counts as empty only when ALL of its data columns are empty.
  removeEmptyRows( -1 );
}
/**
 * Checks whether a row is empty.
 *
 * @param rownr the row to inspect
 * @param colnr when &gt;= 0, only that column decides emptiness; when negative,
 *              the row is empty only if ALL data columns (1..n) are empty
 * @return true if the row is considered empty; false for a missing row
 */
private boolean isEmpty( int rownr, int colnr ) {
  TableItem item = table.getItem( rownr );
  if ( item == null ) {
    return false;
  }
  if ( colnr >= 0 ) {
    String str = item.getText( colnr );
    return str == null || str.length() == 0;
  }
  // Negative column: every data column has to be empty.
  for ( int j = 1; j < table.getColumnCount(); j++ ) {
    String str = item.getText( j );
    if ( str != null && str.length() > 0 ) {
      return false;
    }
  }
  return true;
}
/**
 * Removes every empty row (as judged by {@link #isEmpty(int, int)} for the
 * given column), ensuring at least one blank row always remains.
 *
 * @param column the column that decides emptiness, or -1 for all data columns
 */
public void removeEmptyRows( int column ) {
  // Walk bottom-up so removals do not shift the indexes still to visit.
  for ( int row = table.getItemCount() - 1; row >= 0; row-- ) {
    if ( isEmpty( row, column ) ) {
      table.remove( row );
    }
  }
  if ( table.getItemCount() == 0 ) { // At least one empty row!
    new TableItem( table, SWT.NONE );
  }
}
// Cache of non-empty row indexes; (re)built by nrNonEmpty() and read by getNonEmpty().
private List<Integer> nonEmptyIndexes;
/**
 * Counts the non-empty rows and rebuilds the cached index list used by
 * getNonEmpty(int). IMPORTANT: always call this method before calling
 * getNonEmpty(int selnr): for performance reasons we cache the row indexes.
 *
 * @return the number of rows/table-items that are not empty
 */
public int nrNonEmpty() {
  nonEmptyIndexes = new ArrayList<Integer>();
  int rowCount = table.getItemCount();
  for ( int row = 0; row < rowCount; row++ ) {
    if ( !isEmpty( row, -1 ) ) {
      nonEmptyIndexes.add( row );
    }
  }
  return nonEmptyIndexes.size();
}
/**
 * Returns the n-th non-empty row. IMPORTANT: the indexes of the non-empty
 * rows are populated by nrNonEmpty() — make sure to call that first.
 *
 * @param index the index among the non-empty rows
 * @return the requested non-empty row/table-item
 */
public TableItem getNonEmpty( int index ) {
  return table.getItem( nonEmptyIndexes.get( index ) );
}
/**
 * Looks up a string (case-insensitively) in a column, considering only
 * non-empty rows.
 *
 * @param str    the value to find
 * @param column the column to search
 * @return the index among the non-empty rows, or -1 when not found
 */
public int indexOfString( String str, int column ) {
  int nonEmptyCount = nrNonEmpty();
  for ( int idx = 0; idx < nonEmptyCount; idx++ ) {
    if ( str.equalsIgnoreCase( getNonEmpty( idx ).getText( column ) ) ) {
      return idx;
    }
  }
  return -1;
}
/** Delegates to the embedded table's horizontal scroll bar. */
@Override
public ScrollBar getHorizontalBar() {
  return table.getHorizontalBar();
}

/** Delegates to the embedded table's vertical scroll bar. */
@Override
public ScrollBar getVerticalBar() {
  return table.getVerticalBar();
}
/**
 * Appends an action to the undo history: discards any redo entries beyond
 * the current position, enforces the configured maximum depth, and updates
 * the undo/redo menu items.
 *
 * @param ta the transaction to record
 */
private void addUndo( TransAction ta ) {
  // Drop everything past the current position (pending redo entries).
  while ( undo.size() > undoPosition + 1 && !undo.isEmpty() ) {
    undo.remove( undo.size() - 1 );
  }
  undo.add( ta );
  undoPosition++;
  // Trim the history down to the configured maximum depth.
  while ( undo.size() > props.getMaxUndo() ) {
    undo.remove( 0 );
    undoPosition--;
  }
  setUndoMenu();
}
/**
 * Undoes the most recent recorded action (row creation, deletion, change or
 * move), adjusting the cursor row so it tracks the affected rows.
 */
private void undoAction() {
  TransAction ta = previousUndo();
  if ( ta == null ) {
    return;
  }
  // Get the current cursor position
  int rownr = getCurrentRownr();
  setUndoMenu(); // something changed: change the menu
  switch ( ta.getType() ) {
    //
    // NEW
    //
    // We created a table item: undo this...
    case TransAction.TYPE_ACTION_NEW_TABLEITEM:
      int[] idx = ta.getCurrentIndex();
      table.remove( idx );
      for ( int i = 0; i < idx.length; i++ ) {
        if ( idx[i] < rownr ) {
          rownr--; // shift with the rest.
        }
      }
      // See if the table is empty, if so : undo again!!
      if ( table.getItemCount() == 0 ) {
        undoAction();
      }
      setRowNums();
      break;
    //
    // DELETE
    //
    // un-Delete the rows at correct location: re-insert
    case TransAction.TYPE_ACTION_DELETE_TABLEITEM:
      idx = ta.getCurrentIndex();
      String[][] str = (String[][]) ta.getCurrent();
      for ( int i = 0; i < idx.length; i++ ) {
        addItem( idx[i], str[i] );
        if ( idx[i] <= rownr ) {
          rownr++;
        }
      }
      setRowNums();
      break;
    //
    // CHANGE
    //
    // Change the item back to the original row-value.
    case TransAction.TYPE_ACTION_CHANGE_TABLEITEM:
      idx = ta.getCurrentIndex();
      String[][] prev = (String[][]) ta.getPrevious();
      for ( int x = 0; x < idx.length; x++ ) {
        TableItem item = table.getItem( idx[x] );
        for ( int i = 0; i < prev[x].length; i++ ) {
          item.setText( i + 1, prev[x][i] );
        }
      }
      break;
    //
    // POSITION
    //
    // The position of a row has changed...
    case TransAction.TYPE_ACTION_POSITION_TABLEITEM:
      int[] curr = ta.getCurrentIndex();
      int[] prevIdx = ta.getPreviousIndex();
      for ( int i = 0; i < curr.length; i++ ) {
        moveRow( prevIdx[i], curr[i] );
      }
      setRowNums();
      break;
    default:
      break;
  }
  // Clamp the cursor to a valid row and re-select it.
  if ( rownr >= table.getItemCount() ) {
    rownr = table.getItemCount() - 1;
  }
  if ( rownr < 0 ) {
    rownr = 0;
  }
  // cursor.setSelection(rownr, 0);
  selectRows( rownr, rownr );
}
/**
 * Re-applies the next action in the undo history (the inverse of
 * {@link #undoAction()}), adjusting the cursor row along the way.
 */
private void redoAction() {
  TransAction ta = nextUndo();
  if ( ta == null ) {
    return;
  }
  // Get the current cursor position
  int rownr = getCurrentRownr();
  setUndoMenu(); // something changed: change the menu
  switch ( ta.getType() ) {
    //
    // NEW
    //
    case TransAction.TYPE_ACTION_NEW_TABLEITEM:
      int[] idx = ta.getCurrentIndex();
      String[][] str = (String[][]) ta.getCurrent();
      for ( int i = 0; i < idx.length; i++ ) {
        addItem( idx[i], str[i] );
        if ( idx[i] <= rownr ) {
          rownr++; // Shift cursor position with the new items...
        }
      }
      setRowNums();
      break;
    //
    // DELETE
    //
    case TransAction.TYPE_ACTION_DELETE_TABLEITEM:
      idx = ta.getCurrentIndex();
      table.remove( idx );
      for ( int i = 0; i < idx.length; i++ ) {
        if ( idx[i] < rownr ) {
          rownr--; // shift with the rest.
        }
      }
      // See if the table is empty, if so : undo again!!
      if ( table.getItemCount() == 0 ) {
        undoAction();
      }
      setRowNums();
      break;
    //
    // CHANGE
    //
    case TransAction.TYPE_ACTION_CHANGE_TABLEITEM:
      idx = ta.getCurrentIndex();
      String[][] curr = (String[][]) ta.getCurrent();
      for ( int x = 0; x < idx.length; x++ ) {
        TableItem item = table.getItem( idx[x] );
        for ( int i = 0; i < curr[x].length; i++ ) {
          item.setText( i + 1, curr[x][i] );
        }
      }
      break;
    //
    // CHANGE POSITION
    //
    case TransAction.TYPE_ACTION_POSITION_TABLEITEM:
      int[] currIdx = ta.getCurrentIndex();
      int[] prev = ta.getPreviousIndex();
      for ( int i = 0; i < currIdx.length; i++ ) {
        moveRow( currIdx[i], prev[i] );
      }
      setRowNums();
      break;
    default:
      break;
  }
  // Clamp the cursor to a valid row and re-select it.
  if ( rownr >= table.getItemCount() ) {
    rownr = table.getItemCount() - 1;
  }
  if ( rownr < 0 ) {
    rownr = 0;
  }
  // cursor.setSelection(rownr, 0);
  selectRows( rownr, rownr );
}
/**
 * Refreshes the enabled state and label of the undo/redo menu items to
 * reflect the actions currently available in the history.
 */
private void setUndoMenu() {
  TransAction prev = viewPreviousUndo();
  TransAction next = viewNextUndo();
  // The menu may already be gone when the widget is being disposed.
  if ( miEditUndo.isDisposed() || miEditRedo.isDisposed() ) {
    return;
  }
  if ( prev != null ) {
    miEditUndo.setEnabled( true );
    miEditUndo.setText( OsHelper.customizeMenuitemText( BaseMessages.getString( PKG, "TableView.menu.Undo", prev
      .toString() ) ) );
  } else {
    miEditUndo.setEnabled( false );
    miEditUndo.setText( OsHelper.customizeMenuitemText( BaseMessages.getString(
      PKG, "TableView.menu.UndoNotAvailable" ) ) );
  }
  if ( next != null ) {
    miEditRedo.setEnabled( true );
    miEditRedo.setText( OsHelper.customizeMenuitemText( BaseMessages.getString( PKG, "TableView.menu.Redo", next
      .toString() ) ) );
  } else {
    miEditRedo.setEnabled( false );
    miEditRedo.setText( OsHelper.customizeMenuitemText( BaseMessages.getString(
      PKG, "TableView.menu.RedoNotAvailable" ) ) );
  }
}
/**
 * Returns the undo action at the current history position and steps the
 * position back, or null when there is nothing left to undo.
 */
private TransAction previousUndo() {
  if ( undoPosition < 0 || undo.isEmpty() ) {
    return null; // No undo left!
  }
  return undo.get( undoPosition-- );
}
/**
 * Peeks at the undo action at the current history position without moving
 * it, or null when there is nothing left to undo.
 */
private TransAction viewPreviousUndo() {
  if ( undoPosition < 0 || undo.isEmpty() ) {
    return null; // No undo left!
  }
  return undo.get( undoPosition );
}
/**
 * Advances the history position and returns the redo action there, or null
 * when there is nothing left to redo.
 */
private TransAction nextUndo() {
  if ( undo.isEmpty() || undoPosition >= undo.size() - 1 ) {
    return null; // no redo left...
  }
  return undo.get( ++undoPosition );
}
/**
 * Peeks at the next redo action without moving the history position, or
 * null when there is nothing left to redo.
 */
private TransAction viewNextUndo() {
  if ( undo.isEmpty() || undoPosition >= undo.size() - 1 ) {
    return null; // no redo left...
  }
  return undo.get( undoPosition + 1 );
}
/** Discards the whole undo/redo history. */
private void clearUndo() {
  undo = new ArrayList<TransAction>();
  undoPosition = -1;
}
/** @return the cell (x = column, y = row) of the currently active button editor */
private Point getButtonPosition() {
  return new Point( buttonColnr, buttonRownr );
}

/** @return the cell content captured when the button editor was opened */
public String getButtonString() {
  return buttonContent;
}

/**
 * Writes a new value into the cell the active button editor belongs to.
 *
 * @param str the value to store
 */
public void setButtonString( String str ) {
  Point p = getButtonPosition();
  TableItem item = table.getItem( p.y );
  item.setText( p.x, str );
}

/** Disposes of the active button editor, if any. */
public void closeActiveButton() {
  if ( button != null && !button.isDisposed() ) {
    button.dispose();
  }
}
/** Closes (disposes) any open in-place text or combo editor. */
public void unEdit() {
  if ( text != null && !text.isDisposed() ) {
    text.dispose();
  }
  if ( combo != null && !combo.isDisposed() ) {
    combo.dispose();
  }
}
/**
 * Opens the filter-condition dialog and, when the user confirms, selects all
 * rows matching the condition. NOTE: despite the variable name, "tokeep" is
 * only used to drive the selection — no rows are removed here.
 */
public void setFilter() {
  if ( condition == null ) {
    condition = new Condition();
  }
  RowMetaInterface f = getRowWithoutValues();
  EnterConditionDialog ecd = new EnterConditionDialog( parent.getShell(), SWT.NONE, f, condition );
  Condition cond = ecd.open();
  if ( cond != null ) {
    ArrayList<Integer> tokeep = new ArrayList<Integer>();
    // Apply the condition to the TableView...
    int nr = table.getItemCount();
    for ( int i = nr - 1; i >= 0; i-- ) {
      RowMetaAndData r = getRow( i );
      boolean keep = cond.evaluate( r.getRowMeta(), r.getData() );
      if ( keep ) {
        tokeep.add( Integer.valueOf( i ) );
      }
    }
    int[] sels = new int[tokeep.size()];
    for ( int i = 0; i < sels.length; i++ ) {
      sels[i] = ( tokeep.get( i ) ).intValue();
    }
    table.setSelection( sels );
  }
}
/**
 * Builds the row metadata describing this table: an integer "#" column for
 * the row number followed by one string column per configured column.
 *
 * @return the row metadata (no data values)
 */
public RowMetaInterface getRowWithoutValues() {
  RowMetaInterface meta = new RowMeta();
  meta.addValueMeta( new ValueMetaInteger( "#" ) );
  for ( ColumnInfo column : columns ) {
    meta.addValueMeta( new ValueMetaString( column.getName() ) );
  }
  return meta;
}
/**
 * Returns the content of a table row as row metadata plus data: element 0 is
 * the row number (Long), the rest are the cell strings.
 *
 * @param nr the row index to read
 * @return the row's metadata and data
 */
public RowMetaAndData getRow( int nr ) {
  TableItem ti = table.getItem( nr );
  RowMetaInterface rowMeta = getRowWithoutValues();
  Object[] rowData = new Object[rowMeta.size()];
  rowData[0] = Long.valueOf( nr ); // avoid deprecated new Long(long) boxing constructor
  for ( int i = 1; i < rowMeta.size(); i++ ) {
    rowData[i] = ti.getText( i );
  }
  return new RowMetaAndData( rowMeta, rowData );
}
/** @return the indices of the currently selected rows */
public int[] getSelectionIndices() {
  return table.getSelectionIndices();
}

/** @return the index of the first selected row, or -1 when none is selected */
public int getSelectionIndex() {
  return table.getSelectionIndex();
}

/**
 * Removes the row at the given index, keeping at least one (empty) row.
 *
 * @param index the row to remove
 */
public void remove( int index ) {
  table.remove( index );
  if ( table.getItemCount() == 0 ) {
    new TableItem( table, SWT.NONE );
  }
}

/**
 * Removes the rows at the given indices, keeping at least one (empty) row.
 *
 * @param index the rows to remove
 */
public void remove( int[] index ) {
  table.remove( index );
  if ( table.getItemCount() == 0 ) {
    new TableItem( table, SWT.NONE );
  }
}
/**
 * Returns the text of a single cell.
 *
 * @param rownr row index
 * @param colnr column index
 * @return the cell text, or null when the row does not exist
 */
public String getItem( int rownr, int colnr ) {
  TableItem item = table.getItem( rownr );
  return item == null ? null : item.getText( colnr );
}
/**
 * Appends a new row with the given cell values; values beyond the number of
 * data columns are ignored and null values leave the cell untouched.
 *
 * @param string the cell values for the new row, starting at data column 1
 */
public void add( String... string ) {
  TableItem item = new TableItem( table, SWT.NONE );
  int columnCount = table.getColumnCount();
  for ( int i = 0; i < string.length && i + 1 < columnCount; i++ ) {
    if ( string[i] != null ) {
      item.setText( i + 1, string[i] );
    }
  }
}
/**
 * Returns the data-cell values of a row.
 *
 * @param rownr row index
 * @return the data-column texts, or null when the row does not exist
 */
public String[] getItem( int rownr ) {
  TableItem item = table.getItem( rownr );
  return item == null ? null : getItemText( item );
}
/**
 * Get all the strings from a certain column as an array.
 *
 * @param colnr The (0-based data) column to return
 * @return the column values as a string array.
 */
public String[] getItems( int colnr ) {
  int rowCount = table.getItemCount();
  String[] result = new String[rowCount];
  for ( int row = 0; row < rowCount; row++ ) {
    result[row] = table.getItem( row ).getText( colnr + 1 );
  }
  return result;
}
/** Removes all rows, then re-adds a single empty row so the table is never bare. */
public void removeAll() {
  table.removeAll();
  if ( table.getItemCount() == 0 ) {
    new TableItem( table, SWT.NONE );
  }
}

/** @return the number of rows in the table (including the trailing empty row) */
public int getItemCount() {
  return table.getItemCount();
}

/**
 * Sets the text of a single cell.
 *
 * @param text  the value to store
 * @param colnr column index
 * @param rownr row index
 */
public void setText( String text, int colnr, int rownr ) {
  TableItem item = table.getItem( rownr );
  item.setText( colnr, text );
}
/**
 * @return Returns the readonly flag.
 */
public boolean isReadonly() {
  return readonly;
}

/**
 * @param readonly The readonly flag to set.
 */
public void setReadonly( boolean readonly ) {
  this.readonly = readonly;
}

/**
 * @return the sortable flag
 */
public boolean isSortable() {
  return sortable;
}

/**
 * Enables or disables sorting; disabling clears the table's sort column,
 * enabling restores it to the current sort field.
 *
 * @param sortable the sortable flag to set
 */
public void setSortable( boolean sortable ) {
  this.sortable = sortable;
  if ( !sortable ) {
    table.setSortColumn( null );
  } else {
    table.setSortColumn( table.getColumn( sortfield ) );
  }
}
/**
 * Puts the edit cursor on the first editable (non read-only) column of the
 * first row; does nothing when every column is read-only.
 */
public void setFocusOnFirstEditableField() {
  // Look for the first field that can be edited...
  int rownr = 0;
  for ( int colnr = 0; colnr < columns.length; colnr++ ) {
    if ( columns[colnr].isReadOnly() ) {
      continue;
    }
    // edit this one...
    activeTableItem = table.getItem( rownr );
    activeTableColumn = colnr + 1;
    edit( rownr, colnr + 1 );
    break;
  }
}
/**
 * @return the current sort field (column index)
 */
public int getSortField() {
  return sortfield;
}

/**
 * @return whether sorting is currently descending
 */
public boolean isSortingDescending() {
  return sortingDescending;
}

/**
 * @param sortingDescending whether sorting should be descending
 */
public void setSortingDescending( boolean sortingDescending ) {
  this.sortingDescending = sortingDescending;
}

/** @return the underlying SWT table widget */
public Table getTable() {
  return table;
}

/**
 * @return the numberColumn
 */
public ColumnInfo getNumberColumn() {
  return numberColumn;
}

/**
 * @param numberColumn the numberColumn to set
 */
public void setNumberColumn( ColumnInfo numberColumn ) {
  this.numberColumn = numberColumn;
}

/** @return the in-place cell editor */
public TableEditor getEditor() {
  return editor;
}

/** @param editor the in-place cell editor to use */
public void setEditor( TableEditor editor ) {
  this.editor = editor;
}

/**
 * OSX workaround: force the focus-lost handling of the text editor so
 * pending edits are committed.
 */
public void applyOSXChanges() {
  if ( text != null && !text.isDisposed() && lsFocusText != null ) {
    lsFocusText.focusLost( null );
  }
}
/**
 * @return the showingBlueNullValues
 */
public boolean isShowingBlueNullValues() {
  return showingBlueNullValues;
}

/**
 * @param showingBlueNullValues the showingBlueNullValues to set
 */
public void setShowingBlueNullValues( boolean showingBlueNullValues ) {
  this.showingBlueNullValues = showingBlueNullValues;
}

/**
 * @return the content modify listener (lsContent)
 */
public ModifyListener getContentListener() {
  return lsContent;
}

/**
 * @param lsContent the content modify listener to set
 */
public void setContentListener( ModifyListener lsContent ) {
  this.lsContent = lsContent;
}

/**
 * @return the showingConversionErrorsInline
 */
public boolean isShowingConversionErrorsInline() {
  return showingConversionErrorsInline;
}

/**
 * @param showingConversionErrorsInline the showingConversionErrorsInline to set
 */
public void setShowingConversionErrorsInline( boolean showingConversionErrorsInline ) {
  this.showingConversionErrorsInline = showingConversionErrorsInline;
}

/**
 * Returns a copy of the columns array in order to prevent unintended modifications.
 *
 * @return columns array
 */
public ColumnInfo[] getColumns() {
  return Arrays.copyOf( columns, columns.length );
}

/** @return the table item currently holding the edit cursor */
public TableItem getActiveTableItem() {
  return activeTableItem;
}

/** @return the column index currently holding the edit cursor */
public int getActiveTableColumn() {
  return activeTableColumn;
}
}
|
PDI-16032 - Clicking on Row Numbers in certain steps causes new ones to be created
|
ui/src/org/pentaho/di/ui/core/widget/TableView.java
|
PDI-16032 - Clicking on Row Numbers in certain steps causes new ones to be created
|
|
Java
|
apache-2.0
|
afce6290c898c333d07f1146f156cab2c91b295c
| 0
|
vbonamy/esup-uportal,ASU-Capstone/uPortal,drewwills/uPortal,jl1955/uPortal5,GIP-RECIA/esup-uportal,pspaude/uPortal,joansmith/uPortal,drewwills/uPortal,MichaelVose2/uPortal,drewwills/uPortal,EsupPortail/esup-uportal,pspaude/uPortal,mgillian/uPortal,EdiaEducationTechnology/uPortal,jonathanmtran/uPortal,MichaelVose2/uPortal,Jasig/uPortal,apetro/uPortal,jl1955/uPortal5,jonathanmtran/uPortal,GIP-RECIA/esup-uportal,doodelicious/uPortal,Jasig/uPortal-start,joansmith/uPortal,stalele/uPortal,cousquer/uPortal,ASU-Capstone/uPortal,jl1955/uPortal5,vbonamy/esup-uportal,EdiaEducationTechnology/uPortal,timlevett/uPortal,doodelicious/uPortal,stalele/uPortal,stalele/uPortal,ASU-Capstone/uPortal-Forked,timlevett/uPortal,andrewstuart/uPortal,apetro/uPortal,ASU-Capstone/uPortal,Mines-Albi/esup-uportal,phillips1021/uPortal,Jasig/uPortal-start,EdiaEducationTechnology/uPortal,ASU-Capstone/uPortal,doodelicious/uPortal,timlevett/uPortal,bjagg/uPortal,Mines-Albi/esup-uportal,cousquer/uPortal,bjagg/uPortal,andrewstuart/uPortal,drewwills/uPortal,GIP-RECIA/esup-uportal,jhelmer-unicon/uPortal,Jasig/uPortal,kole9273/uPortal,kole9273/uPortal,ASU-Capstone/uPortal-Forked,jameswennmacher/uPortal,jameswennmacher/uPortal,ChristianMurphy/uPortal,kole9273/uPortal,Jasig/SSP-Platform,jameswennmacher/uPortal,vbonamy/esup-uportal,MichaelVose2/uPortal,vertein/uPortal,phillips1021/uPortal,vbonamy/esup-uportal,Jasig/SSP-Platform,jhelmer-unicon/uPortal,GIP-RECIA/esup-uportal,MichaelVose2/uPortal,vbonamy/esup-uportal,chasegawa/uPortal,vertein/uPortal,pspaude/uPortal,pspaude/uPortal,doodelicious/uPortal,chasegawa/uPortal,kole9273/uPortal,Mines-Albi/esup-uportal,jonathanmtran/uPortal,Mines-Albi/esup-uportal,stalele/uPortal,andrewstuart/uPortal,chasegawa/uPortal,EsupPortail/esup-uportal,ChristianMurphy/uPortal,jhelmer-unicon/uPortal,phillips1021/uPortal,ASU-Capstone/uPortal-Forked,Jasig/SSP-Platform,chasegawa/uPortal,stalele/uPortal,GIP-RECIA/esco-portail,mgillian/uPortal,groybal/uPortal,groybal/uPortal,jhelmer-unic
on/uPortal,groybal/uPortal,vertein/uPortal,EsupPortail/esup-uportal,jameswennmacher/uPortal,MichaelVose2/uPortal,kole9273/uPortal,andrewstuart/uPortal,joansmith/uPortal,Jasig/SSP-Platform,ASU-Capstone/uPortal,groybal/uPortal,jl1955/uPortal5,ChristianMurphy/uPortal,phillips1021/uPortal,GIP-RECIA/esup-uportal,doodelicious/uPortal,Jasig/SSP-Platform,apetro/uPortal,apetro/uPortal,ASU-Capstone/uPortal-Forked,joansmith/uPortal,timlevett/uPortal,andrewstuart/uPortal,Mines-Albi/esup-uportal,GIP-RECIA/esco-portail,joansmith/uPortal,cousquer/uPortal,jhelmer-unicon/uPortal,EdiaEducationTechnology/uPortal,ASU-Capstone/uPortal-Forked,chasegawa/uPortal,jameswennmacher/uPortal,mgillian/uPortal,phillips1021/uPortal,Jasig/uPortal,jl1955/uPortal5,bjagg/uPortal,apetro/uPortal,vertein/uPortal,GIP-RECIA/esco-portail,EsupPortail/esup-uportal,EsupPortail/esup-uportal,groybal/uPortal
|
/**
* Copyright 2002 The JA-SIG Collaborative. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
*
* 3. Redistributions of any form whatsoever must retain the following
* acknowledgment:
* "This product includes software developed by the JA-SIG Collaborative
* (http://www.jasig.org/)."
*
* THIS SOFTWARE IS PROVIDED BY THE JA-SIG COLLABORATIVE "AS IS" AND ANY
* EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE JA-SIG COLLABORATIVE OR
* ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
* OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/
package org.jasig.portal.channels.webproxy;
import java.io.BufferedOutputStream;
import java.io.ByteArrayOutputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.PrintWriter;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLConnection;
import java.net.URLEncoder;
import java.util.Collections;
import java.util.Date;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.StringTokenizer;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import org.jasig.portal.ChannelCacheKey;
import org.jasig.portal.ChannelRuntimeData;
import org.jasig.portal.ChannelRuntimeProperties;
import org.jasig.portal.ChannelStaticData;
import org.jasig.portal.GeneralRenderingException;
import org.jasig.portal.IMultithreadedCacheable;
import org.jasig.portal.IMultithreadedChannel;
import org.jasig.portal.IMultithreadedMimeResponse;
import org.jasig.portal.MediaManager;
import org.jasig.portal.PortalEvent;
import org.jasig.portal.PortalException;
import org.jasig.portal.PropertiesManager;
import org.jasig.portal.ResourceMissingException;
import org.jasig.portal.security.IPerson;
import org.jasig.portal.security.LocalConnectionContext;
import org.jasig.portal.services.LogService;
import org.jasig.portal.utils.AbsoluteURLFilter;
import org.jasig.portal.utils.CookieCutter;
import org.jasig.portal.utils.DTDResolver;
import org.jasig.portal.utils.ResourceLoader;
import org.jasig.portal.utils.XSLT;
import org.w3c.dom.Document;
import org.w3c.tidy.Tidy;
import org.xml.sax.ContentHandler;
/**
* <p>A channel which transforms and interacts with dynamic XML or HTML.
* See docs/website/developers/channel_docs/reference/CwebProxy.html
* for full documentation.
* </p>
*
* <p>Static Channel Parameters:
* Except where indicated, static parameters can be updated by equivalent
* Runtime parameters. Caching parameters can also be changed temporarily.
* Cache defaults and IPerson restrictions are loaded first from properties,
* and overridden by static data if there.
* </p>
* <ol>
* <li>"cw_xml" - a URI for the source XML document
* <li>"cw_ssl" - a URI for the corresponding .ssl (stylesheet list) file
* <li>"cw_xslTitle" - a title representing the stylesheet (optional)
* <i>If no title parameter is specified, a default
* stylesheet will be chosen according to the media</i>
* <li>"cw_xsl" - a URI for the stylesheet to use
* <i>If <code>cw_xsl</code> is supplied, <code>cw_ssl</code>
* and <code>cw_xslTitle</code> will be ignored.
* <li>"cw_passThrough" - indicates how RunTimeData is to be passed through.
* <i>If <code>cw_passThrough</code> is supplied, and not set
* to "all" or "application", additional RunTimeData
* parameters not starting with "cw_" or "upc_" will be
* passed as request parameters to the XML URI. If
* <code>cw_passThrough</code> is set to "marked", this will
* happen only if there is also a RunTimeData parameter of
* <code>cw_inChannelLink</code>. "application" is intended
* to keep application-specific links in the channel, while
* "all" should keep all links in the channel. This
* distinction is handled entirely in the URL Filters.
* <li>"cw_tidy" - output from <code>xmlUri</code> will be passed though Jtidy
* <li>"cw_info" - a URI to be called for the <code>info</code> event.
* <li>"cw_help" - a URI to be called for the <code>help</code> event.
* <li>"cw_edit" - a URI to be called for the <code>edit</code> event.
* <li>"cw_cacheDefaultMode" - Default caching mode.
* <i>May be <code>none</code> (normally don't cache), or
* <code>all</code> (cache everything).
* <li>"cw_cacheDefaultTimeout" - Default timeout in seconds.
* <li>"cw_cacheMode" - override default for this request only.
* <i>Primarily intended as a runtime parameter, but can
* used statically to override the first instance.</i>
* <li>"cw_cacheTimeout" - override default for this request only.
* <i>Primarily intended as a runtime parameter, but can
* be used statically to override the first instance.</i>
* <li>"cw_person" - IPerson attributes to pass.
* <i>A comma-separated list of IPerson attributes to
* pass to the back end application. The static data
* value will be passed on </i>all<i> requests not
* overridden by a runtime data cw_person except some
* refresh requests.</i>
* <li>"cw_personAllow" - Restrict IPerson attribute passing to this list.
* <i>A comma-separated list of IPerson attributes that
* may be passed via cw_person. An empty or non-existent
* value means use the default value from the corresponding
* property. The special value "*" means all attributes
* are allowed. The value "!*" means none are allowed.
* Static data only.</i>
* <li>"upc_localConnContext" - LocalConnectionContext implementation class.
* <i>The name of a class to use when data sent to the
* backend application needs to be modified or added
* to suit local needs. Static data only.</i>
* </ol>
* <p>Runtime Channel Parameters:</p>
* The following parameters are runtime-only.
* </p>
* <ol>
* <li>"cw_reset" - an instruction to return to reset internal variables.
* <i>The value <code>return</code> resets <code>cw_xml</code>
* to its last value before changed by button events. The
* value "reset" returns all variables to the static data
* values.</i>
* <li>"cw_download" - use download worker for this link or form
* <i>any link or form that contains this parameter will be
* handled by the download worker, if the pass-through mode
* is set to rewrite the link or form. This allows downloads
* from the proxied site to be delivered via the portal,
* primarily useful if the download requires verification
* of a session referenced by a proxied cookie</i>
*
* </ol>
* <p>This channel can be used for all XML formats with appropriate stylesheets.
* All static data parameters as well as additional runtime data parameters
* passed to this channel via HttpRequest will in turn be passed on to the
* XSLT stylesheet as stylesheet parameters. They can be read in the
* stylesheet as follows:
* <code><xsl:param
* name="yourParamName">aDefaultValue</xsl:param></code>
* </p>
* @author Andrew Draskoy, andrew@mun.ca
* @author Sarah Arnott, sarnott@mun.ca
* @version $Revision$
*/
public class CWebProxy implements IMultithreadedChannel, IMultithreadedCacheable, IMultithreadedMimeResponse
{
Map stateTable;
// to prepend to the system-wide cache key
static final String systemCacheId="org.jasig.portal.channels.webproxy.CWebProxy";
// All state variables stored here
  // All state variables stored here.  One instance per channel subscription,
  // keyed by uid in stateTable.
  private class ChannelState
  {
    private int id;                        // numeric id of the portal user (from IPerson)
    private IPerson iperson;               // the user, source of cw_person attribute values
    private String person;                 // cw_person: comma-separated IPerson attributes to pass
    private String personAllow;            // cw_personAllow: attributes permitted to be passed ("*" = all, "!*" = none)
    private HashSet personAllow_set;       // parsed personAllow names (null when "!*")
    private String fullxmlUri;             // effective request URI (xmlUri + appended parameters)
    private String buttonxmlUri;           // URI set by edit/help/about button events; overrides xmlUri until reset
    private String xmlUri;                 // cw_xml: base source document URI
    private String key;                    // cache key component (URI or unique string for POSTs)
    private String passThrough;            // cw_passThrough: none / all / application / marked
    private String tidy;                   // cw_tidy: "on" routes the response through JTidy
    private String sslUri;                 // cw_ssl: stylesheet-list URI
    private String xslTitle;               // cw_xslTitle: stylesheet title within the .ssl file
    private String xslUri;                 // cw_xsl: explicit stylesheet URI (overrides sslUri/xslTitle)
    private String infoUri;                // cw_info: URI for the about button
    private String helpUri;                // cw_help: URI for the help button
    private String editUri;                // cw_edit: URI for the edit button
    private String cacheDefaultMode;       // default caching mode ("none" or "all")
    private String cacheMode;              // caching mode for the current request
    private String reqParameters;          // URL-encoded pass-through parameters (POST body until sent)
    private long cacheDefaultTimeout;      // default cache timeout, seconds
    private long cacheTimeout;             // cache timeout for the current request, seconds
    private ChannelRuntimeData runtimeData;
    private CookieCutter cookieCutter;     // stores/replays cookies for the proxied site
    private URLConnection connHolder;      // open connection handed between getHeaders()/getInputStream()
    private LocalConnectionContext localConnContext; // optional hook to adapt outgoing requests
    private int refresh;                   // -1 before first request; tracks empty-request refresh cycles

    public ChannelState ()
    {
      fullxmlUri = buttonxmlUri = xmlUri = key = passThrough = sslUri = null;
      xslTitle = xslUri = infoUri = helpUri = editUri = tidy = null;
      id = 0;
      cacheMode = null;
      iperson = null;
      refresh = -1;
      // Cache and pass-through defaults come from portal properties; static
      // data may override them later in setStaticData().
      cacheTimeout = cacheDefaultTimeout = PropertiesManager.getPropertyAsLong("org.jasig.portal.channels.webproxy.CWebProxy.cache_default_timeout");
      cacheMode = cacheDefaultMode = PropertiesManager.getProperty("org.jasig.portal.channels.webproxy.CWebProxy.cache_default_mode");
      personAllow = PropertiesManager.getProperty("org.jasig.portal.channels.webproxy.CWebProxy.person_allow");
      runtimeData = null;
      cookieCutter = new CookieCutter();
      localConnContext = null;
    }
  }
public CWebProxy ()
{
stateTable = Collections.synchronizedMap(new HashMap());
}
/**
* Passes ChannelStaticData to the channel.
* This is done during channel instantiation time.
* see org.jasig.portal.ChannelStaticData
* @param sd channel static data
* @see ChannelStaticData
*/
public void setStaticData (ChannelStaticData sd, String uid)
{
ChannelState state = new ChannelState();
state.id = sd.getPerson().getID();
state.iperson = sd.getPerson();
state.person = sd.getParameter("cw_person");
String personAllow = sd.getParameter ("cw_personAllow");
if ( personAllow != null && (!personAllow.trim().equals("")))
state.personAllow = personAllow;
// state.personAllow could have been set by a property or static data
if ( state.personAllow != null && (!state.personAllow.trim().equals("!*")) )
{
state.personAllow_set = new HashSet();
StringTokenizer st = new StringTokenizer(state.personAllow,",");
if (st != null)
{
while ( st.hasMoreElements () ) {
String pName = st.nextToken();
if (pName!=null) {
pName = pName.trim();
if (!pName.equals(""))
state.personAllow_set.add(pName);
}
}
}
}
state.xmlUri = sd.getParameter ("cw_xml");
state.sslUri = sd.getParameter ("cw_ssl");
state.xslTitle = sd.getParameter ("cw_xslTitle");
state.xslUri = sd.getParameter ("cw_xsl");
state.fullxmlUri = sd.getParameter ("cw_xml");
state.passThrough = sd.getParameter ("cw_passThrough");
state.tidy = sd.getParameter ("cw_tidy");
state.infoUri = sd.getParameter ("cw_info");
state.helpUri = sd.getParameter ("cw_help");
state.editUri = sd.getParameter ("cw_edit");
state.key = state.xmlUri;
String cacheMode = sd.getParameter ("cw_cacheDefaultMode");
if (cacheMode != null && !cacheMode.trim().equals(""))
state.cacheDefaultMode = cacheMode;
cacheMode = sd.getParameter ("cw_cacheMode");
if (cacheMode != null && !cacheMode.trim().equals(""))
state.cacheMode = cacheMode;
else
state.cacheMode = state.cacheDefaultMode;
String cacheTimeout = sd.getParameter("cw_cacheDefaultTimeout");
if (cacheTimeout != null && !cacheTimeout.trim().equals(""))
state.cacheDefaultTimeout = Long.parseLong(cacheTimeout);
cacheTimeout = sd.getParameter("cw_cacheTimeout");
if (cacheTimeout != null && !cacheTimeout.trim().equals(""))
state.cacheTimeout = Long.parseLong(cacheTimeout);
else
state.cacheTimeout = state.cacheDefaultTimeout;
String connContext = sd.getParameter ("upc_localConnContext");
if (connContext != null && !connContext.trim().equals(""))
{
try
{
state.localConnContext = (LocalConnectionContext) Class.forName(connContext).newInstance();
state.localConnContext.init(sd);
}
catch (Exception e)
{
LogService.log(LogService.ERROR, "CWebProxy: Cannot initialize LocalConnectionContext: " + e);
}
}
stateTable.put(uid,state);
}
  /**
   * Passes ChannelRuntimeData to the channel.  Recomputes the effective
   * request URI (fullxmlUri), the cache key, and the pass-through request
   * parameters for this request.  An empty runtime data object after the
   * first request is treated as a portal-driven refresh, which replays the
   * previous URI rather than re-deriving it.  Called prior to renderXML().
   *
   * @param rd channel runtime data
   * @param uid unique identifier for this channel instance
   * @see ChannelRuntimeData
   */
  public void setRuntimeData (ChannelRuntimeData rd, String uid)
  {
    ChannelState state = (ChannelState)stateTable.get(uid);
    if (state == null)
      LogService.log(LogService.ERROR,"CWebProxy:setRuntimeData() : attempting to access a non-established channel! setStaticData() hasn't been called on uid=\""+uid+"\"");
    else
    {
      state.runtimeData = rd;
      if ( rd.isEmpty() && (state.refresh != -1) ) {
        // A refresh-- State remains the same.
        if ( state.buttonxmlUri != null ) {
          // A button (edit/help/about) URI is active; keep serving it.
          state.key = state.buttonxmlUri;
          state.fullxmlUri = state.buttonxmlUri;
          state.refresh = 0;
        } else {
          if ( state.refresh == 0 )
            state.key = state.fullxmlUri;
          state.fullxmlUri = state.xmlUri;
          state.refresh = 1;
        }
      } else {
        state.refresh = 0;
        // Runtime cw_* parameters override the corresponding static settings.
        String xmlUri = state.runtimeData.getParameter("cw_xml");
        if (xmlUri != null) {
          state.xmlUri = xmlUri;
          // don't need an explicit reset if a new URI is provided.
          state.buttonxmlUri = null;
        }
        String sslUri = state.runtimeData.getParameter("cw_ssl");
        if (sslUri != null)
          state.sslUri = sslUri;
        String xslTitle = state.runtimeData.getParameter("cw_xslTitle");
        if (xslTitle != null)
          state.xslTitle = xslTitle;
        String xslUri = state.runtimeData.getParameter("cw_xsl");
        if (xslUri != null)
          state.xslUri = xslUri;
        String passThrough = state.runtimeData.getParameter("cw_passThrough");
        if (passThrough != null)
          state.passThrough = passThrough;
        String person = state.runtimeData.getParameter("cw_person");
        if (person != null)
          state.person = person;
        String tidy = state.runtimeData.getParameter("cw_tidy");
        if (tidy != null)
          state.tidy = tidy;
        String infoUri = state.runtimeData.getParameter("cw_info");
        if (infoUri != null)
          state.infoUri = infoUri;
        String editUri = state.runtimeData.getParameter("cw_edit");
        if (editUri != null)
          state.editUri = editUri;
        String helpUri = state.runtimeData.getParameter("cw_help");
        if (helpUri != null)
          state.helpUri = helpUri;
        String cacheTimeout = state.runtimeData.getParameter("cw_cacheDefaultTimeout");
        if (cacheTimeout != null)
          state.cacheDefaultTimeout = Long.parseLong(cacheTimeout);
        cacheTimeout = state.runtimeData.getParameter("cw_cacheTimeout");
        if (cacheTimeout != null)
          state.cacheTimeout = Long.parseLong(cacheTimeout);
        else
          state.cacheTimeout = state.cacheDefaultTimeout;
        String cacheDefaultMode = state.runtimeData.getParameter("cw_cacheDefaultMode");
        if (cacheDefaultMode != null) {
          state.cacheDefaultMode = cacheDefaultMode;
        }
        String cacheMode = state.runtimeData.getParameter("cw_cacheMode");
        if (cacheMode != null) {
          state.cacheMode = cacheMode;
        } else
          state.cacheMode = state.cacheDefaultMode;
        // reset is a one-time thing.
        String reset = state.runtimeData.getParameter("cw_reset");
        if (reset != null) {
          if (reset.equalsIgnoreCase("return")) {
            state.buttonxmlUri = null;
          }
        }
        if ( state.buttonxmlUri != null ) // shouldn't happen here, but...
          state.fullxmlUri = state.buttonxmlUri;
        else
        {
          //LogService.log(LogService.DEBUG, "CWebProxy: xmlUri is " + state.xmlUri);
          // pass IPerson atts independent of the value of cw_passThrough
          StringBuffer newXML = new StringBuffer();
          String appendchar = "";
          // here add in attributes according to cw_person
          if (state.person != null && state.personAllow_set != null)
          {
            StringTokenizer st = new StringTokenizer(state.person,",");
            // NOTE(review): the null check below is dead code — the
            // StringTokenizer constructor never returns null.
            if (st != null)
            {
              while (st.hasMoreElements ())
              {
                String pName = st.nextToken();
                if ((pName!=null)&&(!pName.trim().equals("")))
                {
                  if ( state.personAllow.trim().equals("*") ||
                       state.personAllow_set.contains(pName) )
                  {
                    newXML.append(appendchar);
                    appendchar = "&";
                    newXML.append(pName);
                    newXML.append("=");
                    // note, this only gets the first one if it's a
                    // java.util.Vector.  Should check
                    String pVal = (String)state.iperson.getAttribute(pName);
                    if (pVal != null)
                      newXML.append(URLEncoder.encode(pVal));
                  }
                  else {
                    LogService.log(LogService.INFO,
                       "CWebProxy: request to pass " + pName + " denied.");
                  }
                }
              }
            }
          }
          // end cw_person code
          // Is this a case where we need to pass request parameters to the xmlURI?
          if ( state.passThrough != null &&
             !state.passThrough.equalsIgnoreCase("none") &&
             ( state.passThrough.equalsIgnoreCase("all") ||
               state.passThrough.equalsIgnoreCase("application") ||
               rd.getParameter("cw_inChannelLink") != null ) )
          {
            // keyword and parameter processing
            // NOTE: if both exist, only keywords are appended
            String keywords = rd.getKeywords();
            if (keywords != null)
            {
              if (appendchar.equals("&"))
                newXML.append("&keywords=" + keywords);
              else
                newXML.append(keywords);
            }
            else
            {
              // want all runtime parameters not specific to WebProxy
              Enumeration e=rd.getParameterNames ();
              if (e!=null)
              {
                while (e.hasMoreElements ())
                {
                  String pName = (String) e.nextElement ();
                  if ( !pName.startsWith("cw_") && !pName.startsWith("upc_")
                       && !pName.trim().equals(""))
                  {
                    // Multi-valued parameters are all appended in order.
                    String[] value_array = rd.getParameterValues(pName);
                    int i = 0;
                    while ( i < value_array.length )
                    {
                      newXML.append(appendchar);
                      appendchar = "&";
                      newXML.append(pName);
                      newXML.append("=");
                      newXML.append(URLEncoder.encode(value_array[i++].trim()));
                    }
                  }
                }
              }
            }
          }
          state.reqParameters = newXML.toString();
          state.fullxmlUri = state.xmlUri;
          if (!state.runtimeData.getHttpRequestMethod().equals("POST"))
          {
            // GET: fold the parameters into the query string; POST keeps them
            // in reqParameters to be written as the request body later.
            if ((state.reqParameters!=null) && (!state.reqParameters.trim().equals("")))
            {
              appendchar = (state.xmlUri.indexOf('?') == -1) ? "?" : "&";
              state.fullxmlUri = state.fullxmlUri+appendchar+state.reqParameters;
            }
            state.reqParameters = null;
          }
          //LogService.log(LogService.DEBUG, "CWebProxy: fullxmlUri now: " + state.fullxmlUri);
        }
        // set key for cache based on request parameters
        // NOTE: POST requests are not idempotent and therefore are not
        // retrievable from the cache
        if (!state.runtimeData.getHttpRequestMethod().equals("POST"))
          state.key = state.fullxmlUri;
        else //generate a unique string as key
          state.key = String.valueOf((new Date()).getTime());
      }
    }
  }
/**
* Process portal events. Currently supported events are
* EDIT_BUTTON_EVENT, HELP_BUTTON_EVENT, ABOUT_BUTTON_EVENT,
* and SESSION_DONE. The button events work by changing the xmlUri.
* The new Uri's content should contain a link that will refer back
* to the old one at the end of its task.
* @param ev the event
*/
public void receiveEvent (PortalEvent ev, String uid)
{
ChannelState state = (ChannelState)stateTable.get(uid);
if (state == null)
LogService.log(LogService.ERROR,"CWebProxy:receiveEvent() : attempting to access a non-established channel! setStaticData() hasn't been called on uid=\""+uid+"\"");
else {
int evnum = ev.getEventNumber();
switch (evnum)
{
case PortalEvent.EDIT_BUTTON_EVENT:
if (state.editUri != null)
state.buttonxmlUri = state.editUri;
break;
case PortalEvent.HELP_BUTTON_EVENT:
if (state.helpUri != null)
state.buttonxmlUri = state.helpUri;
break;
case PortalEvent.ABOUT_BUTTON_EVENT:
if (state.infoUri != null)
state.buttonxmlUri = state.infoUri;
break;
case PortalEvent.SESSION_DONE:
stateTable.remove(uid);
break;
// case PortalEvent.UNSUBSCRIBE:
default:
break;
}
}
}
/**
* Acquires ChannelRuntimeProperites from the channel.
* This function may be called by the portal framework throughout the session.
* @see ChannelRuntimeProperties
*/
public ChannelRuntimeProperties getRuntimeProperties (String uid)
{
ChannelRuntimeProperties rp=new ChannelRuntimeProperties();
// determine if such channel is registered
if (stateTable.get(uid) == null)
{
rp.setWillRender(false);
LogService.log(LogService.ERROR,"CWebProxy:getRuntimeProperties() : attempting to access a non-established channel! setStaticData() hasn't been called on uid=\""+uid+"\"");
}
return rp;
}
  /**
   * Ask channel to render its content.  Fetches the source document
   * (optionally tidied to XHTML), exposes the channel settings as
   * stylesheet parameters, and runs the XSLT transform through the
   * URL-rewriting filter chain into the supplied ContentHandler.
   *
   * @param out the SAX ContentHandler to output content to
   * @throws PortalException if the source document cannot be retrieved
   */
  public void renderXML (ContentHandler out, String uid) throws PortalException
  {
    ChannelState state=(ChannelState)stateTable.get(uid);
    if (state == null)
      LogService.log(LogService.ERROR,"CWebProxy:renderXML() : attempting to access a non-established channel! setStaticData() hasn't been called on uid=\""+uid+"\"");
    else
    {
      // Exactly one of xml / tidiedXml is populated, depending on cw_tidy.
      Document xml = null;
      String tidiedXml = null;
      try
      {
        if (state.tidy != null && state.tidy.equals("on"))
          tidiedXml = getTidiedXml(state.fullxmlUri, state);
        else
          xml = getXml(state.fullxmlUri, state);
      }
      catch (Exception e)
      {
        throw new GeneralRenderingException ("Problem retrieving contents of " + state.fullxmlUri + ".  Please restart channel. ", e, false, true);
      }
      state.runtimeData.put("baseActionURL", state.runtimeData.getBaseActionURL());
      state.runtimeData.put("downloadActionURL", state.runtimeData.getBaseWorkerURL("download"));
      // Runtime data parameters are handed to the stylesheet.
      // Add any static data parameters so it gets a full set of variables.
      // We may wish to remove this feature since we don't need it for
      // the default stylesheets now.
      if (state.xmlUri != null)
        state.runtimeData.put("cw_xml", state.xmlUri);
      if (state.sslUri != null)
        state.runtimeData.put("cw_ssl", state.sslUri);
      if (state.xslTitle != null)
        state.runtimeData.put("cw_xslTitle", state.xslTitle);
      if (state.xslUri != null)
        state.runtimeData.put("cw_xsl", state.xslUri);
      if (state.passThrough != null)
        state.runtimeData.put("cw_passThrough", state.passThrough);
      if (state.tidy != null)
        state.runtimeData.put("cw_tidy", state.tidy);
      if (state.infoUri != null)
        state.runtimeData.put("cw_info", state.infoUri);
      if (state.helpUri != null)
        state.runtimeData.put("cw_help", state.helpUri);
      if (state.editUri != null)
        state.runtimeData.put("cw_edit", state.editUri);
      if (state.person != null)
        state.runtimeData.put("cw_person", state.person);
      if (state.personAllow != null)
        state.runtimeData.put("cw_personAllow", state.personAllow);
      XSLT xslt = XSLT.getTransformer(this, state.runtimeData.getLocales());
      if (tidiedXml != null)
        xslt.setXML(tidiedXml);
      else
        xslt.setXML(xml);
      // An explicit cw_xsl stylesheet wins over the .ssl lookup.
      if (state.xslUri != null && (!state.xslUri.trim().equals("")))
        xslt.setXSL(state.xslUri);
      else
        xslt.setXSL(state.sslUri, state.xslTitle, state.runtimeData.getBrowserInfo());
      // Determine mime type
      MediaManager mm = new MediaManager();
      String media = mm.getMedia(state.runtimeData.getBrowserInfo());
      String mimeType = mm.getReturnMimeType(media);
      // Rewrite relative URLs to absolute, then apply the CWebProxy
      // link/form rewriting so proxied links route back through the portal.
      CWebProxyURLFilter filter2 = CWebProxyURLFilter.newCWebProxyURLFilter(mimeType, state.runtimeData, out);
      AbsoluteURLFilter filter1 = AbsoluteURLFilter.newAbsoluteURLFilter(mimeType, state.xmlUri, filter2);
      xslt.setTarget(filter1);
      xslt.setStylesheetParameters(state.runtimeData);
      xslt.transform();
    }
  }
/**
* Get the contents of a URI as a Document object. This is used if tidy
* is not set or equals 'off'.
* Also includes support for cookies.
* @param uri the URI
* @return the data pointed to by a URI as a Document object
*/
private Document getXml(String uri, ChannelState state) throws Exception
{
URLConnection urlConnect = getConnection(uri, state);
DocumentBuilderFactory docBuilderFactory = DocumentBuilderFactory.newInstance();
docBuilderFactory.setNamespaceAware(false);
DocumentBuilder docBuilder = docBuilderFactory.newDocumentBuilder();
DTDResolver dtdResolver = new DTDResolver();
docBuilder.setEntityResolver(dtdResolver);
return docBuilder.parse(urlConnect.getInputStream());
}
/**
* Get the contents of a URI as a String but send it through tidy first.
* Also includes support for cookies.
* @param uri the URI
* @return the data pointed to by a URI as a String
*/
private String getTidiedXml(String uri, ChannelState state) throws Exception
{
URLConnection urlConnect = getConnection(uri, state);
// get character encoding from Content-Type header
String encoding = null;
String ct = urlConnect.getContentType();
int i;
if (ct!=null && (i=ct.indexOf("charset="))!=-1)
{
encoding = ct.substring(i+8).trim();
if ((i=encoding.indexOf(";"))!=-1)
encoding = encoding.substring(0,i).trim();
if (encoding.indexOf("\"")!=-1)
encoding = encoding.substring(1,encoding.length()+1);
}
Tidy tidy = new Tidy ();
tidy.setXHTML (true);
tidy.setDocType ("omit");
tidy.setQuiet(true);
tidy.setShowWarnings(false);
tidy.setNumEntities(true);
tidy.setWord2000(true);
// If charset is specified in header, set JTidy's
// character encoding to either UTF-8, ISO-8859-1
// or ISO-2022 accordingly (NOTE that these are
// the only character encoding sets that are supported in
// JTidy). If character encoding is not specified,
// UTF-8 is the default.
if (encoding != null)
{
if (encoding.toLowerCase().equals("iso-8859-1"))
tidy.setCharEncoding(org.w3c.tidy.Configuration.LATIN1);
else if (encoding.toLowerCase().equals("iso-2022-jp"))
tidy.setCharEncoding(org.w3c.tidy.Configuration.ISO2022);
else
tidy.setCharEncoding(org.w3c.tidy.Configuration.UTF8);
}
else
{
tidy.setCharEncoding(org.w3c.tidy.Configuration.UTF8);
}
PrintWriter pw;
if ( System.getProperty("os.name").indexOf("Windows") != -1 )
{
pw = new PrintWriter(new FileOutputStream("nul"));
tidy.setErrout(pw);
}
else
{
pw = new PrintWriter(new FileOutputStream("/dev/null"));
tidy.setErrout(pw);
}
ByteArrayOutputStream stream = new ByteArrayOutputStream (1024);
BufferedOutputStream out = new BufferedOutputStream (stream);
tidy.parse (urlConnect.getInputStream(), out);
pw.close();
String tidiedXml = stream.toString();
stream.close();
out.close();
if ( tidy.getParseErrors() > 0 )
throw new GeneralRenderingException("Unable to convert input document to XHTML");
return tidiedXml;
}
  /**
   * Opens a connection to the given URI, replaying stored cookies, sending
   * POST parameters and any LocalConnectionContext data, storing returned
   * cookies, and transparently following 301/302/303 redirects (301 is only
   * followed for GET; a 301 on POST raises an exception per the HTTP spec's
   * manual-confirmation requirement).  Call order on the connection object
   * below is significant: request properties must be set before
   * sendLocalData, and the POST body written after it.
   *
   * @param uri the target URI (spaces will be %20-encoded)
   * @param state the channel state holding cookies, runtime data and context
   * @return an open URLConnection (an HttpURLConnection for http/https)
   * @throws ResourceMissingException for 404/403/500/204 responses
   */
  private URLConnection getConnection(String uri, ChannelState state) throws Exception
  {
    // before making the connection, ensure all spaces in the URI are encoded
    // (Note that URLEncoder.encode(String uri) cannot be used because
    // this method encodes everything, including forward slashes and
    // forward slashes are used for determining if the URL is
    // relative or absolute)
    uri = uri.trim();
    if (uri.indexOf(" ") != -1)
    {
      StringBuffer sbuff = new StringBuffer();
      int i;
      while( (i= uri.indexOf(" ")) != -1)
      {
        sbuff.append(uri.substring(0, i));
        sbuff.append("%20");
        uri = uri.substring(i+1);
      }
      sbuff.append(uri);
      uri = sbuff.toString();
    }
    // String.replaceAll(String,String) - since jdk 1.4
    //uri = uri.replaceAll(" ", "%20");
    URL url;
    if (state.localConnContext != null)
      url = ResourceLoader.getResourceAsURL(this.getClass(), state.localConnContext.getDescriptor(uri, state.runtimeData));
    else
      url = ResourceLoader.getResourceAsURL(this.getClass(), uri);
    // get info from url for cookies
    String domain = url.getHost().trim();
    String path = url.getPath();
    if ( path.indexOf("/") != -1 )
    {
      // Reduce the path to its parent directory for cookie scoping,
      // unless the only slash is the leading one.
      if (path.lastIndexOf("/") != 0)
        path = path.substring(0, path.lastIndexOf("/"));
    }
    String port = Integer.toString(url.getPort());
    //get connection
    URLConnection urlConnect = url.openConnection();
    String protocol = url.getProtocol();
    if (protocol.equals("http") || protocol.equals("https"))
    {
      if (domain != null && path != null)
      {
        //prepare the connection by setting properties and sending data
        HttpURLConnection httpUrlConnect = (HttpURLConnection) urlConnect;
        // Redirects are handled manually in the switch below so cookies
        // can be captured at each hop.
        httpUrlConnect.setInstanceFollowRedirects(false);
        //send any cookie headers to proxied application
        if(state.cookieCutter.cookiesExist())
          state.cookieCutter.sendCookieHeader(httpUrlConnect, domain, path, port);
        //set connection properties if request method was post
        if (state.runtimeData.getHttpRequestMethod().equals("POST"))
        {
          if ((state.reqParameters!=null) && (!state.reqParameters.trim().equals("")))
          {
            httpUrlConnect.setRequestMethod("POST");
            httpUrlConnect.setAllowUserInteraction(false);
            httpUrlConnect.setDoOutput(true);
          }
        }
        //send local data, if required
        //can call getOutputStream in sendLocalData (ie. to send post params)
        //(getOutputStream can be called twice on an HttpURLConnection)
        if (state.localConnContext != null)
        {
          try
          {
            state.localConnContext.sendLocalData(httpUrlConnect, state.runtimeData);
          }
          catch (Exception e)
          {
            LogService.log(LogService.ERROR, "CWebProxy: Unable to send data through " + state.runtimeData.getParameter("upc_localConnContext") + ": " + e.getMessage());
          }
        }
        //send the request parameters by post, if required
        //at this point, set or send methods cannot be called on the connection
        //object (they must be called before sendLocalData)
        if (state.runtimeData.getHttpRequestMethod().equals("POST")){
          if ((state.reqParameters!=null) && (!state.reqParameters.trim().equals(""))){
            PrintWriter post = new PrintWriter(httpUrlConnect.getOutputStream());
            post.print(state.reqParameters);
            post.flush();
            post.close();
            // Parameters are consumed once sent so a redirect retry
            // does not re-post them.
            state.reqParameters=null;
          }
        }
        //receive cookie headers
        state.cookieCutter.storeCookieHeader(httpUrlConnect, domain, path, port);
        int status = httpUrlConnect.getResponseCode();
        String location = httpUrlConnect.getHeaderField("Location");
        switch (status)
        {
          case HttpURLConnection.HTTP_NOT_FOUND:
            throw new ResourceMissingException
              (httpUrlConnect.getURL().toExternalForm(),
               "", "HTTP Status-Code 404: Not Found");
          case HttpURLConnection.HTTP_FORBIDDEN:
            throw new ResourceMissingException
              (httpUrlConnect.getURL().toExternalForm(),
               "", "HTTP Status-Code 403: Forbidden");
          case HttpURLConnection.HTTP_INTERNAL_ERROR:
            throw new ResourceMissingException
              (httpUrlConnect.getURL().toExternalForm(),
               "", "HTTP Status-Code 500: Internal Server Error");
          case HttpURLConnection.HTTP_NO_CONTENT:
            throw new ResourceMissingException
              (httpUrlConnect.getURL().toExternalForm(),
               "", "HTTP Status-Code 204: No Content");
          /*
           * Note: these cases apply to http status codes 302 and 303
           * this will handle automatic redirection to a new GET URL
           */
          case HttpURLConnection.HTTP_MOVED_TEMP:
            httpUrlConnect.disconnect();
            httpUrlConnect = (HttpURLConnection) getConnection(location,state);
            break;
          case HttpURLConnection.HTTP_SEE_OTHER:
            httpUrlConnect.disconnect();
            httpUrlConnect = (HttpURLConnection) getConnection(location,state);
            break;
          /*
           * Note: this cases apply to http status code 301
           * it will handle the automatic redirection of GET requests.
           * The spec calls for a POST redirect to be verified manually by the user
           * Rather than bypass this security restriction, we will throw an exception
           */
          case HttpURLConnection.HTTP_MOVED_PERM:
            if (state.runtimeData.getHttpRequestMethod().equals("GET")){
              httpUrlConnect.disconnect();
              httpUrlConnect = (HttpURLConnection) getConnection(location,state);
            }
            else {
              throw new ResourceMissingException
                (httpUrlConnect.getURL().toExternalForm(),
                 "", "HTTP Status-Code 301: POST Redirection currently not supported");
            }
            break;
          default:
            break;
        }
        return (URLConnection) httpUrlConnect;
      }
    }
    return urlConnect;
  }
public ChannelCacheKey generateKey(String uid)
{
ChannelState state = (ChannelState)stateTable.get(uid);
if (state == null)
{
LogService.log(LogService.ERROR,"CWebProxy:generateKey() : attempting to access a non-established channel! setStaticData() hasn't been called on uid=\""+uid+"\"");
return null;
}
if ( state.cacheMode.equalsIgnoreCase("none") )
return null;
// else if http see first if caching is on or off. if it's on,
// store the validity time in the state, cache it, and further
// resolve later with isValid.
// check cache-control, no-cache, must-revalidate, max-age,
// Date & Expires, expiry in past
// for 1.0 check pragma for no-cache
// add a warning to docs about not a full http 1.1 impl.
ChannelCacheKey k = new ChannelCacheKey();
StringBuffer sbKey = new StringBuffer(1024);
// Only INSTANCE scope is currently supported.
k.setKeyScope(ChannelCacheKey.INSTANCE_KEY_SCOPE);
sbKey.append("sslUri:").append(state.sslUri).append(", ");
// xslUri may either be specified as a parameter to this channel
// or we will get it by looking in the stylesheet list file
String xslUriForKey = state.xslUri;
try {
if (xslUriForKey == null) {
String sslUri = ResourceLoader.getResourceAsURLString(this.getClass(), state.sslUri);
xslUriForKey = XSLT.getStylesheetURI(sslUri, state.runtimeData.getBrowserInfo());
}
} catch (Exception e) {
xslUriForKey = "Not attainable: " + e;
}
sbKey.append("xslUri:").append(xslUriForKey).append(", ");
sbKey.append("key:").append(state.key).append(", ");
sbKey.append("passThrough:").append(state.passThrough).append(", ");
sbKey.append("tidy:").append(state.tidy).append(", ");
sbKey.append("person:").append(state.person);
k.setKey(sbKey.toString());
k.setKeyValidity(new Long(System.currentTimeMillis()));
//LogService.log(LogService.DEBUG,"CWebProxy:generateKey("
// + uid + ") : cachekey=\"" + sbKey.toString() + "\"");
return k;
}
public boolean isCacheValid(Object validity,String uid)
{
if (!(validity instanceof Long))
return false;
ChannelState state = (ChannelState)stateTable.get(uid);
if (state == null)
{
LogService.log(LogService.ERROR,"CWebProxy:isCacheValid() : attempting to access a non-established channel! setStaticData() hasn't been called on uid=\""+uid+"\"");
return false;
}
else
return (System.currentTimeMillis() - ((Long)validity).longValue() < state.cacheTimeout*1000);
}
public String getContentType(String uid) {
ChannelState state = (ChannelState)stateTable.get(uid);
return state.connHolder.getContentType();
}
public InputStream getInputStream(String uid) throws IOException {
ChannelState state = (ChannelState)stateTable.get(uid);
InputStream rs = state.connHolder.getInputStream();
state.connHolder = null;
return rs;
}
public void downloadData(OutputStream out,String uid) throws IOException {
throw(new IOException("CWebProxy: donloadData method not supported - use getInputStream only"));
}
public String getName(String uid) {
ChannelState state = (ChannelState)stateTable.get(uid);
return "proxyDL";
}
  /**
   * Open the proxied connection for a download request and return its
   * response headers.  The live connection is parked in state.connHolder
   * for the download worker's later getContentType()/getInputStream() calls.
   * @param uid channel instance id
   * @return map of header field names to values
   */
  public Map getHeaders(String uid) {
    ChannelState state = (ChannelState)stateTable.get(uid);
    try {
      state.connHolder= getConnection(state.fullxmlUri, state);
    }
    catch (Exception e){
      // NOTE(review): on failure connHolder may be left null/stale and the
      // loop below can NPE -- consider propagating rather than just logging.
      LogService.log(LogService.ERROR,e);
    }
    Map rhdrs = new HashMap();
    int i = 0;
    // NOTE(review): for HTTP, getHeaderFieldKey(0) is typically null (the
    // status line), which would terminate this loop before copying any
    // headers -- verify against the URLConnection implementation in use.
    while (state.connHolder.getHeaderFieldKey(i) != null){
      rhdrs.put(state.connHolder.getHeaderFieldKey(i),state.connHolder.getHeaderField(i));
      i++;
    }
    return rhdrs;
  }
  /**
   * Called by the download worker when delivery fails.
   * Currently only logs the error; the user sees nothing.
   * @param e the failure encountered during download
   */
  public void reportDownloadError(Exception e) {
    // We really should report this to the user somehow??
    LogService.log(LogService.ERROR, "CWebProxy::reportDownloadError(): " + e.getMessage());
  }
}
|
source/org/jasig/portal/channels/webproxy/CWebProxy.java
|
/**
* Copyright 2002 The JA-SIG Collaborative. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
*
* 3. Redistributions of any form whatsoever must retain the following
* acknowledgment:
* "This product includes software developed by the JA-SIG Collaborative
* (http://www.jasig.org/)."
*
* THIS SOFTWARE IS PROVIDED BY THE JA-SIG COLLABORATIVE "AS IS" AND ANY
* EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE JA-SIG COLLABORATIVE OR
* ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
* OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/
package org.jasig.portal.channels.webproxy;
import java.io.BufferedOutputStream;
import java.io.ByteArrayOutputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.PrintWriter;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLConnection;
import java.net.URLEncoder;
import java.util.Collections;
import java.util.Date;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.StringTokenizer;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import org.jasig.portal.ChannelCacheKey;
import org.jasig.portal.ChannelRuntimeData;
import org.jasig.portal.ChannelRuntimeProperties;
import org.jasig.portal.ChannelStaticData;
import org.jasig.portal.GeneralRenderingException;
import org.jasig.portal.IMultithreadedCacheable;
import org.jasig.portal.IMultithreadedChannel;
import org.jasig.portal.IMultithreadedMimeResponse;
import org.jasig.portal.MediaManager;
import org.jasig.portal.PortalEvent;
import org.jasig.portal.PortalException;
import org.jasig.portal.PropertiesManager;
import org.jasig.portal.ResourceMissingException;
import org.jasig.portal.security.IPerson;
import org.jasig.portal.security.LocalConnectionContext;
import org.jasig.portal.services.LogService;
import org.jasig.portal.utils.AbsoluteURLFilter;
import org.jasig.portal.utils.CookieCutter;
import org.jasig.portal.utils.DTDResolver;
import org.jasig.portal.utils.ResourceLoader;
import org.jasig.portal.utils.XSLT;
import org.w3c.dom.Document;
import org.w3c.tidy.Tidy;
import org.xml.sax.ContentHandler;
/**
* <p>A channel which transforms and interacts with dynamic XML or HTML.
* See docs/website/developers/channel_docs/reference/CwebProxy.html
* for full documentation.
* </p>
*
* <p>Static Channel Parameters:
* Except where indicated, static parameters can be updated by equivalent
* Runtime parameters. Caching parameters can also be changed temporarily.
* Cache defaults and IPerson restrictions are loaded first from properties,
* and overridden by static data if there.
* </p>
* <ol>
* <li>"cw_xml" - a URI for the source XML document
* <li>"cw_ssl" - a URI for the corresponding .ssl (stylesheet list) file
* <li>"cw_xslTitle" - a title representing the stylesheet (optional)
* <i>If no title parameter is specified, a default
* stylesheet will be chosen according to the media</i>
* <li>"cw_xsl" - a URI for the stylesheet to use
* <i>If <code>cw_xsl</code> is supplied, <code>cw_ssl</code>
* and <code>cw_xslTitle</code> will be ignored.
* <li>"cw_passThrough" - indicates how RunTimeData is to be passed through.
* <i>If <code>cw_passThrough</code> is supplied, and not set
* to "all" or "application", additional RunTimeData
* parameters not starting with "cw_" or "upc_" will be
* passed as request parameters to the XML URI. If
* <code>cw_passThrough</code> is set to "marked", this will
* happen only if there is also a RunTimeData parameter of
* <code>cw_inChannelLink</code>. "application" is intended
* to keep application-specific links in the channel, while
* "all" should keep all links in the channel. This
* distinction is handled entirely in the URL Filters.
* <li>"cw_tidy" - output from <code>xmlUri</code> will be passed though Jtidy
* <li>"cw_info" - a URI to be called for the <code>info</code> event.
* <li>"cw_help" - a URI to be called for the <code>help</code> event.
* <li>"cw_edit" - a URI to be called for the <code>edit</code> event.
* <li>"cw_cacheDefaultMode" - Default caching mode.
* <i>May be <code>none</code> (normally don't cache), or
* <code>all</code> (cache everything).
* <li>"cw_cacheDefaultTimeout" - Default timeout in seconds.
* <li>"cw_cacheMode" - override default for this request only.
* <i>Primarily intended as a runtime parameter, but can
 *                               be used statically to override the first instance.</i>
* <li>"cw_cacheTimeout" - override default for this request only.
* <i>Primarily intended as a runtime parameter, but can
* be used statically to override the first instance.</i>
* <li>"cw_person" - IPerson attributes to pass.
* <i>A comma-separated list of IPerson attributes to
* pass to the back end application. The static data
* value will be passed on </i>all<i> requests not
* overridden by a runtime data cw_person except some
* refresh requests.</i>
* <li>"cw_personAllow" - Restrict IPerson attribute passing to this list.
* <i>A comma-separated list of IPerson attributes that
* may be passed via cw_person. An empty or non-existent
* value means use the default value from the corresponding
* property. The special value "*" means all attributes
* are allowed. The value "!*" means none are allowed.
* Static data only.</i>
* <li>"upc_localConnContext" - LocalConnectionContext implementation class.
* <i>The name of a class to use when data sent to the
* backend application needs to be modified or added
* to suit local needs. Static data only.</i>
* </ol>
* <p>Runtime Channel Parameters:</p>
* The following parameters are runtime-only.
* </p>
* <ol>
* <li>"cw_reset" - an instruction to return to reset internal variables.
* <i>The value <code>return</code> resets <code>cw_xml</code>
* to its last value before changed by button events. The
* value "reset" returns all variables to the static data
* values.</i>
* <li>"cw_download" - use download worker for this link or form
* <i>any link or form that contains this parameter will be
* handled by the download worker, if the pass-through mode
* is set to rewrite the link or form. This allows downloads
* from the proxied site to be delivered via the portal,
* primarily useful if the download requires verification
* of a session referenced by a proxied cookie</i>
*
* </ol>
* <p>This channel can be used for all XML formats with appropriate stylesheets.
* All static data parameters as well as additional runtime data parameters
* passed to this channel via HttpRequest will in turn be passed on to the
* XSLT stylesheet as stylesheet parameters. They can be read in the
* stylesheet as follows:
* <code><xsl:param
* name="yourParamName">aDefaultValue</xsl:param></code>
* </p>
* @author Andrew Draskoy, andrew@mun.ca
* @author Sarah Arnott, sarnott@mun.ca
* @version $Revision$
*/
public class CWebProxy implements IMultithreadedChannel, IMultithreadedCacheable, IMultithreadedMimeResponse
{
  // Per-instance channel state, keyed by uid; wrapped in a synchronized
  // map by the constructor since one CWebProxy serves many channel instances.
  Map stateTable;
  // to prepend to the system-wide cache key
  static final String systemCacheId="org.jasig.portal.channels.webproxy.CWebProxy";
  // All state variables stored here, one instance per channel uid.
  private class ChannelState
  {
    private int id;                     // portal user id (IPerson.getID())
    private IPerson iperson;            // the user, for attribute lookups
    private String person;              // cw_person: attributes to pass to the backend
    private String personAllow;         // cw_personAllow: attributes permitted to pass
    private HashSet personAllow_set;    // parsed form of personAllow ("*" checked at use time)
    private String fullxmlUri;          // xmlUri plus any appended request parameters
    private String buttonxmlUri;        // override URI set by edit/help/about button events
    private String xmlUri;              // cw_xml: base source document URI
    private String key;                 // cache key component for the current request
    private String passThrough;         // cw_passThrough mode
    private String tidy;                // cw_tidy: "on" routes content through JTidy
    private String sslUri;              // cw_ssl: stylesheet list URI
    private String xslTitle;            // cw_xslTitle: stylesheet title selector
    private String xslUri;              // cw_xsl: explicit stylesheet URI (wins over ssl)
    private String infoUri;             // cw_info: about-button target
    private String helpUri;             // cw_help: help-button target
    private String editUri;             // cw_edit: edit-button target
    private String cacheDefaultMode;    // cw_cacheDefaultMode
    private String cacheMode;           // cw_cacheMode (per-request override)
    private String reqParameters;       // query string built from runtime data
    private long cacheDefaultTimeout;   // default cache timeout, in seconds
    private long cacheTimeout;          // effective cache timeout, in seconds
    private ChannelRuntimeData runtimeData;
    private CookieCutter cookieCutter;  // jar for cookies of the proxied site
    private URLConnection connHolder;   // connection parked for the download worker
    private LocalConnectionContext localConnContext;
    private int refresh;                // -1 until first request, then refresh bookkeeping

    public ChannelState ()
    {
      // Nulls mean "not configured"; portal properties supply the cache and
      // person-allow defaults, which static/runtime data may later override.
      fullxmlUri = buttonxmlUri = xmlUri = key = passThrough = sslUri = null;
      xslTitle = xslUri = infoUri = helpUri = editUri = tidy = null;
      id = 0;
      cacheMode = null;
      iperson = null;
      refresh = -1;
      cacheTimeout = cacheDefaultTimeout = PropertiesManager.getPropertyAsLong("org.jasig.portal.channels.webproxy.CWebProxy.cache_default_timeout");
      cacheMode = cacheDefaultMode = PropertiesManager.getProperty("org.jasig.portal.channels.webproxy.CWebProxy.cache_default_mode");
      personAllow = PropertiesManager.getProperty("org.jasig.portal.channels.webproxy.CWebProxy.person_allow");
      runtimeData = null;
      cookieCutter = new CookieCutter();
      localConnContext = null;
    }
  }
  /** Create the channel with an empty, thread-safe per-uid state table. */
  public CWebProxy ()
  {
    stateTable = Collections.synchronizedMap(new HashMap());
  }
  /**
   * Passes ChannelStaticData to the channel.
   * This is done during channel instantiation time.
   * Reads the publish-time cw_* parameters, layers cache settings over the
   * property-file defaults, parses the IPerson attribute allow-list, and
   * registers a fresh ChannelState under uid.
   * see org.jasig.portal.ChannelStaticData
   * @param sd channel static data
   * @param uid channel instance id
   * @see ChannelStaticData
   */
  public void setStaticData (ChannelStaticData sd, String uid)
  {
    ChannelState state = new ChannelState();
    state.id = sd.getPerson().getID();
    state.iperson = sd.getPerson();
    state.person = sd.getParameter("cw_person");
    String personAllow = sd.getParameter ("cw_personAllow");
    if ( personAllow != null && (!personAllow.trim().equals("")))
      state.personAllow = personAllow;
    // state.personAllow could have been set by a property or static data
    if ( state.personAllow != null && (!state.personAllow.trim().equals("!*")) )
    {
      // Parse the comma-separated allow list into a set; "!*" (none) is
      // excluded above, "*" (all) is checked at use time in setRuntimeData.
      state.personAllow_set = new HashSet();
      StringTokenizer st = new StringTokenizer(state.personAllow,",");
      if (st != null)
      {
        while ( st.hasMoreElements () ) {
          String pName = st.nextToken();
          if (pName!=null) {
            pName = pName.trim();
            if (!pName.equals(""))
              state.personAllow_set.add(pName);
          }
        }
      }
    }
    state.xmlUri = sd.getParameter ("cw_xml");
    state.sslUri = sd.getParameter ("cw_ssl");
    state.xslTitle = sd.getParameter ("cw_xslTitle");
    state.xslUri = sd.getParameter ("cw_xsl");
    state.fullxmlUri = sd.getParameter ("cw_xml");
    state.passThrough = sd.getParameter ("cw_passThrough");
    state.tidy = sd.getParameter ("cw_tidy");
    state.infoUri = sd.getParameter ("cw_info");
    state.helpUri = sd.getParameter ("cw_help");
    state.editUri = sd.getParameter ("cw_edit");
    state.key = state.xmlUri;
    // Cache mode: static default overrides property default; cw_cacheMode
    // overrides for the first request only, else falls back to the default.
    String cacheMode = sd.getParameter ("cw_cacheDefaultMode");
    if (cacheMode != null && !cacheMode.trim().equals(""))
      state.cacheDefaultMode = cacheMode;
    cacheMode = sd.getParameter ("cw_cacheMode");
    if (cacheMode != null && !cacheMode.trim().equals(""))
      state.cacheMode = cacheMode;
    else
      state.cacheMode = state.cacheDefaultMode;
    // Cache timeout follows the same default/override layering as the mode.
    String cacheTimeout = sd.getParameter("cw_cacheDefaultTimeout");
    if (cacheTimeout != null && !cacheTimeout.trim().equals(""))
      state.cacheDefaultTimeout = Long.parseLong(cacheTimeout);
    cacheTimeout = sd.getParameter("cw_cacheTimeout");
    if (cacheTimeout != null && !cacheTimeout.trim().equals(""))
      state.cacheTimeout = Long.parseLong(cacheTimeout);
    else
      state.cacheTimeout = state.cacheDefaultTimeout;
    // Optional hook for site-local modification of outgoing requests.
    String connContext = sd.getParameter ("upc_localConnContext");
    if (connContext != null && !connContext.trim().equals(""))
    {
      try
      {
        state.localConnContext = (LocalConnectionContext) Class.forName(connContext).newInstance();
        state.localConnContext.init(sd);
      }
      catch (Exception e)
      {
        LogService.log(LogService.ERROR, "CWebProxy: Cannot initialize LocalConnectionContext: " + e);
      }
    }
    stateTable.put(uid,state);
  }
  /**
   * Passes ChannelRuntimeData to the channel.
   * This function is called prior to the renderXML() call.
   * For a refresh (empty runtime data after the first request) the existing
   * state is kept; otherwise cw_* runtime parameters override state, the
   * IPerson attributes and pass-through parameters are folded into a query
   * string, and the effective URI and cache key are computed.
   * @param rd channel runtime data
   * @param uid channel instance id
   * @see ChannelRuntimeData
   */
  public void setRuntimeData (ChannelRuntimeData rd, String uid)
  {
    ChannelState state = (ChannelState)stateTable.get(uid);
    if (state == null)
      LogService.log(LogService.ERROR,"CWebProxy:setRuntimeData() : attempting to access a non-established channel! setStaticData() hasn't been called on uid=\""+uid+"\"");
    else
    {
      state.runtimeData = rd;
      if ( rd.isEmpty() && (state.refresh != -1) ) {
        // A refresh-- State remains the same.
        if ( state.buttonxmlUri != null ) {
          state.key = state.buttonxmlUri;
          state.fullxmlUri = state.buttonxmlUri;
          state.refresh = 0;
        } else {
          // Second consecutive refresh falls back to the base xmlUri.
          if ( state.refresh == 0 )
            state.key = state.fullxmlUri;
          state.fullxmlUri = state.xmlUri;
          state.refresh = 1;
        }
      } else {
        // Normal request: fold any cw_* runtime overrides into state.
        state.refresh = 0;
        String xmlUri = state.runtimeData.getParameter("cw_xml");
        if (xmlUri != null) {
          state.xmlUri = xmlUri;
          // don't need an explicit reset if a new URI is provided.
          state.buttonxmlUri = null;
        }
        String sslUri = state.runtimeData.getParameter("cw_ssl");
        if (sslUri != null)
          state.sslUri = sslUri;
        String xslTitle = state.runtimeData.getParameter("cw_xslTitle");
        if (xslTitle != null)
          state.xslTitle = xslTitle;
        String xslUri = state.runtimeData.getParameter("cw_xsl");
        if (xslUri != null)
          state.xslUri = xslUri;
        String passThrough = state.runtimeData.getParameter("cw_passThrough");
        if (passThrough != null)
          state.passThrough = passThrough;
        String person = state.runtimeData.getParameter("cw_person");
        if (person != null)
          state.person = person;
        String tidy = state.runtimeData.getParameter("cw_tidy");
        if (tidy != null)
          state.tidy = tidy;
        String infoUri = state.runtimeData.getParameter("cw_info");
        if (infoUri != null)
          state.infoUri = infoUri;
        String editUri = state.runtimeData.getParameter("cw_edit");
        if (editUri != null)
          state.editUri = editUri;
        String helpUri = state.runtimeData.getParameter("cw_help");
        if (helpUri != null)
          state.helpUri = helpUri;
        String cacheTimeout = state.runtimeData.getParameter("cw_cacheDefaultTimeout");
        if (cacheTimeout != null)
          state.cacheDefaultTimeout = Long.parseLong(cacheTimeout);
        cacheTimeout = state.runtimeData.getParameter("cw_cacheTimeout");
        if (cacheTimeout != null)
          state.cacheTimeout = Long.parseLong(cacheTimeout);
        else
          state.cacheTimeout = state.cacheDefaultTimeout;
        String cacheDefaultMode = state.runtimeData.getParameter("cw_cacheDefaultMode");
        if (cacheDefaultMode != null) {
          state.cacheDefaultMode = cacheDefaultMode;
        }
        String cacheMode = state.runtimeData.getParameter("cw_cacheMode");
        if (cacheMode != null) {
          state.cacheMode = cacheMode;
        } else
          state.cacheMode = state.cacheDefaultMode;
        // reset is a one-time thing.
        String reset = state.runtimeData.getParameter("cw_reset");
        if (reset != null) {
          if (reset.equalsIgnoreCase("return")) {
            state.buttonxmlUri = null;
          }
        }
        if ( state.buttonxmlUri != null ) // shouldn't happen here, but...
          state.fullxmlUri = state.buttonxmlUri;
        else
        {
          //LogService.log(LogService.DEBUG, "CWebProxy: xmlUri is " + state.xmlUri);
          // pass IPerson atts independent of the value of cw_passThrough
          StringBuffer newXML = new StringBuffer();
          String appendchar = "";
          // here add in attributes according to cw_person
          if (state.person != null && state.personAllow_set != null)
          {
            StringTokenizer st = new StringTokenizer(state.person,",");
            if (st != null)
            {
              while (st.hasMoreElements ())
              {
                String pName = st.nextToken();
                if ((pName!=null)&&(!pName.trim().equals("")))
                {
                  // Pass only attributes permitted by cw_personAllow
                  // ("*" permits everything).
                  if ( state.personAllow.trim().equals("*") ||
                       state.personAllow_set.contains(pName) )
                  {
                    newXML.append(appendchar);
                    appendchar = "&";
                    newXML.append(pName);
                    newXML.append("=");
                    // note, this only gets the first one if it's a
                    // java.util.Vector. Should check
                    String pVal = (String)state.iperson.getAttribute(pName);
                    if (pVal != null)
                      newXML.append(URLEncoder.encode(pVal));
                  }
                  else {
                    LogService.log(LogService.INFO,
                           "CWebProxy: request to pass " + pName + " denied.");
                  }
                }
              }
            }
          }
          // end cw_person code
          // Is this a case where we need to pass request parameters to the xmlURI?
          if ( state.passThrough != null &&
            !state.passThrough.equalsIgnoreCase("none") &&
              ( state.passThrough.equalsIgnoreCase("all") ||
                state.passThrough.equalsIgnoreCase("application") ||
                rd.getParameter("cw_inChannelLink") != null ) )
          {
            // keyword and parameter processing
            // NOTE: if both exist, only keywords are appended
            String keywords = rd.getKeywords();
            if (keywords != null)
            {
              if (appendchar.equals("&"))
                newXML.append("&keywords=" + keywords);
              else
                newXML.append(keywords);
            }
            else
            {
              // want all runtime parameters not specific to WebProxy
              Enumeration e=rd.getParameterNames ();
              if (e!=null)
              {
                while (e.hasMoreElements ())
                {
                  String pName = (String) e.nextElement ();
                  if ( !pName.startsWith("cw_") && !pName.startsWith("upc_")
                      && !pName.trim().equals(""))
                  {
                    String[] value_array = rd.getParameterValues(pName);
                    int i = 0;
                    while ( i < value_array.length )
                    {
                      newXML.append(appendchar);
                      appendchar = "&";
                      newXML.append(pName);
                      newXML.append("=");
                      newXML.append(URLEncoder.encode(value_array[i++].trim()));
                    }
                  }
                }
              }
            }
          }
          state.reqParameters = newXML.toString();
          state.fullxmlUri = state.xmlUri;
          if (!state.runtimeData.getHttpRequestMethod().equals("POST"))
          {
            // GET: append the built query string to the URI directly;
            // POST keeps reqParameters for the request body (see getConnection).
            if ((state.reqParameters!=null) && (!state.reqParameters.trim().equals("")))
            {
              appendchar = (state.xmlUri.indexOf('?') == -1) ? "?" : "&";
              state.fullxmlUri = state.fullxmlUri+appendchar+state.reqParameters;
            }
            state.reqParameters = null;
          }
          //LogService.log(LogService.DEBUG, "CWebProxy: fullxmlUri now: " + state.fullxmlUri);
        }
        // set key for cache based on request parameters
        // NOTE: POST requests are not idempotent and therefore are not
        // retrievable from the cache
        if (!state.runtimeData.getHttpRequestMethod().equals("POST"))
          state.key = state.fullxmlUri;
        else //generate a unique string as key
          state.key = String.valueOf((new Date()).getTime());
      }
    }
  }
/**
* Process portal events. Currently supported events are
* EDIT_BUTTON_EVENT, HELP_BUTTON_EVENT, ABOUT_BUTTON_EVENT,
* and SESSION_DONE. The button events work by changing the xmlUri.
* The new Uri's content should contain a link that will refer back
* to the old one at the end of its task.
* @param ev the event
*/
public void receiveEvent (PortalEvent ev, String uid)
{
ChannelState state = (ChannelState)stateTable.get(uid);
if (state == null)
LogService.log(LogService.ERROR,"CWebProxy:receiveEvent() : attempting to access a non-established channel! setStaticData() hasn't been called on uid=\""+uid+"\"");
else {
int evnum = ev.getEventNumber();
switch (evnum)
{
case PortalEvent.EDIT_BUTTON_EVENT:
if (state.editUri != null)
state.buttonxmlUri = state.editUri;
break;
case PortalEvent.HELP_BUTTON_EVENT:
if (state.helpUri != null)
state.buttonxmlUri = state.helpUri;
break;
case PortalEvent.ABOUT_BUTTON_EVENT:
if (state.infoUri != null)
state.buttonxmlUri = state.infoUri;
break;
case PortalEvent.SESSION_DONE:
stateTable.remove(uid);
break;
// case PortalEvent.UNSUBSCRIBE:
default:
break;
}
}
}
/**
* Acquires ChannelRuntimeProperites from the channel.
* This function may be called by the portal framework throughout the session.
* @see ChannelRuntimeProperties
*/
public ChannelRuntimeProperties getRuntimeProperties (String uid)
{
ChannelRuntimeProperties rp=new ChannelRuntimeProperties();
// determine if such channel is registered
if (stateTable.get(uid) == null)
{
rp.setWillRender(false);
LogService.log(LogService.ERROR,"CWebProxy:getRuntimeProperties() : attempting to access a non-established channel! setStaticData() hasn't been called on uid=\""+uid+"\"");
}
return rp;
}
/**
* Ask channel to render its content.
* @param out the SAX ContentHandler to output content to
*/
public void renderXML (ContentHandler out, String uid) throws PortalException
{
ChannelState state=(ChannelState)stateTable.get(uid);
if (state == null)
LogService.log(LogService.ERROR,"CWebProxy:renderXML() : attempting to access a non-established channel! setStaticData() hasn't been called on uid=\""+uid+"\"");
else
{
Document xml = null;
String tidiedXml = null;
try
{
if (state.tidy != null && state.tidy.equals("on"))
tidiedXml = getTidiedXml(state.fullxmlUri, state);
else
xml = getXml(state.fullxmlUri, state);
}
catch (Exception e)
{
throw new GeneralRenderingException ("Problem occured while rendering channel. Please restart channel.", e, false, true);
}
state.runtimeData.put("baseActionURL", state.runtimeData.getBaseActionURL());
state.runtimeData.put("downloadActionURL", state.runtimeData.getBaseWorkerURL("download"));
// Runtime data parameters are handed to the stylesheet.
// Add any static data parameters so it gets a full set of variables.
// We may wish to remove this feature since we don't need it for
// the default stylesheets now.
if (state.xmlUri != null)
state.runtimeData.put("cw_xml", state.xmlUri);
if (state.sslUri != null)
state.runtimeData.put("cw_ssl", state.sslUri);
if (state.xslTitle != null)
state.runtimeData.put("cw_xslTitle", state.xslTitle);
if (state.xslUri != null)
state.runtimeData.put("cw_xsl", state.xslUri);
if (state.passThrough != null)
state.runtimeData.put("cw_passThrough", state.passThrough);
if (state.tidy != null)
state.runtimeData.put("cw_tidy", state.tidy);
if (state.infoUri != null)
state.runtimeData.put("cw_info", state.infoUri);
if (state.helpUri != null)
state.runtimeData.put("cw_help", state.helpUri);
if (state.editUri != null)
state.runtimeData.put("cw_edit", state.editUri);
if (state.person != null)
state.runtimeData.put("cw_person", state.person);
if (state.personAllow != null)
state.runtimeData.put("cw_personAllow", state.personAllow);
XSLT xslt = XSLT.getTransformer(this, state.runtimeData.getLocales());
if (tidiedXml != null)
xslt.setXML(tidiedXml);
else
xslt.setXML(xml);
if (state.xslUri != null && (!state.xslUri.trim().equals("")))
xslt.setXSL(state.xslUri);
else
xslt.setXSL(state.sslUri, state.xslTitle, state.runtimeData.getBrowserInfo());
// Determine mime type
MediaManager mm = new MediaManager();
String media = mm.getMedia(state.runtimeData.getBrowserInfo());
String mimeType = mm.getReturnMimeType(media);
CWebProxyURLFilter filter2 = CWebProxyURLFilter.newCWebProxyURLFilter(mimeType, state.runtimeData, out);
AbsoluteURLFilter filter1 = AbsoluteURLFilter.newAbsoluteURLFilter(mimeType, state.xmlUri, filter2);
xslt.setTarget(filter1);
xslt.setStylesheetParameters(state.runtimeData);
xslt.transform();
}
}
/**
* Get the contents of a URI as a Document object. This is used if tidy
* is not set or equals 'off'.
* Also includes support for cookies.
* @param uri the URI
* @return the data pointed to by a URI as a Document object
*/
private Document getXml(String uri, ChannelState state) throws Exception
{
URLConnection urlConnect = getConnection(uri, state);
DocumentBuilderFactory docBuilderFactory = DocumentBuilderFactory.newInstance();
docBuilderFactory.setNamespaceAware(false);
DocumentBuilder docBuilder = docBuilderFactory.newDocumentBuilder();
DTDResolver dtdResolver = new DTDResolver();
docBuilder.setEntityResolver(dtdResolver);
return docBuilder.parse(urlConnect.getInputStream());
}
/**
* Get the contents of a URI as a String but send it through tidy first.
* Also includes support for cookies.
* @param uri the URI
* @return the data pointed to by a URI as a String
*/
private String getTidiedXml(String uri, ChannelState state) throws Exception
{
URLConnection urlConnect = getConnection(uri, state);
// get character encoding from Content-Type header
String encoding = null;
String ct = urlConnect.getContentType();
int i;
if (ct!=null && (i=ct.indexOf("charset="))!=-1)
{
encoding = ct.substring(i+8).trim();
if ((i=encoding.indexOf(";"))!=-1)
encoding = encoding.substring(0,i).trim();
if (encoding.indexOf("\"")!=-1)
encoding = encoding.substring(1,encoding.length()+1);
}
Tidy tidy = new Tidy ();
tidy.setXHTML (true);
tidy.setDocType ("omit");
tidy.setQuiet(true);
tidy.setShowWarnings(false);
tidy.setNumEntities(true);
tidy.setWord2000(true);
// If charset is specified in header, set JTidy's
// character encoding to either UTF-8, ISO-8859-1
// or ISO-2022 accordingly (NOTE that these are
// the only character encoding sets that are supported in
// JTidy). If character encoding is not specified,
// UTF-8 is the default.
if (encoding != null)
{
if (encoding.toLowerCase().equals("iso-8859-1"))
tidy.setCharEncoding(org.w3c.tidy.Configuration.LATIN1);
else if (encoding.toLowerCase().equals("iso-2022-jp"))
tidy.setCharEncoding(org.w3c.tidy.Configuration.ISO2022);
else
tidy.setCharEncoding(org.w3c.tidy.Configuration.UTF8);
}
else
{
tidy.setCharEncoding(org.w3c.tidy.Configuration.UTF8);
}
PrintWriter pw;
if ( System.getProperty("os.name").indexOf("Windows") != -1 )
{
pw = new PrintWriter(new FileOutputStream("nul"));
tidy.setErrout(pw);
}
else
{
pw = new PrintWriter(new FileOutputStream("/dev/null"));
tidy.setErrout(pw);
}
ByteArrayOutputStream stream = new ByteArrayOutputStream (1024);
BufferedOutputStream out = new BufferedOutputStream (stream);
tidy.parse (urlConnect.getInputStream(), out);
pw.close();
String tidiedXml = stream.toString();
stream.close();
out.close();
if ( tidy.getParseErrors() > 0 )
throw new GeneralRenderingException("Unable to convert input document to XHTML");
return tidiedXml;
}
  /**
   * Open a URLConnection to the given URI: percent-encodes spaces, resolves
   * the URL (optionally through the LocalConnectionContext), replays stored
   * cookies, POSTs pending request parameters, captures returned cookies,
   * and follows 302/303 redirects (and 301 for GET only) recursively.
   * @param uri the target URI (relative URIs resolved via ResourceLoader)
   * @param state channel state supplying cookies, HTTP method and parameters
   * @return the prepared connection, ready for getInputStream()
   * @throws ResourceMissingException for 404/403/500/204 responses and for
   *         a 301 on a POST request
   */
  private URLConnection getConnection(String uri, ChannelState state) throws Exception
  {
    // before making the connection, ensure all spaces in the URI are encoded
    // (Note that URLEncoder.encode(String uri) cannot be used because
    // this method encodes everything, including forward slashes and
    // forward slashes are used for determining if the URL is
    // relative or absolute)
    uri = uri.trim();
    if (uri.indexOf(" ") != -1)
    {
      StringBuffer sbuff = new StringBuffer();
      int i;
      while( (i= uri.indexOf(" ")) != -1)
      {
        sbuff.append(uri.substring(0, i));
        sbuff.append("%20");
        uri = uri.substring(i+1);
      }
      sbuff.append(uri);
      uri = sbuff.toString();
    }
    // String.replaceAll(String,String) - since jdk 1.4
    //uri = uri.replaceAll(" ", "%20");
    URL url;
    if (state.localConnContext != null)
      url = ResourceLoader.getResourceAsURL(this.getClass(), state.localConnContext.getDescriptor(uri, state.runtimeData));
    else
      url = ResourceLoader.getResourceAsURL(this.getClass(), uri);
    // get info from url for cookies (path trimmed to its directory)
    String domain = url.getHost().trim();
    String path = url.getPath();
    if ( path.indexOf("/") != -1 )
    {
      if (path.lastIndexOf("/") != 0)
        path = path.substring(0, path.lastIndexOf("/"));
    }
    String port = Integer.toString(url.getPort());
    //get connection
    URLConnection urlConnect = url.openConnection();
    String protocol = url.getProtocol();
    if (protocol.equals("http") || protocol.equals("https"))
    {
      if (domain != null && path != null)
      {
        //prepare the connection by setting properties and sending data
        HttpURLConnection httpUrlConnect = (HttpURLConnection) urlConnect;
        // Redirects are handled manually below so cookies can be tracked.
        httpUrlConnect.setInstanceFollowRedirects(false);
        //send any cookie headers to proxied application
        if(state.cookieCutter.cookiesExist())
          state.cookieCutter.sendCookieHeader(httpUrlConnect, domain, path, port);
        //set connection properties if request method was post
        if (state.runtimeData.getHttpRequestMethod().equals("POST"))
        {
          if ((state.reqParameters!=null) && (!state.reqParameters.trim().equals("")))
          {
            httpUrlConnect.setRequestMethod("POST");
            httpUrlConnect.setAllowUserInteraction(false);
            httpUrlConnect.setDoOutput(true);
          }
        }
        //send local data, if required
        //can call getOutputStream in sendLocalData (ie. to send post params)
        //(getOutputStream can be called twice on an HttpURLConnection)
        if (state.localConnContext != null)
        {
          try
          {
            state.localConnContext.sendLocalData(httpUrlConnect, state.runtimeData);
          }
          catch (Exception e)
          {
            LogService.log(LogService.ERROR, "CWebProxy: Unable to send data through " + state.runtimeData.getParameter("upc_localConnContext") + ": " + e.getMessage());
          }
        }
        //send the request parameters by post, if required
        //at this point, set or send methods cannot be called on the connection
        //object (they must be called before sendLocalData)
        if (state.runtimeData.getHttpRequestMethod().equals("POST")){
          if ((state.reqParameters!=null) && (!state.reqParameters.trim().equals(""))){
            PrintWriter post = new PrintWriter(httpUrlConnect.getOutputStream());
            post.print(state.reqParameters);
            post.flush();
            post.close();
            state.reqParameters=null;  // body is one-shot; clear after sending
          }
        }
        //receive cookie headers
        state.cookieCutter.storeCookieHeader(httpUrlConnect, domain, path, port);
        int status = httpUrlConnect.getResponseCode();
        String location = httpUrlConnect.getHeaderField("Location");
        switch (status)
        {
          case HttpURLConnection.HTTP_NOT_FOUND:
            throw new ResourceMissingException
              (httpUrlConnect.getURL().toExternalForm(),
                "", "HTTP Status-Code 404: Not Found");
          case HttpURLConnection.HTTP_FORBIDDEN:
            throw new ResourceMissingException
              (httpUrlConnect.getURL().toExternalForm(),
                "", "HTTP Status-Code 403: Forbidden");
          case HttpURLConnection.HTTP_INTERNAL_ERROR:
            throw new ResourceMissingException
              (httpUrlConnect.getURL().toExternalForm(),
                "", "HTTP Status-Code 500: Internal Server Error");
          case HttpURLConnection.HTTP_NO_CONTENT:
            throw new ResourceMissingException
              (httpUrlConnect.getURL().toExternalForm(),
                "", "HTTP Status-Code 204: No Content");
          /*
           * Note: these cases apply to http status codes 302 and 303
           * this will handle automatic redirection to a new GET URL
           */
          case HttpURLConnection.HTTP_MOVED_TEMP:
            httpUrlConnect.disconnect();
            httpUrlConnect = (HttpURLConnection) getConnection(location,state);
            break;
          case HttpURLConnection.HTTP_SEE_OTHER:
            httpUrlConnect.disconnect();
            httpUrlConnect = (HttpURLConnection) getConnection(location,state);
            break;
          /*
           * Note: this case applies to http status code 301
           * it will handle the automatic redirection of GET requests.
           * The spec calls for a POST redirect to be verified manually by the user
           * Rather than bypass this security restriction, we will throw an exception
           */
          case HttpURLConnection.HTTP_MOVED_PERM:
            if (state.runtimeData.getHttpRequestMethod().equals("GET")){
              httpUrlConnect.disconnect();
              httpUrlConnect = (HttpURLConnection) getConnection(location,state);
            }
            else {
              throw new ResourceMissingException
                (httpUrlConnect.getURL().toExternalForm(),
                  "", "HTTP Status-Code 301: POST Redirection currently not supported");
            }
            break;
          default:
            break;
        }
        return (URLConnection) httpUrlConnect;
      }
    }
    return urlConnect;
  }
  /**
   * Build a cache key for the current request, or return null when caching
   * is disabled (cacheMode "none") or the channel is not established.
   * The key combines sslUri, the resolved xslUri, the request key,
   * passThrough, tidy and person settings; the validity value is a creation
   * timestamp later checked by isCacheValid().
   * @param uid channel instance id
   * @return the cache key, or null if this request must not be cached
   */
  public ChannelCacheKey generateKey(String uid)
  {
    ChannelState state = (ChannelState)stateTable.get(uid);
    if (state == null)
    {
      LogService.log(LogService.ERROR,"CWebProxy:generateKey() : attempting to access a non-established channel! setStaticData() hasn't been called on uid=\""+uid+"\"");
      return null;
    }
    if ( state.cacheMode.equalsIgnoreCase("none") )
      return null;
    // else if http see first if caching is on or off. if it's on,
    // store the validity time in the state, cache it, and further
    // resolve later with isValid.
    // check cache-control, no-cache, must-revalidate, max-age,
    // Date & Expires, expiry in past
    // for 1.0 check pragma for no-cache
    // add a warning to docs about not a full http 1.1 impl.
    ChannelCacheKey k = new ChannelCacheKey();
    StringBuffer sbKey = new StringBuffer(1024);
    // Only INSTANCE scope is currently supported.
    k.setKeyScope(ChannelCacheKey.INSTANCE_KEY_SCOPE);
    sbKey.append("sslUri:").append(state.sslUri).append(", ");
    // xslUri may either be specified as a parameter to this channel
    // or we will get it by looking in the stylesheet list file
    String xslUriForKey = state.xslUri;
    try {
      if (xslUriForKey == null) {
        String sslUri = ResourceLoader.getResourceAsURLString(this.getClass(), state.sslUri);
        xslUriForKey = XSLT.getStylesheetURI(sslUri, state.runtimeData.getBrowserInfo());
      }
    } catch (Exception e) {
      // Resolution failure still yields a usable (distinct) key component.
      xslUriForKey = "Not attainable: " + e;
    }
    sbKey.append("xslUri:").append(xslUriForKey).append(", ");
    sbKey.append("key:").append(state.key).append(", ");
    sbKey.append("passThrough:").append(state.passThrough).append(", ");
    sbKey.append("tidy:").append(state.tidy).append(", ");
    sbKey.append("person:").append(state.person);
    k.setKey(sbKey.toString());
    k.setKeyValidity(new Long(System.currentTimeMillis()));
    //LogService.log(LogService.DEBUG,"CWebProxy:generateKey("
    //    + uid + ") : cachekey=\"" + sbKey.toString() + "\"");
    return k;
  }
/**
 * Decides whether a cached rendering is still usable. The validity object is
 * the epoch-millisecond stamp produced by generateKey(); anything else is
 * rejected outright.
 */
public boolean isCacheValid(Object validity,String uid)
{
  // Only validity stamps we issued ourselves (epoch millis) are meaningful.
  if (!(validity instanceof Long))
    return false;

  ChannelState state = (ChannelState)stateTable.get(uid);
  if (state == null)
  {
    LogService.log(LogService.ERROR,"CWebProxy:isCacheValid() : attempting to access a non-established channel! setStaticData() hasn't been called on uid=\""+uid+"\"");
    return false;
  }

  // Valid while the cached copy is younger than the configured timeout (seconds).
  long ageMillis = System.currentTimeMillis() - ((Long)validity).longValue();
  return ageMillis < state.cacheTimeout*1000;
}
/** Reports the content type of the connection held from the last fetch. */
public String getContentType(String uid) {
  return ((ChannelState)stateTable.get(uid)).connHolder.getContentType();
}
/**
 * Hands the held connection's input stream to the caller and drops our
 * reference: the connection is single-use, so the holder must not be
 * reused after the stream has been claimed.
 */
public InputStream getInputStream(String uid) throws IOException {
  ChannelState state = (ChannelState)stateTable.get(uid);
  InputStream rs = state.connHolder.getInputStream();
  state.connHolder = null;
  return rs;
}
/**
 * Intentionally unsupported: streaming downloads must go through
 * getInputStream() instead.
 *
 * @throws IOException always
 */
public void downloadData(OutputStream out,String uid) throws IOException {
  // Fixed typo in the error message: "donloadData" -> "downloadData".
  throw(new IOException("CWebProxy: downloadData method not supported - use getInputStream only"));
}
/**
 * Returns the fixed name of the download worker. The channel state is not
 * consulted; the previous stateTable lookup was dead code and is removed.
 */
public String getName(String uid) {
  return "proxyDL";
}
/**
 * Opens a fresh connection to the proxied document and returns its response
 * headers as a (key, value) map.
 *
 * If the connection cannot be established, the failure is logged and an empty
 * map is returned; previously a null connHolder caused a NullPointerException
 * in the header loop.
 */
public Map getHeaders(String uid) {
  ChannelState state = (ChannelState)stateTable.get(uid);
  try {
    state.connHolder = getConnection(state.fullxmlUri, state);
  }
  catch (Exception e){
    LogService.log(LogService.ERROR,e);
  }
  Map rhdrs = new HashMap();
  // Guard: no usable connection means no headers to report.
  if (state.connHolder == null) {
    return rhdrs;
  }
  int i = 0;
  while (state.connHolder.getHeaderFieldKey(i) != null){
    rhdrs.put(state.connHolder.getHeaderFieldKey(i),state.connHolder.getHeaderField(i));
    i++;
  }
  return rhdrs;
}
/** Logs a failed download. TODO: surface the failure to the user as well. */
public void reportDownloadError(Exception e) {
  String msg = "CWebProxy::reportDownloadError(): " + e.getMessage();
  LogService.log(LogService.ERROR, msg);
}
}
|
Included URL of proxied page in error message.
git-svn-id: 477788cc2a8229a747c5b8073e47c1d0f6ec0604@8084 f5dbab47-78f9-eb45-b975-e544023573eb
|
source/org/jasig/portal/channels/webproxy/CWebProxy.java
|
Included URL of proxied page in error message.
|
|
Java
|
apache-2.0
|
4be4c4a843c3b60a5e8ee699bb9c00f47d697227
| 0
|
shannah/cn1-teavm-builds,mpoindexter/teavm,mpoindexter/teavm,mpoindexter/teavm,shannah/cn1-teavm-builds,jtulach/teavm,konsoletyper/teavm,avdim/teavm,jtulach/teavm,shannah/cn1-teavm-builds,konsoletyper/teavm,jtulach/teavm,jtulach/teavm,shannah/cn1-teavm-builds,shannah/cn1-teavm-builds,avdim/teavm,konsoletyper/teavm,avdim/teavm,mpoindexter/teavm,konsoletyper/teavm,avdim/teavm,konsoletyper/teavm
|
/*
* Copyright 2014 Alexey Andreev.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.teavm.classlib.java.util;
import org.teavm.classlib.java.io.TSerializable;
import org.teavm.classlib.java.lang.*;
public class TTreeMap<K, V> extends TAbstractMap<K, V> implements TCloneable, TSerializable, TSortedMap<K, V> {
class TreeNode implements Entry<K, V> {
K key;
V value;
TreeNode left;
TreeNode right;
int height = 1;
int size = 1;
public TreeNode(K key) {
this.key = key;
}
@Override
public K getKey() {
return key;
}
@Override
public V getValue() {
return value;
}
@Override
public V setValue(V value) {
V old = this.value;
this.value = value;
return old;
}
public TreeNode balance() {
int factor = factor();
if (factor == 2) {
if (right.factor() < 0) {
right = right.rotateRight();
}
return rotateLeft();
} else if (factor == -2) {
if (left.factor() > 0) {
left = left.rotateLeft();
}
return rotateRight();
} else {
return this;
}
}
public int factor() {
return (right != null ? right.height : 0) - (left != null ? left.height : 0);
}
public TreeNode rotateRight() {
TreeNode left = this.left;
this.left = left.right;
left.right = this;
fix();
left.fix();
return left;
}
public TreeNode rotateLeft() {
TreeNode right = this.right;
this.right = right.left;
right.left = this;
fix();
right.fix();
return right;
}
public void fix() {
height = Math.max(right != null ? right.height : 0, left != null ? left.height : 0) + 1;
size = 1;
if (left != null) {
size += left.size;
}
if (right != null) {
size += right.size;
}
}
}
TreeNode root;
private TComparator<? super K> comparator;
private TComparator<? super K> originalComparator;
private int modCount = 0;
public TTreeMap(TComparator<? super K> comparator) {
this.originalComparator = comparator;
if (comparator == null) {
comparator = new TComparator<Object>() {
@SuppressWarnings("unchecked") @Override public int compare(Object o1, Object o2) {
return o1 != null ? ((TComparable<Object>)o1).compareTo(o2) :
((TComparable<Object>)o2).compareTo(o1);
}
};
}
this.comparator = comparator;
}
@Override
public V get(Object key) {
TreeNode node = findNode(key);
return node != null ? node.value : null;
}
@Override
public V put(K key, V value) {
root = getOrCreateNode(root, key);
TreeNode node = findNode(key);
V old = node.value;
node.value = value;
modCount++;
return old;
}
@Override
public V remove(Object key) {
TreeNode node = findNode(key);
if (node == null) {
return null;
}
root = deleteNode(root, key);
modCount++;
return node.value;
}
@Override
public void clear() {
root = null;
modCount++;
}
@Override
public boolean isEmpty() {
return root == null;
}
@Override
public boolean containsKey(Object key) {
return findNode(key) != null;
}
TreeNode findNode(Object key) {
TreeNode node = root;
while (node != null) {
@SuppressWarnings("unchecked")
int cmp = comparator.compare((K)key, node.key);
if (cmp == 0) {
return node;
} else if (cmp < 0) {
node = node.left;
} else {
node = node.right;
}
}
return null;
}
private TreeNode getOrCreateNode(TreeNode root, K key) {
if (root == null) {
return new TreeNode(key);
}
int cmp = comparator.compare(key, root.key);
if (cmp == 0) {
return root;
} else if (cmp < 0) {
root.left = getOrCreateNode(root.left, key);
} else {
root.right = getOrCreateNode(root.right, key);
}
return root.balance();
}
private TreeNode deleteNode(TreeNode root, Object key) {
if (root == null) {
return null;
}
@SuppressWarnings("unchecked")
int cmp = comparator.compare((K)key, root.key);
if (cmp < 0) {
root.left = deleteNode(root.left, key);
} else if (cmp > 0) {
root.right = deleteNode(root.right, key);
} else if (root.right == null) {
return root.left;
} else {
TreeNode left = root.left;
TreeNode right = root.right;
TreeNode min = right;
Object[] pathToMin = new Object[right.height];
int minDepth = 0;
while (min.left != null) {
pathToMin[minDepth++] = min;
min = min.left;
}
right = min.right;
while (minDepth >= 0) {
@SuppressWarnings("unchecked")
TreeNode node = (TreeNode)pathToMin[--minDepth];
node.left = right;
right = node;
node.balance();
}
min.right = right;
min.left = left;
root = min;
}
return root.balance();
}
@Override
public TSet<Entry<K, V>> entrySet() {
return new EntrySet(new Object[0], null);
}
@Override
public TComparator<? super K> comparator() {
return originalComparator;
}
@Override
public TSortedMap<K, V> subMap(K fromKey, K toKey) {
return null;
}
@Override
public TSortedMap<K, V> headMap(K toKey) {
return null;
}
@Override
public TSortedMap<K, V> tailMap(K fromKey) {
return null;
}
@Override
public K firstKey() {
return firstEntry().key;
}
@Override
public K lastKey() {
return lastEntry().key;
}
private TreeNode firstEntry() {
if (isEmpty()) {
throw new TNoSuchElementException();
}
TreeNode node = root;
while (node.left != null) {
node = node.left;
}
return node;
}
private TreeNode lastEntry() {
if (isEmpty()) {
throw new TNoSuchElementException();
}
TreeNode node = root;
while (node.left != null) {
node = node.left;
}
return node;
}
@Override
public int size() {
return root != null ? root.size : 0;
}
private class EntrySet extends TAbstractSet<Entry<K, V>> {
private Object[] path;
private TreeNode from;
private TreeNode to;
@SuppressWarnings("unchecked")
public EntrySet(Object[] path, TreeNode to) {
this.path = path;
this.from = this.path.length > 0 ? (TreeNode)this.path[this.path.length - 1] : null;
this.to = to;
}
@Override
public int size() {
int size = TTreeMap.this.size();
if (from != null && from.left != null) {
size -= from.left.size;
}
if (to != null) {
size -= 1;
if (to.right != null) {
size -= to.right.size;
}
}
return size;
}
@Override
public TIterator<Entry<K, V>> iterator() {
return null;
}
}
private class EntryIterator implements TIterator<Entry<K, V>> {
private int modCount = TTreeMap.this.modCount;
private Object[] path = new Object[root != null ? root.height : 0];
private TreeNode last;
private TreeNode to;
private int pathLength;
public EntryIterator(Object[] path, TreeNode to) {
TreeNode node = root;
while (node != null) {
path[pathLength++] = node;
node = node.left;
}
}
@Override public boolean hasNext() {
return pathLength > 0;
}
@SuppressWarnings("unchecked") @Override public Entry<K, V> next() {
if (modCount != TTreeMap.this.modCount) {
throw new TConcurrentModificationException();
}
if (pathLength == 0) {
throw new TNoSuchElementException();
}
TreeNode node = (TreeNode)path[--pathLength];
last = node;
if (node.right != null) {
node = node.right;
while (node != null) {
path[pathLength++] = node;
node = node.left;
}
}
return last;
}
@Override public void remove() {
if (last == null) {
throw new TNoSuchElementException();
}
deleteNode(root, last);
last = null;
}
}
}
|
teavm-classlib/src/main/java/org/teavm/classlib/java/util/TTreeMap.java
|
/*
* Copyright 2014 Alexey Andreev.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.teavm.classlib.java.util;
import org.teavm.classlib.java.io.TSerializable;
import org.teavm.classlib.java.lang.*;
import org.teavm.javascript.ni.Rename;
public class TTreeMap<K, V> extends TAbstractMap<K, V> implements TSortedMap<K, V>, TCloneable, TSerializable {
transient int size;
private TComparator<? super K> comparator;
transient int modCount;
transient TSet<Entry<K, V>> entrySet;
transient Node<K, V> root;
/**
 * Live view of one (key, value) slot inside a {@link Node}. The entry
 * captures the key it was created for; if the backing slot is later reused
 * for a different key, the value is re-resolved through the map, and an
 * TIllegalStateException is raised when the key is no longer present.
 */
class MapEntry extends TObject implements Entry<K, V>, TCloneable {
    final int offset;      // slot index within the node's keys/values arrays
    final Node<K, V> node; // backing node at creation time
    final K key;           // captured key, used to detect stale slots

    MapEntry(Node<K, V> node, int offset) {
        this.node = node;
        this.offset = offset;
        key = node.keys[offset];
    }

    @Override
    public Object clone() {
        try {
            return super.clone();
        } catch (TCloneNotSupportedException e) {
            return null;
        }
    }

    @Override
    public boolean equals(Object object) {
        if (this == object) {
            return true;
        }
        if (object instanceof Entry) {
            Entry<?, ?> entry = (Entry<?, ?>) object;
            V value = getValue();
            // Null-safe comparison of both key and value, per the Map.Entry contract.
            return (key == null ? entry.getKey() == null : key.equals(entry.getKey()))
                && (value == null ? entry.getValue() == null : value.equals(entry.getValue()));
        }
        return false;
    }

    @Override
    public K getKey() {
        return key;
    }

    @Override
    public V getValue() {
        // Fast path: the slot still holds our key.
        if (node.keys[offset] == key) {
            return node.values[offset];
        }
        // Slot was reused; fall back to a map lookup.
        if (containsKey(key)) {
            return get(key);
        }
        throw new TIllegalStateException();
    }

    @Override
    public int hashCode() {
        V value = getValue();
        // Standard Map.Entry hash: key hash XOR value hash, null-safe.
        return (key == null ? 0 : key.hashCode())
            ^ (value == null ? 0 : value.hashCode());
    }

    @Override
    public V setValue(V object) {
        // Fast path: write directly into the slot when it still holds our key.
        if (node.keys[offset] == key) {
            V res = node.values[offset];
            node.values[offset] = object;
            return res;
        }
        // Slot was reused; fall back to a map update.
        if (containsKey(key)) {
            return put(key, object);
        }
        throw new TIllegalStateException();
    }

    @Override
    public String toString() {
        return key + "=" + getValue();
    }
}
/**
 * Tree node holding up to NODE_SIZE consecutive entries in sorted order.
 * Entries occupy the index range [left_idx, right_idx] of the keys/values
 * arrays. Nodes are linked both as a binary tree (parent/left/right) and as
 * a doubly linked list of in-order neighbours (prev/next); 'color' is the
 * red-black color bit.
 */
static class Node <K, V> extends TObject implements TCloneable {
    static final int NODE_SIZE = 64;
    Node<K, V> prev, next;
    Node<K, V> parent, left, right;
    V[] values;
    K[] keys;
    int left_idx = 0;   // index of the first occupied slot
    int right_idx = -1; // index of the last occupied slot (-1 == empty)
    int size = 0;       // number of occupied slots
    boolean color;

    @SuppressWarnings("unchecked")
    public Node() {
        keys = (K[]) new Object[NODE_SIZE];
        values = (V[]) new Object[NODE_SIZE];
    }

    /**
     * Deep-copies this subtree, attaching it to the given parent. The
     * prev/next chain is intentionally cleared; the caller is expected to
     * rebuild it after cloning the whole tree.
     */
    @SuppressWarnings("unchecked")
    Node<K, V> clone(Node<K, V> parent) throws TCloneNotSupportedException {
        Node<K, V> clone = (Node<K, V>) super.clone();
        clone.keys = TArrays.copyOf(keys, keys.length);
        clone.values = TArrays.copyOf(values, values.length);
        clone.left_idx = left_idx;
        clone.right_idx = right_idx;
        clone.parent = parent;
        if (left != null) {
            clone.left = left.clone(clone);
        }
        if (right != null) {
            clone.right = right.clone(clone);
        }
        clone.prev = null;
        clone.next = null;
        return clone;
    }
}
/**
 * Casts a key to TComparable for natural-ordering maps; null keys are
 * rejected with TNullPointerException.
 */
@SuppressWarnings("unchecked")
private static <T> TComparable<T> toComparable(T obj) {
    if (obj != null) {
        return (TComparable<T>) obj;
    }
    throw new TNullPointerException();
}
/**
 * Base class for all map iterators. Walks the prev/next chain of nodes in
 * ascending key order; within a node, 'offset' counts how many entries
 * remain, and the current slot index is (right_idx - offset).
 * Fail-fast: a mismatch between expectedModCount and the map's modCount
 * raises TConcurrentModificationException.
 */
static class AbstractMapIterator <K,V> {
    TTreeMap<K, V> backingMap;
    int expectedModCount;
    Node<K, V> node;     // node currently being iterated (null == exhausted)
    Node<K, V> lastNode; // node of the most recently returned entry
    int offset;          // entries remaining in the current node
    int lastOffset;      // offset of the most recently returned entry

    AbstractMapIterator(TTreeMap<K, V> map, Node<K, V> startNode, int startOffset) {
        backingMap = map;
        expectedModCount = map.modCount;
        node = startNode;
        offset = startOffset;
    }

    AbstractMapIterator(TTreeMap<K, V> map, Node<K, V> startNode) {
        // Start at the node's first entry: full span of occupied slots.
        this(map, startNode, startNode != null ? startNode.right_idx - startNode.left_idx : 0);
    }

    AbstractMapIterator(TTreeMap<K, V> map) {
        // Start at the smallest key in the map.
        this(map, minimum(map.root));
    }

    public boolean hasNext() {
        return node != null;
    }

    /** Advances to the next entry, recording the one just passed in lastNode/lastOffset. */
    final void makeNext() {
        if (expectedModCount != backingMap.modCount) {
            throw new TConcurrentModificationException();
        } else if (node == null) {
            throw new TNoSuchElementException();
        }
        lastNode = node;
        lastOffset = offset;
        if (offset != 0) {
            offset--;
        } else {
            // Current node exhausted; move to the in-order successor node.
            node = node.next;
            if (node != null) {
                offset = node.right_idx - node.left_idx;
            }
        }
    }

    /** Removes the most recently returned entry through the backing map. */
    final public void remove() {
        if (expectedModCount == backingMap.modCount) {
            if (lastNode != null) {
                int idx = lastNode.right_idx - lastOffset;
                backingMap.removeFromIterator(lastNode, idx);
                lastNode = null;
                // Keep the iterator valid across its own removal.
                expectedModCount++;
            } else {
                throw new TIllegalStateException();
            }
        } else {
            throw new TConcurrentModificationException();
        }
    }
}
/** Iterator over all entries of the map, from a given start position to the end. */
static class UnboundedEntryIterator <K, V> extends AbstractMapIterator<K, V>
    implements TIterator<Entry<K, V>> {

    UnboundedEntryIterator(TTreeMap<K, V> map, Node<K, V> startNode, int startOffset) {
        super(map, startNode, startOffset);
    }

    UnboundedEntryIterator(TTreeMap<K, V> map) {
        super(map);
    }

    @Override
    public Entry<K, V> next() {
        makeNext();
        // Wrap the slot just passed in a live MapEntry view.
        int idx = lastNode.right_idx - lastOffset;
        return backingMap.new MapEntry(lastNode, idx);
    }
}
/** Iterator over all keys of the map, from a given start position to the end. */
static class UnboundedKeyIterator <K, V> extends AbstractMapIterator<K, V>
    implements TIterator<K> {

    UnboundedKeyIterator(TTreeMap<K, V> map, Node<K, V> startNode, int startOffset) {
        super(map, startNode, startOffset);
    }

    UnboundedKeyIterator(TTreeMap<K, V> map) {
        super(map);
    }

    @Override
    public K next() {
        makeNext();
        return lastNode.keys[lastNode.right_idx - lastOffset];
    }
}
/** Iterator over all values of the map, from a given start position to the end. */
static class UnboundedValueIterator <K, V> extends AbstractMapIterator<K, V>
    implements TIterator<V> {

    UnboundedValueIterator(TTreeMap<K, V> map, Node<K, V> startNode, int startOffset) {
        super(map, startNode, startOffset);
    }

    UnboundedValueIterator(TTreeMap<K, V> map) {
        super(map);
    }

    @Override
    public V next() {
        makeNext();
        return lastNode.values[lastNode.right_idx - lastOffset];
    }
}
/**
 * Base class for range iterators used by SubMap views: iteration stops once
 * the entry at (finalNode, finalOffset) has been returned.
 */
static class BoundedMapIterator <K, V> extends AbstractMapIterator<K, V> {
    Node<K, V> finalNode; // node of the last entry in range
    int finalOffset;      // offset of the last entry in range

    BoundedMapIterator(Node<K, V> startNode, int startOffset, TTreeMap<K, V> map,
                       Node<K, V> finalNode, int finalOffset) {
        // A null finalNode means the range is empty: start exhausted.
        super(map, finalNode == null ? null : startNode, startOffset);
        this.finalNode = finalNode;
        this.finalOffset = finalOffset;
    }

    BoundedMapIterator(Node<K, V> startNode, TTreeMap<K, V> map,
                       Node<K, V> finalNode, int finalOffset) {
        this(startNode, startNode != null ?
             startNode.right_idx - startNode.left_idx : 0,
             map, finalNode, finalOffset);
    }

    BoundedMapIterator(Node<K, V> startNode, int startOffset,
                       TTreeMap<K, V> map, Node<K, V> finalNode) {
        this(startNode, startOffset, map, finalNode,
             finalNode.right_idx - finalNode.left_idx);
    }

    /** Advances like makeNext(), then terminates if the end of range was just passed. */
    void makeBoundedNext() {
        makeNext();
        if (lastNode == finalNode && lastOffset == finalOffset) {
            node = null;
        }
    }
}
/** Range-bounded iterator over map entries, used by SubMap views. */
static class BoundedEntryIterator <K, V> extends BoundedMapIterator<K, V>
    implements TIterator<Entry<K, V>> {

    public BoundedEntryIterator(Node<K, V> startNode, int startOffset, TTreeMap<K, V> map,
                                Node<K, V> finalNode, int finalOffset) {
        super(startNode, startOffset, map, finalNode, finalOffset);
    }

    @Override
    public Entry<K, V> next() {
        makeBoundedNext();
        int idx = lastNode.right_idx - lastOffset;
        return backingMap.new MapEntry(lastNode, idx);
    }
}
/** Range-bounded iterator over map keys, used by SubMap views. */
static class BoundedKeyIterator <K, V> extends BoundedMapIterator<K, V>
    implements TIterator<K> {

    public BoundedKeyIterator(Node<K, V> startNode, int startOffset, TTreeMap<K, V> map,
                              Node<K, V> finalNode, int finalOffset) {
        super(startNode, startOffset, map, finalNode, finalOffset);
    }

    @Override
    public K next() {
        makeBoundedNext();
        return lastNode.keys[lastNode.right_idx - lastOffset];
    }
}
/** Range-bounded iterator over map values, used by SubMap views. */
static class BoundedValueIterator <K, V> extends BoundedMapIterator<K, V>
    implements TIterator<V> {

    public BoundedValueIterator(Node<K, V> startNode, int startOffset, TTreeMap<K, V> map,
                                Node<K, V> finalNode, int finalOffset) {
        super(startNode, startOffset, map, finalNode, finalOffset);
    }

    @Override
    public V next() {
        makeBoundedNext();
        return lastNode.values[lastNode.right_idx - lastOffset];
    }
}
/**
 * Range view over a backing TTreeMap: keys in [startKey, endKey) when both
 * bounds are present (start inclusive, end exclusive). All reads and writes
 * go through the backing map; out-of-range writes throw
 * TIllegalArgumentException.
 */
static final class SubMap <K,V> extends TAbstractMap<K, V>
    implements TSortedMap<K, V>, TSerializable {
    private TTreeMap<K, V> backingMap;
    boolean hasStart, hasEnd;
    K startKey, endKey;
    transient TSet<Entry<K, V>> entrySet = null;
    // Cached location of the first/last in-range entry. The cache is valid
    // only while the recorded modCount matches the backing map's.
    transient int firstKeyModCount = -1;
    transient int lastKeyModCount = -1;
    transient Node<K, V> firstKeyNode;
    transient int firstKeyIndex;
    transient Node<K, V> lastKeyNode;
    transient int lastKeyIndex;

    SubMap(K start, TTreeMap<K, V> map) {
        backingMap = map;
        hasStart = true;
        startKey = start;
    }

    SubMap(K start, TTreeMap<K, V> map, K end) {
        backingMap = map;
        hasStart = hasEnd = true;
        startKey = start;
        endKey = end;
    }

    SubMap(TTreeMap<K, V> map, K end) {
        backingMap = map;
        hasEnd = true;
        endKey = end;
    }

    /**
     * Throws TIllegalArgumentException when the key lies strictly outside
     * [startKey, endKey]. Note the end bound is inclusive here (unlike
     * isInRange), so sub-views may reuse the end key as their own bound.
     */
    private void checkRange(K key) {
        TComparator<? super K> cmp = backingMap.comparator;
        if (cmp == null) {
            TComparable<K> object = toComparable(key);
            if (hasStart && object.compareTo(startKey) < 0) {
                throw new TIllegalArgumentException();
            }
            if (hasEnd && object.compareTo(endKey) > 0) {
                throw new TIllegalArgumentException();
            }
        } else {
            if (hasStart
                    && backingMap.comparator().compare(key, startKey) < 0) {
                throw new TIllegalArgumentException();
            }
            if (hasEnd && backingMap.comparator().compare(key, endKey) > 0) {
                throw new TIllegalArgumentException();
            }
        }
    }

    /** True when the key lies within [startKey, endKey) — end exclusive. */
    private boolean isInRange(K key) {
        TComparator<? super K> cmp = backingMap.comparator;
        if (cmp == null) {
            TComparable<K> object = toComparable(key);
            if (hasStart && object.compareTo(startKey) < 0) {
                return false;
            }
            if (hasEnd && object.compareTo(endKey) >= 0) {
                return false;
            }
        } else {
            if (hasStart && cmp.compare(key, startKey) < 0) {
                return false;
            }
            if (hasEnd && cmp.compare(key, endKey) >= 0) {
                return false;
            }
        }
        return true;
    }

    /** True when the key is strictly below the (exclusive) end bound. */
    private boolean checkUpperBound(K key) {
        if (hasEnd) {
            TComparator<? super K> cmp = backingMap.comparator;
            if (cmp == null) {
                return (toComparable(key).compareTo(endKey) < 0);
            }
            return (cmp.compare(key, endKey) < 0);
        }
        return true;
    }

    /** True when the key is at or above the (inclusive) start bound. */
    private boolean checkLowerBound(K key) {
        if (hasStart) {
            TComparator<? super K> cmp = backingMap.comparator;
            if (cmp == null) {
                return (toComparable(key).compareTo(startKey) >= 0);
            }
            return (cmp.compare(key, startKey) >= 0);
        }
        return true;
    }

    @Override
    public TComparator<? super K> comparator() {
        return backingMap.comparator();
    }

    @SuppressWarnings("unchecked")
    @Override
    public boolean containsKey(Object key) {
        if (isInRange((K) key)) {
            return backingMap.containsKey(key);
        }
        return false;
    }

    @Override
    public void clear() {
        // Removes only the in-range entries from the backing map.
        keySet().clear();
    }

    @Override
    public boolean containsValue(Object value) {
        // Linear scan over the in-range values; null values are supported.
        TIterator<V> it = values().iterator();
        if (value != null) {
            while (it.hasNext()) {
                if (value.equals(it.next())) {
                    return true;
                }
            }
        } else {
            while (it.hasNext()) {
                if (it.next() == null) {
                    return true;
                }
            }
        }
        return false;
    }

    @Override
    public TSet<Entry<K, V>> entrySet() {
        if (entrySet == null) {
            entrySet = new SubMapEntrySet<>(this);
        }
        return entrySet;
    }

    /**
     * Locates the first entry >= startKey (cached). Searches the node tree
     * and, within a node, binary-searches the occupied slot range; the
     * result is rejected if it violates the upper bound.
     */
    private void setFirstKey() {
        if (firstKeyModCount == backingMap.modCount) {
            return;
        }
        TComparable<K> object = backingMap.comparator == null ? toComparable(startKey) : null;
        K key = startKey;
        Node<K, V> node = backingMap.root;
        Node<K, V> foundNode = null;
        int foundIndex = -1;
        TOP_LOOP:
        while (node != null) {
            K[] keys = node.keys;
            int left_idx = node.left_idx;
            int result = backingMap.cmp(object, key, keys[left_idx]);
            if (result < 0) {
                // Whole node is >= startKey; remember it and look left for a
                // possibly smaller candidate.
                foundNode = node;
                foundIndex = left_idx;
                node = node.left;
            } else if (result == 0) {
                foundNode = node;
                foundIndex = left_idx;
                break;
            } else {
                int right_idx = node.right_idx;
                if (left_idx != right_idx) {
                    result = backingMap.cmp(object, key, keys[right_idx]);
                }
                if (result > 0) {
                    node = node.right;
                } else if (result == 0) {
                    foundNode = node;
                    foundIndex = right_idx;
                    break;
                } else { /*search in node*/
                    foundNode = node;
                    foundIndex = right_idx;
                    int low = left_idx + 1, mid = 0, high = right_idx - 1;
                    while (low <= high) {
                        mid = (low + high) >>> 1;
                        result = backingMap.cmp(object, key, keys[mid]);
                        if (result > 0) {
                            low = mid + 1;
                        } else if (result == 0) {
                            foundNode = node;
                            foundIndex = mid;
                            break TOP_LOOP;
                        } else {
                            foundNode = node;
                            foundIndex = mid;
                            high = mid - 1;
                        }
                    }
                    break TOP_LOOP;
                }
            }
        }
        if (foundNode != null && !checkUpperBound(foundNode.keys[foundIndex])) {
            foundNode = null;
        }
        firstKeyNode = foundNode;
        firstKeyIndex = foundIndex;
        firstKeyModCount = backingMap.modCount;
    }

    @Override
    public K firstKey() {
        if (backingMap.size > 0) {
            if (!hasStart) {
                Node<K, V> node = minimum(backingMap.root);
                if (node != null && checkUpperBound(node.keys[node.left_idx])) {
                    return node.keys[node.left_idx];
                }
            } else {
                setFirstKey();
                if (firstKeyNode != null) {
                    return firstKeyNode.keys[firstKeyIndex];
                }
            }
        }
        throw new TNoSuchElementException();
    }

    @SuppressWarnings("unchecked")
    @Override
    public V get(Object key) {
        if (isInRange((K) key)) {
            return backingMap.get(key);
        }
        return null;
    }

    @Override
    public TSortedMap<K, V> headMap(K endKey) {
        checkRange(endKey);
        if (hasStart) {
            return new SubMap<>(startKey, backingMap, endKey);
        }
        return new SubMap<>(backingMap, endKey);
    }

    @Override
    public boolean isEmpty() {
        if (hasStart) {
            setFirstKey();
            return firstKeyNode == null;
        } else {
            setLastKey();
            return lastKeyNode == null;
        }
    }

    @Override
    public TSet<K> keySet() {
        // NOTE(review): cachedKeySet appears to be a field inherited from
        // TAbstractMap — confirm against that class.
        if (cachedKeySet == null) {
            cachedKeySet = new SubMapKeySet<>(this);
        }
        return cachedKeySet;
    }

    /**
     * Locates the last entry < endKey (cached); mirror image of
     * setFirstKey(). The result is rejected if it violates the lower bound.
     */
    private void setLastKey() {
        if (lastKeyModCount == backingMap.modCount) {
            return;
        }
        TComparable<K> object = backingMap.comparator == null ? toComparable(endKey) : null;
        K key = endKey;
        Node<K, V> node = backingMap.root;
        Node<K, V> foundNode = null;
        int foundIndex = -1;
        TOP_LOOP:
        while (node != null) {
            K[] keys = node.keys;
            int left_idx = node.left_idx;
            int result = backingMap.cmp(object, key, keys[left_idx]);
            if (result <= 0) {
                node = node.left;
            } else {
                int right_idx = node.right_idx;
                if (left_idx != right_idx) {
                    result = backingMap.cmp(object, key, keys[right_idx]);
                }
                if (result > 0) {
                    foundNode = node;
                    foundIndex = right_idx;
                    node = node.right;
                } else if (result == 0) {
                    // endKey itself is excluded; step back one slot, possibly
                    // into the predecessor node.
                    if (node.left_idx == node.right_idx) {
                        foundNode = node.prev;
                        if (foundNode != null) {
                            foundIndex = foundNode.right_idx - 1;
                        }
                    } else {
                        foundNode = node;
                        foundIndex = right_idx - 1;
                    }
                    break;
                } else { /*search in node*/
                    foundNode = node;
                    foundIndex = left_idx;
                    int low = left_idx + 1, mid = 0, high = right_idx - 1;
                    while (low <= high) {
                        mid = (low + high) >>> 1;
                        result = backingMap.cmp(object, key, keys[mid]);
                        if (result > 0) {
                            foundNode = node;
                            foundIndex = mid;
                            low = mid + 1;
                        } else if (result == 0) {
                            foundNode = node;
                            foundIndex = mid - 1;
                            break TOP_LOOP;
                        } else {
                            high = mid - 1;
                        }
                    }
                    break TOP_LOOP;
                }
            }
        }
        if (foundNode != null && !checkLowerBound(foundNode.keys[foundIndex])) {
            foundNode = null;
        }
        lastKeyNode = foundNode;
        lastKeyIndex = foundIndex;
        lastKeyModCount = backingMap.modCount;
    }

    @Override
    public K lastKey() {
        if (backingMap.size > 0) {
            if (!hasEnd) {
                Node<K, V> node = maximum(backingMap.root);
                if (node != null && checkLowerBound(node.keys[node.right_idx])) {
                    return node.keys[node.right_idx];
                }
            } else {
                setLastKey();
                if (lastKeyNode != null) {
                    return lastKeyNode.keys[lastKeyIndex];
                }
            }
        }
        throw new TNoSuchElementException();
    }

    @Override
    public V put(K key, V value) {
        if (isInRange(key)) {
            return backingMap.put(key, value);
        }
        throw new TIllegalArgumentException();
    }

    @SuppressWarnings("unchecked")
    @Override
    public V remove(Object key) {
        if (isInRange((K) key)) {
            return backingMap.remove(key);
        }
        return null;
    }

    @Override
    public TSortedMap<K, V> subMap(K startKey, K endKey) {
        checkRange(startKey);
        checkRange(endKey);
        TComparator<? super K> c = backingMap.comparator();
        if (c == null) {
            if (toComparable(startKey).compareTo(endKey) <= 0) {
                return new SubMap<>(startKey, backingMap, endKey);
            }
        } else {
            if (c.compare(startKey, endKey) <= 0) {
                return new SubMap<>(startKey, backingMap, endKey);
            }
        }
        throw new IllegalArgumentException();
    }

    @Override
    public TSortedMap<K, V> tailMap(K startKey) {
        checkRange(startKey);
        if (hasEnd) {
            return new SubMap<>(startKey, backingMap, endKey);
        }
        return new SubMap<>(startKey, backingMap);
    }

    @Override
    public TCollection<V> values() {
        // NOTE(review): cachedValues appears to be a field inherited from
        // TAbstractMap — confirm against that class.
        if (cachedValues == null) {
            cachedValues = new SubMapValuesCollection<>(this);
        }
        return cachedValues;
    }

    @Override
    public int size() {
        // Counts entries node-by-node from the first in-range slot to the
        // last, using the prev/next chain.
        Node<K, V> from, to;
        int fromIndex, toIndex;
        if (hasStart) {
            setFirstKey();
            from = firstKeyNode;
            fromIndex = firstKeyIndex;
        } else {
            from = minimum(backingMap.root);
            fromIndex = from == null ? 0 : from.left_idx;
        }
        if (from == null) {
            return 0;
        }
        if (hasEnd) {
            setLastKey();
            to = lastKeyNode;
            toIndex = lastKeyIndex;
        } else {
            to = maximum(backingMap.root);
            toIndex = to == null ? 0 : to.right_idx;
        }
        if (to == null) {
            return 0;
        }
        if (from == to) {
            return toIndex - fromIndex + 1;
        }
        int sum = 0;
        while (from != to) {
            sum += (from.right_idx - fromIndex + 1);
            from = from.next;
            fromIndex = from.left_idx;
        }
        return sum + toIndex - fromIndex + 1;
    }
}
/** Entry-set view of a SubMap; iteration is bounded by the sub-map's range. */
static class SubMapEntrySet<K,V> extends TAbstractSet<Entry<K, V>> {
    SubMap<K, V> subMap;

    SubMapEntrySet(SubMap<K, V> map) {
        subMap = map;
    }

    @Override
    public boolean isEmpty() {
        return subMap.isEmpty();
    }

    @Override
    public TIterator<Entry<K, V>> iterator() {
        // Resolve the first in-range position, then pick a bounded or
        // unbounded iterator depending on whether an end bound exists.
        Node<K, V> from;
        int fromIndex;
        if (subMap.hasStart) {
            subMap.setFirstKey();
            from = subMap.firstKeyNode;
            fromIndex = subMap.firstKeyIndex;
        } else {
            from = minimum(subMap.backingMap.root);
            fromIndex = from != null ? from.left_idx : 0;
        }
        if (!subMap.hasEnd) {
            return new UnboundedEntryIterator<>(subMap.backingMap, from,
                    from == null ? 0 : from.right_idx - fromIndex);
        }
        subMap.setLastKey();
        Node<K, V> to = subMap.lastKeyNode;
        int toIndex = subMap.lastKeyIndex;
        return new BoundedEntryIterator<>(from, from == null ? 0 : from.right_idx - fromIndex,
                subMap.backingMap, to, to == null ? 0 : to.right_idx - toIndex);
    }

    @Override
    public int size() {
        return subMap.size();
    }

    @SuppressWarnings("unchecked")
    @Override
    public boolean contains(Object object) {
        if (object instanceof Entry) {
            Entry<K, V> entry = (Entry<K, V>) object;
            K key = entry.getKey();
            if (subMap.isInRange(key)) {
                V v1 = subMap.get(key), v2 = entry.getValue();
                // Null value: confirm the key is actually mapped to null.
                return v1 == null ? ( v2 == null && subMap.containsKey(key) ) : v1.equals(v2);
            }
        }
        return false;
    }

    @Override
    public boolean remove(Object object) {
        if (contains(object)) {
            @SuppressWarnings("unchecked")
            Entry<K, V> entry = (Entry<K, V>) object;
            K key = entry.getKey();
            subMap.remove(key);
            return true;
        }
        return false;
    }
}
/** Key-set view of a SubMap; iteration is bounded by the sub-map's range. */
static class SubMapKeySet <K,V> extends TAbstractSet<K> {
    SubMap<K, V> subMap;

    SubMapKeySet(SubMap<K, V> map) {
        subMap = map;
    }

    @Override
    public boolean contains(Object object) {
        return subMap.containsKey(object);
    }

    @Override
    public boolean isEmpty() {
        return subMap.isEmpty();
    }

    @Override
    public int size() {
        return subMap.size();
    }

    @Override
    public boolean remove(Object object) {
        if (subMap.containsKey(object)) {
            subMap.remove(object);
            return true;
        }
        return false;
    }

    @Override
    public TIterator<K> iterator() {
        // Resolve the first in-range position, then pick a bounded or
        // unbounded iterator depending on whether an end bound exists.
        Node<K, V> from;
        int fromIndex;
        if (subMap.hasStart) {
            subMap.setFirstKey();
            from = subMap.firstKeyNode;
            fromIndex = subMap.firstKeyIndex;
        } else {
            from = minimum(subMap.backingMap.root);
            fromIndex = from != null ? from.left_idx : 0;
        }
        if (!subMap.hasEnd) {
            return new UnboundedKeyIterator<>(subMap.backingMap, from,
                    from == null ? 0 : from.right_idx - fromIndex);
        }
        subMap.setLastKey();
        Node<K, V> to = subMap.lastKeyNode;
        int toIndex = subMap.lastKeyIndex;
        return new BoundedKeyIterator<>(from,
                from == null ? 0 : from.right_idx - fromIndex, subMap.backingMap, to,
                to == null ? 0 : to.right_idx - toIndex);
    }
}
/** Values view of a SubMap; iteration is bounded by the sub-map's range. */
static class SubMapValuesCollection <K,V> extends TAbstractCollection<V> {
    SubMap<K, V> subMap;

    public SubMapValuesCollection(SubMap<K, V> subMap) {
        this.subMap = subMap;
    }

    @Override
    public boolean isEmpty() {
        return subMap.isEmpty();
    }

    @Override
    public TIterator<V> iterator() {
        // Resolve the first in-range position, then pick a bounded or
        // unbounded iterator depending on whether an end bound exists.
        Node<K, V> from;
        int fromIndex;
        if (subMap.hasStart) {
            subMap.setFirstKey();
            from = subMap.firstKeyNode;
            fromIndex = subMap.firstKeyIndex;
        } else {
            from = minimum(subMap.backingMap.root);
            fromIndex = from != null ? from.left_idx : 0;
        }
        if (!subMap.hasEnd) {
            return new UnboundedValueIterator<>(subMap.backingMap, from,
                    from == null ? 0 : from.right_idx - fromIndex);
        }
        subMap.setLastKey();
        Node<K, V> to = subMap.lastKeyNode;
        int toIndex = subMap.lastKeyIndex;
        return new BoundedValueIterator<>(from,
                from == null ? 0 : from.right_idx - fromIndex, subMap.backingMap, to,
                to == null ? 0 : to.right_idx - toIndex);
    }

    @Override
    public int size() {
        return subMap.size();
    }
}
// Creates an empty map ordered by the keys' natural ordering.
public TTreeMap() {
}
// Creates an empty map ordered by the given comparator; a null comparator
// means natural ordering.
public TTreeMap(TComparator<? super K> comparator) {
    this.comparator = comparator;
}
// Creates a map with the given mappings, ordered by the keys' natural ordering.
public TTreeMap(TMap<? extends K, ? extends V> map) {
    putAll(map);
}
// Creates a map with the same mappings and ordering as the given sorted map.
// Entries arrive pre-sorted, so the tree is bulk-built by appending each
// entry after the current last node.
public TTreeMap(TSortedMap<K, ? extends V> map) {
    this(map.comparator());
    Node<K, V> tail = null;
    for (TIterator<? extends Entry<K, ? extends V>> cursor = map.entrySet().iterator(); cursor.hasNext();) {
        Entry<K, ? extends V> e = cursor.next();
        tail = addToLast(tail, e.getKey(), e.getValue());
    }
}
// Appends (key, value) after the current last node; used for bulk-loading
// from an already sorted source. Returns the node now holding the appended
// pair so the caller can chain further appends.
Node<K, V> addToLast(Node<K, V> last, K key, V value) {
    if (last == null) {
        // First entry: becomes the root.
        root = last = createNode(key, value);
        size = 1;
    } else if (last.size == Node.NODE_SIZE) {
        // Last node is full: start a new node as its right child, rebalance.
        Node<K, V> newNode = createNode(key, value);
        attachToRight(last, newNode);
        balance(newNode);
        size++;
        last = newNode;
    } else {
        // Room left in the last node: append in place.
        appendFromRight(last, key, value);
        size++;
    }
    return last;
}
// Drops every mapping in O(1); the detached nodes become garbage at once.
@Override
public void clear() {
    modCount++;
    size = 0;
    root = null;
}
// Implementation behind clone(): copies the node tree and then rebuilds the
// prev/next threading of the leaf chain, which the node copy does not carry.
@SuppressWarnings("unchecked")
@Rename("clone")
public TObject clone0() {
    try {
        TTreeMap<K, V> clone = (TTreeMap<K, V>) super.clone();
        if (root != null) {
            clone.root = root.clone(null);
            // restore prev/next chain
            Node<K, V> node = minimum(clone.root);
            while (true) {
                Node<K, V> nxt = successor(node);
                if (nxt == null) {
                    break;
                }
                nxt.prev = node;
                node.next = nxt;
                node = nxt;
            }
        }
        return clone;
    } catch (TCloneNotSupportedException e) {
        // Cannot happen: super.clone() is supported for this class.
        return null;
    }
}
// In-order successor of a node: the leftmost node of the right subtree if
// one exists, otherwise the nearest ancestor reached from a left child
// (null when the node is the overall maximum).
static private <K, V> Node<K, V> successor(Node<K, V> node) {
    if (node.right != null) {
        return minimum(node.right);
    }
    Node<K, V> child = node;
    Node<K, V> ancestor = node.parent;
    while (ancestor != null && child == ancestor.right) {
        child = ancestor;
        ancestor = ancestor.parent;
    }
    return ancestor;
}
// Comparator used to order this map, or null for natural ordering.
@Override
public TComparator<? super K> comparator() {
    return comparator;
}
// True if the map holds a mapping for key. Mirrors get(): descend the tree
// comparing against each node's first and last key, then binary-search the
// interior slots of the matching node.
@Override
public boolean containsKey(Object key) {
    @SuppressWarnings("unchecked")
    TComparable<K> object = comparator == null ? toComparable((K) key) : null;
    @SuppressWarnings("unchecked")
    K keyK = (K)key;
    Node<K, V> node = root;
    while (node != null) {
        K[] keys = node.keys;
        int left_idx = node.left_idx;
        int result = cmp(object, keyK, keys[left_idx]);
        if (result < 0) {
            node = node.left;
        } else if (result == 0) {
            return true;
        } else {
            int right_idx = node.right_idx;
            if (left_idx != right_idx) {
                result = cmp(object, keyK, keys[right_idx]);
            }
            if (result > 0) {
                node = node.right;
            } else if (result == 0) {
                return true;
            } else { /*search in node*/
                // Key is strictly between the node's first and last keys:
                // binary search the interior slots.
                int low = left_idx + 1, mid = 0, high = right_idx - 1;
                while (low <= high) {
                    mid = (low + high) >>> 1;
                    result = cmp(object, keyK, keys[mid]);
                    if (result > 0) {
                        low = mid + 1;
                    } else if (result == 0) {
                        return true;
                    } else {
                        high = mid - 1;
                    }
                }
                return false;
            }
        }
    }
    return false;
}
// True if any mapping has the given value (null allowed). Linear scan over
// the threaded leaf chain from the leftmost node to the rightmost.
@Override
public boolean containsValue(Object value) {
    for (Node<K, V> node = minimum(root); node != null; node = node.next) {
        V[] values = node.values;
        int hi = node.right_idx;
        for (int i = node.left_idx; i <= hi; i++) {
            V candidate = values[i];
            if (value == null ? candidate == null : value.equals(candidate)) {
                return true;
            }
        }
    }
    return false;
}
// Lazily creates and caches a live entry-set view backed by this map.
@Override
public TSet<Entry<K, V>> entrySet() {
    if (entrySet == null) {
        entrySet = new TAbstractSet<Entry<K, V>>() {
            @Override
            public int size() {
                return size;
            }
            @Override
            public void clear() {
                TTreeMap.this.clear();
            }
            @SuppressWarnings("unchecked")
            @Override
            public boolean contains(Object object) {
                // An entry is contained iff the same key maps to an equal
                // value; the containsKey re-check distinguishes "maps to
                // null" from "absent".
                if (object instanceof Entry) {
                    Entry<K, V> entry = (Entry<K, V>) object;
                    K key = entry.getKey();
                    Object v1 = TTreeMap.this.get(key), v2 = entry.getValue();
                    return v1 == null ? ( v2 == null && TTreeMap.this.containsKey(key) ) : v1.equals(v2);
                }
                return false;
            }
            @Override
            public boolean remove(Object object) {
                if (contains(object)) {
                    @SuppressWarnings("unchecked")
                    Entry<K, V> entry = (Entry<K, V>) object;
                    K key = entry.getKey();
                    TTreeMap.this.remove(key);
                    return true;
                }
                return false;
            }
            @Override
            public TIterator<Entry<K, V>> iterator() {
                return new UnboundedEntryIterator<>(TTreeMap.this);
            }
        };
    }
    return entrySet;
}
// Smallest key in the map; throws if the map is empty.
@Override
public K firstKey() {
    if (root != null) {
        Node<K, V> node = minimum(root);
        return node.keys[node.left_idx];
    }
    throw new TNoSuchElementException();
}
// Returns the value mapped to key, or null when absent (or mapped to null).
// Descends the tree comparing against each node's first and last key, then
// binary-searches the interior slots of the matching node.
@Override
public V get(Object key) {
    @SuppressWarnings("unchecked")
    TComparable<K> object = comparator == null ? toComparable((K) key) : null;
    @SuppressWarnings("unchecked")
    K keyK = (K) key;
    Node<K, V> node = root;
    while (node != null) {
        K[] keys = node.keys;
        int left_idx = node.left_idx;
        int result = cmp(object, keyK, keys[left_idx]);
        if (result < 0) {
            node = node.left;
        } else if (result == 0) {
            return node.values[left_idx];
        } else {
            int right_idx = node.right_idx;
            if (left_idx != right_idx) {
                result = cmp(object, keyK, keys[right_idx]);
            }
            if (result > 0) {
                node = node.right;
            } else if (result == 0) {
                return node.values[right_idx];
            } else { /*search in node*/
                // Key lies strictly inside the node's key window.
                int low = left_idx + 1, mid = 0, high = right_idx - 1;
                while (low <= high) {
                    mid = (low + high) >>> 1;
                    result = cmp(object, keyK, keys[mid]);
                    if (result > 0) {
                        low = mid + 1;
                    } else if (result == 0) {
                        return node.values[mid];
                    } else {
                        high = mid - 1;
                    }
                }
                return null;
            }
        }
    }
    return null;
}
// Compares key1 against key2: via the precomputed Comparable wrapper when
// natural ordering is in effect (object != null), else via the comparator.
private int cmp(TComparable<K> object, K key1, K key2) {
    return object != null ? object.compareTo(key2) : comparator.compare(key1, key2);
}
// View of the portion of this map whose keys are strictly less than endKey.
// The self-comparison below only validates that endKey is comparable under
// the map's ordering (throws early on a bad key).
@Override
public TSortedMap<K, V> headMap(K endKey) {
    // Check for errors
    if (comparator == null) {
        toComparable(endKey).compareTo(endKey);
    } else {
        comparator.compare(endKey, endKey);
    }
    return new SubMap<>(this, endKey);
}
// Lazily creates and caches a live key-set view backed by this map.
@Override
public TSet<K> keySet() {
    if (cachedKeySet == null) {
        cachedKeySet = new TAbstractSet<K>() {
            @Override
            public boolean contains(Object object) {
                return TTreeMap.this.containsKey(object);
            }
            @Override
            public int size() {
                return TTreeMap.this.size;
            }
            @Override
            public void clear() {
                TTreeMap.this.clear();
            }
            @Override
            public boolean remove(Object object) {
                if (contains(object)) {
                    TTreeMap.this.remove(object);
                    return true;
                }
                return false;
            }
            @Override
            public TIterator<K> iterator() {
                return new UnboundedKeyIterator<>(TTreeMap.this);
            }
        };
    }
    return cachedKeySet;
}
// Largest key in the map; throws if the map is empty.
@Override
public K lastKey() {
    if (root != null) {
        Node<K, V> node = maximum(root);
        return node.keys[node.right_idx];
    }
    throw new TNoSuchElementException();
}
// Leftmost (smallest-key) node of the given subtree; null-tolerant.
static <K,V> Node<K, V> minimum(Node<K, V> subtree) {
    Node<K, V> cur = subtree;
    if (cur != null) {
        while (cur.left != null) {
            cur = cur.left;
        }
    }
    return cur;
}
// Rightmost (largest-key) node of the given subtree; null-tolerant.
static <K,V> Node<K, V> maximum(Node<K, V> subtree) {
    Node<K, V> cur = subtree;
    if (cur != null) {
        while (cur.right != null) {
            cur = cur.right;
        }
    }
    return cur;
}
// Associates value with key, returning the previous value or null. First
// walks the tree like get(); on a hit the value slot is overwritten in
// place. On a miss the new pair is inserted into the node found (shifting
// entries towards whichever side has room), or spills a displaced pair into
// a neighbour / fresh node when the node is full.
@Override
public V put(K key, V value) {
    if (root == null) {
        // Empty map: the new pair becomes the root node.
        root = createNode(key, value);
        size = 1;
        modCount++;
        return null;
    }
    TComparable<K> object = comparator == null ? toComparable(key) : null;
    K keyK = key;
    Node<K, V> node = root;
    Node<K, V> prevNode = null;
    int result = 0;
    while (node != null) {
        prevNode = node;
        K[] keys = node.keys;
        int left_idx = node.left_idx;
        result = cmp(object, keyK, keys[left_idx]);
        if (result < 0) {
            node = node.left;
        } else if (result == 0) {
            V res = node.values[left_idx];
            node.values[left_idx] = value;
            return res;
        } else {
            int right_idx = node.right_idx;
            if (left_idx != right_idx) {
                result = cmp(object, keyK, keys[right_idx]);
            }
            if (result > 0) {
                node = node.right;
            } else if (result == 0) {
                V res = node.values[right_idx];
                node.values[right_idx] = value;
                return res;
            } else { /*search in node*/
                int low = left_idx + 1, mid = 0, high = right_idx - 1;
                while (low <= high) {
                    mid = (low + high) >>> 1;
                    result = cmp(object, keyK, keys[mid]);
                    if (result > 0) {
                        low = mid + 1;
                    } else if (result == 0) {
                        V res = node.values[mid];
                        node.values[mid] = value;
                        return res;
                    } else {
                        high = mid - 1;
                    }
                }
                // Key not present; 'low' is the in-node insertion index.
                result = low;
                break;
            }
        }
    } /* while */
    /*
    if(node == null) {
        if(prevNode==null) {
            - case of empty Tree
        } else {
            result < 0 - prevNode.left==null - attach here
            result > 0 - prevNode.right==null - attach here
        }
    } else {
        insert into node.
        result - index where it should be inserted.
    }
    */
    size++;
    modCount++;
    if (node == null) {
        if (prevNode == null) {
            // case of empty Tree
            root = createNode(key, value);
        } else if (prevNode.size < Node.NODE_SIZE) {
            // there is a place for insert
            if (result < 0) {
                appendFromLeft(prevNode, key, value);
            } else {
                appendFromRight(prevNode, key, value);
            }
        } else {
            // create and link
            Node<K, V> newNode = createNode(key, value);
            if (result < 0) {
                attachToLeft(prevNode, newNode);
            } else {
                attachToRight(prevNode, newNode);
            }
            balance(newNode);
        }
    } else {
        // insert into node.
        // result - index where it should be inserted.
        if (node.size < Node.NODE_SIZE) { // insert and ok
            int left_idx = node.left_idx;
            int right_idx = node.right_idx;
            // Shift towards the side that is cheaper, or the only one with
            // free slots.
            if (left_idx == 0 || ((right_idx != Node.NODE_SIZE - 1) && (right_idx - result <= result - left_idx))) {
                int right_idxPlus1 = right_idx + 1;
                System.arraycopy(node.keys, result, node.keys, result + 1, right_idxPlus1 - result);
                System.arraycopy(node.values, result, node.values, result + 1, right_idxPlus1 - result);
                node.right_idx = right_idxPlus1;
                node.keys[result] = key;
                node.values[result] = value;
            } else {
                int left_idxMinus1 = left_idx - 1;
                System.arraycopy(node.keys, left_idx, node.keys, left_idxMinus1, result - left_idx);
                System.arraycopy(node.values, left_idx, node.values, left_idxMinus1, result - left_idx);
                node.left_idx = left_idxMinus1;
                node.keys[result - 1] = key;
                node.values[result - 1] = value;
            }
            node.size++;
        } else {
            // there are no place here
            // insert and push old pair
            Node<K, V> previous = node.prev;
            Node<K, V> nextNode = node.next;
            boolean removeFromStart;
            boolean attachFromLeft = false;
            Node<K, V> attachHere = null;
            if (previous == null) {
                if (nextNode != null && nextNode.size < Node.NODE_SIZE) {
                    // move last pair to next
                    removeFromStart = false;
                } else {
                    // next node doesn't exist or full
                    // left==null
                    // drop first pair to new node from left
                    removeFromStart = true;
                    attachFromLeft = true;
                    attachHere = node;
                }
            } else if (nextNode == null) {
                if (previous.size < Node.NODE_SIZE) {
                    // move first pair to prev
                    removeFromStart = true;
                } else {
                    // right == null;
                    // drop last pair to new node from right
                    removeFromStart = false;
                    attachFromLeft = false;
                    attachHere = node;
                }
            } else {
                if (previous.size < Node.NODE_SIZE) {
                    if (nextNode.size < Node.NODE_SIZE) {
                        // choose prev or next for moving
                        removeFromStart = previous.size < nextNode.size;
                    } else {
                        // move first pair to prev
                        removeFromStart = true;
                    }
                } else {
                    if (nextNode.size < Node.NODE_SIZE) {
                        // move last pair to next
                        removeFromStart = false;
                    } else {
                        // prev & next are full
                        // if node.right!=null then node.next.left==null
                        // if node.left!=null then node.prev.right==null
                        if (node.right == null) {
                            attachHere = node;
                            attachFromLeft = false;
                            removeFromStart = false;
                        } else {
                            attachHere = nextNode;
                            attachFromLeft = true;
                            removeFromStart = false;
                        }
                    }
                }
            }
            K movedKey;
            V movedValue;
            if (removeFromStart) {
                // node.left_idx == 0
                movedKey = node.keys[0];
                movedValue = node.values[0];
                int resMunus1 = result - 1;
                System.arraycopy(node.keys, 1, node.keys, 0, resMunus1);
                System.arraycopy(node.values, 1, node.values, 0, resMunus1);
                node.keys [resMunus1] = key;
                node.values[resMunus1] = value;
            } else {
                // node.right_idx == Node.NODE_SIZE - 1
                movedKey = node.keys[Node.NODE_SIZE - 1];
                movedValue = node.values[Node.NODE_SIZE - 1];
                System.arraycopy(node.keys, result, node.keys, result + 1, Node.NODE_SIZE - 1 - result);
                System.arraycopy(node.values, result, node.values, result + 1, Node.NODE_SIZE - 1 - result);
                node.keys[result] = key;
                node.values[result] = value;
            }
            if (attachHere == null) {
                // The displaced pair fits into an adjacent node.
                if (removeFromStart) {
                    appendFromRight(previous, movedKey, movedValue);
                } else {
                    appendFromLeft(nextNode, movedKey, movedValue);
                }
            } else {
                // No room nearby: displaced pair gets its own node; rebalance.
                Node<K, V> newNode = createNode(movedKey, movedValue);
                if (attachFromLeft) {
                    attachToLeft(attachHere, newNode);
                } else {
                    attachToRight(attachHere, newNode);
                }
                balance(newNode);
            }
        }
    }
    return null;
}
// Inserts the pair at the low end of the node; the caller guarantees free
// space. If slot 0 is already occupied, the whole occupied window is shifted
// right by one instead.
private void appendFromLeft(Node<K, V> node, K keyObj, V value) {
    if (node.left_idx == 0) {
        int new_right = node.right_idx + 1;
        System.arraycopy(node.keys, 0, node.keys, 1, new_right);
        System.arraycopy(node.values, 0, node.values, 1, new_right);
        node.right_idx = new_right;
    } else {
        node.left_idx--;
    }
    node.size++;
    node.keys[node.left_idx] = keyObj;
    node.values[node.left_idx] = value;
}
// Links newNode as node's left child and splices it into the prev/next leaf
// chain immediately before node. Caller ensures node.left == null.
private void attachToLeft(Node<K, V> node, Node<K, V> newNode) {
    newNode.parent = node;
    // node.left==null - attach here
    node.left = newNode;
    Node<K, V> predecessor = node.prev;
    newNode.prev = predecessor;
    newNode.next = node;
    if (predecessor != null) {
        predecessor.next = newNode;
    }
    node.prev = newNode;
}
/* add pair into node; existence free room in the node should be checked
 * before call
 */
// Mirror of appendFromLeft: inserts at the high end of the node, shifting the
// occupied window left by one when the last slot is already taken.
private void appendFromRight(Node<K, V> node, K keyObj, V value) {
    if (node.right_idx == Node.NODE_SIZE - 1) {
        int left_idx = node.left_idx;
        int left_idxMinus1 = left_idx - 1;
        System.arraycopy(node.keys, left_idx, node.keys, left_idxMinus1, Node.NODE_SIZE - left_idx);
        System.arraycopy(node.values, left_idx, node.values, left_idxMinus1, Node.NODE_SIZE - left_idx);
        node.left_idx = left_idxMinus1;
    } else {
        node.right_idx++;
    }
    node.size++;
    node.keys[node.right_idx] = keyObj;
    node.values[node.right_idx] = value;
}
// Links newNode as node's right child and splices it into the prev/next leaf
// chain immediately after node. Caller ensures node.right == null.
private void attachToRight(Node<K, V> node, Node<K, V> newNode) {
    newNode.parent = node;
    // - node.right==null - attach here
    node.right = newNode;
    newNode.prev = node;
    Node<K, V> successor = node.next;
    newNode.next = successor;
    if (successor != null) {
        successor.prev = newNode;
    }
    node.next = newNode;
}
// Allocates a fresh single-entry node with the pair stored in slot 0.
private Node<K, V> createNode(K keyObj, V value) {
    Node<K, V> fresh = new Node<>();
    fresh.left_idx = 0;
    fresh.right_idx = 0;
    fresh.size = 1;
    fresh.keys[0] = keyObj;
    fresh.values[0] = value;
    return fresh;
}
// Red-black insert fixup (color == true plays red, false plays black):
// recolors and rotates upward from the freshly inserted node x until no red
// node has a red parent, then forces the root black.
void balance(Node<K, V> x) {
    Node<K, V> y;
    x.color = true;
    while (x != root && x.parent.color) {
        if (x.parent == x.parent.parent.left) {
            y = x.parent.parent.right;  // uncle
            if (y != null && y.color) {
                // Red uncle: recolor and continue from the grandparent.
                x.parent.color = false;
                y.color = false;
                x.parent.parent.color = true;
                x = x.parent.parent;
            } else {
                if (x == x.parent.right) {
                    // Inner child: rotate into the outer position first.
                    x = x.parent;
                    leftRotate(x);
                }
                x.parent.color = false;
                x.parent.parent.color = true;
                rightRotate(x.parent.parent);
            }
        } else {
            // Mirror image of the branch above.
            y = x.parent.parent.left;
            if (y != null && y.color) {
                x.parent.color = false;
                y.color = false;
                x.parent.parent.color = true;
                x = x.parent.parent;
            } else {
                if (x == x.parent.left) {
                    x = x.parent;
                    rightRotate(x);
                }
                x.parent.color = false;
                x.parent.parent.color = true;
                leftRotate(x.parent.parent);
            }
        }
    }
    root.color = false;
}
// Right rotation around x: y = x.left rises into x's place and x becomes
// y's right child; subtree order is preserved.
private void rightRotate(Node<K, V> x) {
    Node<K, V> y = x.left;
    x.left = y.right;
    if (y.right != null) {
        y.right.parent = x;
    }
    y.parent = x.parent;
    if (x.parent == null) {
        root = y;
    } else {
        if (x == x.parent.right) {
            x.parent.right = y;
        } else {
            x.parent.left = y;
        }
    }
    y.right = x;
    x.parent = y;
}
// Left rotation around x: y = x.right rises into x's place and x becomes
// y's left child; subtree order is preserved.
private void leftRotate(Node<K, V> x) {
    Node<K, V> y = x.right;
    x.right = y.left;
    if (y.left != null) {
        y.left.parent = x;
    }
    y.parent = x.parent;
    if (x.parent == null) {
        root = y;
    } else {
        if (x == x.parent.left) {
            x.parent.left = y;
        } else {
            x.parent.right = y;
        }
    }
    y.left = x;
    x.parent = y;
}
// Copies every mapping from the given map; the override merely delegates to
// the inherited implementation.
@Override
public void putAll(TMap<? extends K, ? extends V> map) {
    super.putAll(map);
}
// Removes the mapping for key, returning the removed value or null. The
// search mirrors get(); on a hit the position of the key inside the node
// selects the removal helper (leftmost, rightmost, or interior slot).
@Override
public V remove(Object key) {
    if (size == 0) {
        return null;
    }
    @SuppressWarnings("unchecked")
    TComparable<K> object = comparator == null ? toComparable((K) key) : null;
    @SuppressWarnings("unchecked")
    K keyK = (K) key;
    Node<K, V> node = root;
    while (node != null) {
        K[] keys = node.keys;
        int left_idx = node.left_idx;
        int result = cmp(object, keyK, keys[left_idx]);
        if (result < 0) {
            node = node.left;
        } else if (result == 0) {
            V value = node.values[left_idx];
            removeLeftmost(node);
            return value;
        } else {
            int right_idx = node.right_idx;
            if (left_idx != right_idx) {
                result = cmp(object, keyK, keys[right_idx]);
            }
            if (result > 0) {
                node = node.right;
            } else if (result == 0) {
                V value = node.values[right_idx];
                removeRightmost(node);
                return value;
            } else { /*search in node*/
                int low = left_idx + 1, mid = 0, high = right_idx - 1;
                while (low <= high) {
                    mid = (low + high) >>> 1;
                    result = cmp(object, keyK, keys[mid]);
                    if (result > 0) {
                        low = mid + 1;
                    } else if (result == 0) {
                        V value = node.values[mid];
                        removeMiddleElement(node, mid);
                        return value;
                    } else {
                        high = mid - 1;
                    }
                }
                return null;
            }
        }
    }
    return null;
}
// Removes the entry at node.left_idx. If an adjacent node has enough free
// room, the survivors are merged into it and this node is deleted; otherwise
// the slot is cleared in place, possibly absorbing a single-entry
// predecessor node to keep the chain dense.
void removeLeftmost(Node<K, V> node) {
    int index = node.left_idx;
    if (node.size == 1) {
        deleteNode(node);
    } else if (node.prev != null && (Node.NODE_SIZE - 1 - node.prev.right_idx) > node.size) {
        // move all to prev node and kill it
        Node<K, V> prev = node.prev;
        int size = node.right_idx - index;
        System.arraycopy(node.keys, index + 1, prev.keys, prev.right_idx + 1, size);
        System.arraycopy(node.values, index + 1, prev.values, prev.right_idx + 1, size);
        prev.right_idx += size;
        prev.size += size;
        deleteNode(node);
    } else if (node.next != null && (node.next.left_idx) > node.size) {
        // move all to next node and kill it
        Node<K, V> next = node.next;
        int size = node.right_idx - index;
        int next_new_left = next.left_idx - size;
        next.left_idx = next_new_left;
        System.arraycopy(node.keys, index + 1, next.keys, next_new_left, size);
        System.arraycopy(node.values, index + 1, next.values, next_new_left, size);
        next.size += size;
        deleteNode(node);
    } else {
        // Clear the slot in place.
        node.keys[index] = null;
        node.values[index] = null;
        node.left_idx++;
        node.size--;
        Node<K, V> prev = node.prev;
        if (prev != null && prev.size == 1) {
            // Absorb a single-entry predecessor.
            node.size++;
            node.left_idx--;
            node.keys [node.left_idx] = prev.keys [prev.left_idx];
            node.values[node.left_idx] = prev.values[prev.left_idx];
            deleteNode(prev);
        }
    }
    modCount++;
    size--;
}
// Removes the entry at node.right_idx. Same merge/shift strategy as
// removeLeftmost, mirrored to the high end of the node.
void removeRightmost(Node<K, V> node) {
    int index = node.right_idx;
    if (node.size == 1) {
        deleteNode(node);
    } else if (node.prev != null && (Node.NODE_SIZE - 1 - node.prev.right_idx) > node.size) {
        // move all to prev node and kill it
        Node<K, V> prev = node.prev;
        int left_idx = node.left_idx;
        int size = index - left_idx;
        System.arraycopy(node.keys, left_idx, prev.keys, prev.right_idx + 1, size);
        System.arraycopy(node.values, left_idx, prev.values, prev.right_idx + 1, size);
        prev.right_idx += size;
        prev.size += size;
        deleteNode(node);
    } else if (node.next != null && (node.next.left_idx) > node.size) {
        // move all to next node and kill it
        Node<K, V> next = node.next;
        int left_idx = node.left_idx;
        int size = index - left_idx;
        int next_new_left = next.left_idx - size;
        next.left_idx = next_new_left;
        System.arraycopy(node.keys, left_idx, next.keys, next_new_left, size);
        System.arraycopy(node.values, left_idx, next.values, next_new_left, size);
        next.size += size;
        deleteNode(node);
    } else {
        // Clear the slot in place.
        node.keys[index] = null;
        node.values[index] = null;
        node.right_idx--;
        node.size--;
        Node<K, V> next = node.next;
        if (next != null && next.size == 1) {
            // Absorb a single-entry successor.
            node.size++;
            node.right_idx++;
            node.keys[node.right_idx] = next.keys[next.left_idx];
            node.values[node.right_idx] = next.values[next.left_idx];
            deleteNode(next);
        }
    }
    modCount++;
    size--;
}
// Removes the interior slot 'index' (left_idx < index < right_idx, hence
// node.size > 1). Prefers merging the survivors into a roomy neighbour and
// deleting this node; otherwise shifts in place from the cheaper side,
// possibly absorbing a single-entry neighbour into the freed slot.
void removeMiddleElement(Node<K, V> node, int index) {
    // this function is called iff index if some middle element;
    // so node.left_idx < index < node.right_idx
    // condition above assume that node.size > 1
    if (node.prev != null && (Node.NODE_SIZE - 1 - node.prev.right_idx) > node.size) {
        // move all to prev node and kill it
        Node<K, V> prev = node.prev;
        int left_idx = node.left_idx;
        int size = index - left_idx;
        System.arraycopy(node.keys, left_idx, prev.keys, prev.right_idx + 1, size);
        System.arraycopy(node.values, left_idx, prev.values, prev.right_idx + 1, size);
        prev.right_idx += size;
        size = node.right_idx - index;
        System.arraycopy(node.keys, index + 1, prev.keys, prev.right_idx + 1, size);
        System.arraycopy(node.values, index + 1, prev.values, prev.right_idx + 1, size);
        prev.right_idx += size;
        prev.size += (node.size - 1);
        deleteNode(node);
    } else if (node.next != null && (node.next.left_idx) > node.size) {
        // move all to next node and kill it
        Node<K, V> next = node.next;
        int left_idx = node.left_idx;
        int next_new_left = next.left_idx - node.size + 1;
        next.left_idx = next_new_left;
        int size = index - left_idx;
        System.arraycopy(node.keys, left_idx, next.keys, next_new_left, size);
        System.arraycopy(node.values, left_idx, next.values, next_new_left, size);
        next_new_left += size;
        size = node.right_idx - index;
        System.arraycopy(node.keys, index + 1, next.keys, next_new_left, size);
        System.arraycopy(node.values, index + 1, next.values, next_new_left, size);
        next.size += (node.size - 1);
        deleteNode(node);
    } else {
        // In-place removal: shift the smaller of the two sides.
        int moveFromRight = node.right_idx - index;
        int left_idx = node.left_idx;
        int moveFromLeft = index - left_idx ;
        if (moveFromRight <= moveFromLeft) {
            System.arraycopy(node.keys, index + 1, node.keys, index, moveFromRight);
            System.arraycopy(node.values, index + 1, node.values, index, moveFromRight);
            Node<K, V> next = node.next;
            if (next != null && next.size == 1) {
                // Absorb a single-entry successor into the freed slot.
                node.keys [node.right_idx] = next.keys [next.left_idx];
                node.values[node.right_idx] = next.values[next.left_idx];
                deleteNode(next);
            } else {
                node.keys [node.right_idx] = null;
                node.values[node.right_idx] = null;
                node.right_idx--;
                node.size--;
            }
        } else {
            System.arraycopy(node.keys, left_idx , node.keys, left_idx + 1, moveFromLeft);
            System.arraycopy(node.values, left_idx , node.values, left_idx + 1, moveFromLeft);
            Node<K, V> prev = node.prev;
            if (prev != null && prev.size == 1) {
                // Absorb a single-entry predecessor into the freed slot.
                node.keys [left_idx ] = prev.keys [prev.left_idx];
                node.values[left_idx ] = prev.values[prev.left_idx];
                deleteNode(prev);
            } else {
                node.keys [left_idx ] = null;
                node.values[left_idx ] = null;
                node.left_idx++;
                node.size--;
            }
        }
    }
    modCount++;
    size--;
}
// Slot removal used by the iterators; the iterator has already advanced past
// this node, so deleting whole nodes here is safe. Edge slots are cleared
// directly (the low edge may absorb a single-entry predecessor); interior
// slots shift whichever side is smaller.
void removeFromIterator(Node<K, V> node, int index) {
    if (node.size == 1) {
        // it is safe to delete the whole node here.
        // iterator already moved to the next node;
        deleteNode(node);
    } else {
        int left_idx = node.left_idx;
        if (index == left_idx) {
            Node<K, V> prev = node.prev;
            if (prev != null && prev.size == 1) {
                // Absorb a single-entry predecessor into the freed slot.
                node.keys [left_idx] = prev.keys [prev.left_idx];
                node.values[left_idx] = prev.values[prev.left_idx];
                deleteNode(prev);
            } else {
                node.keys [left_idx] = null;
                node.values[left_idx] = null;
                node.left_idx++;
                node.size--;
            }
        } else if (index == node.right_idx) {
            node.keys [index] = null;
            node.values[index] = null;
            node.right_idx--;
            node.size--;
        } else {
            // Interior slot: shift the smaller side.
            int moveFromRight = node.right_idx - index;
            int moveFromLeft = index - left_idx;
            if (moveFromRight <= moveFromLeft) {
                System.arraycopy(node.keys, index + 1, node.keys, index, moveFromRight );
                System.arraycopy(node.values, index + 1, node.values, index, moveFromRight );
                node.keys [node.right_idx] = null;
                node.values[node.right_idx] = null;
                node.right_idx--;
                node.size--;
            } else {
                System.arraycopy(node.keys, left_idx, node.keys, left_idx+ 1, moveFromLeft);
                System.arraycopy(node.values, left_idx, node.values, left_idx+ 1, moveFromLeft);
                node.keys [left_idx] = null;
                node.values[left_idx] = null;
                node.left_idx++;
                node.size--;
            }
        }
    }
    modCount++;
    size--;
}
// Unlinks 'node' from the tree. With at most one child, that child (or
// nothing) is promoted into node's place; with two children the in-order
// successor node.next is moved up, keeping node's color. Red-black repair
// happens inside the attach helpers; the leaf chain is spliced separately.
private void deleteNode(Node<K, V> node) {
    if (node.right == null) {
        if (node.left != null) {
            attachToParent(node, node.left);
        } else {
            attachNullToParent(node);
        }
        fixNextChain(node);
    } else if(node.left == null) { // node.right != null
        attachToParent(node, node.right);
        fixNextChain(node);
    } else {
        // Here node.left!=nul && node.right!=null
        // node.next should replace node in tree
        // node.next!=null by tree logic.
        // node.next.left==null by tree logic.
        // node.next.right may be null or non-null
        Node<K, V> toMoveUp = node.next;
        fixNextChain(node);
        if(toMoveUp.right==null){
            attachNullToParent(toMoveUp);
        } else {
            attachToParent(toMoveUp, toMoveUp.right);
        }
        // Here toMoveUp is ready to replace node
        toMoveUp.left = node.left;
        if (node.left != null) {
            node.left.parent = toMoveUp;
        }
        toMoveUp.right = node.right;
        if (node.right != null) {
            node.right.parent = toMoveUp;
        }
        attachToParentNoFixup(node,toMoveUp);
        toMoveUp.color = node.color;
    }
}
// Replaces toDelete with toConnect in the parent's child slot (or as root),
// without any red-black rebalancing.
private void attachToParentNoFixup(Node<K, V> toDelete, Node<K, V> toConnect) {
    // assert toConnect!=null
    Node<K,V> parent = toDelete.parent;
    toConnect.parent = parent;
    if (parent == null) {
        root = toConnect;
    } else if (toDelete == parent.left) {
        parent.left = toConnect;
    } else {
        parent.right = toConnect;
    }
}
// Replaces toDelete with toConnect; if the removed node was black
// (color == false) the delete fixup restores the red-black invariants.
private void attachToParent(Node<K, V> toDelete, Node<K, V> toConnect) {
    // assert toConnect!=null
    attachToParentNoFixup(toDelete,toConnect);
    if (!toDelete.color) {
        fixup(toConnect);
    }
}
// Detaches toDelete from its parent, leaving a null child slot; when a black
// node (color == false) is removed the fixup is run from the parent.
private void attachNullToParent(Node<K, V> toDelete) {
    Node<K, V> parent = toDelete.parent;
    if (parent == null) {
        root = null;
    } else {
        if (toDelete == parent.left) {
            parent.left = null;
        } else {
            parent.right = null;
        }
        if (!toDelete.color) {
            fixup(parent);
        }
    }
}
// Splices the node out of the doubly linked prev/next leaf chain.
private void fixNextChain(Node<K, V> node) {
    Node<K, V> before = node.prev;
    Node<K, V> after = node.next;
    if (before != null) {
        before.next = after;
    }
    if (after != null) {
        after.prev = before;
    }
}
// Red-black delete fixup, adapted to tolerate null children (treated as
// black leaves). Walks up from x restoring the equal-black-height invariant
// after a black node was removed, then forces x black.
private void fixup(Node<K, V> x) {
    Node<K, V> w;
    while (x != root && !x.color) {
        if (x == x.parent.left) {
            w = x.parent.right;  // sibling
            if (w == null) {
                // Missing sibling: treat as black with black children.
                x = x.parent;
                continue;
            }
            if (w.color) {
                // Red sibling: rotate to obtain a black sibling.
                w.color = false;
                x.parent.color = true;
                leftRotate(x.parent);
                w = x.parent.right;
                if (w == null) {
                    x = x.parent;
                    continue;
                }
            }
            if ((w.left == null || !w.left.color)
                    && (w.right == null || !w.right.color)) {
                // Both of sibling's children black: recolor, move up.
                w.color = true;
                x = x.parent;
            } else {
                if (w.right == null || !w.right.color) {
                    // Inner child red: rotate the sibling first.
                    w.left.color = false;
                    w.color = true;
                    rightRotate(w);
                    w = x.parent.right;
                }
                // Outer child red: final rotation terminates the loop.
                w.color = x.parent.color;
                x.parent.color = false;
                w.right.color = false;
                leftRotate(x.parent);
                x = root;
            }
        } else {
            // Mirror image of the branch above.
            w = x.parent.left;
            if (w == null) {
                x = x.parent;
                continue;
            }
            if (w.color) {
                w.color = false;
                x.parent.color = true;
                rightRotate(x.parent);
                w = x.parent.left;
                if (w == null) {
                    x = x.parent;
                    continue;
                }
            }
            if ((w.left == null || !w.left.color)
                    && (w.right == null || !w.right.color)) {
                w.color = true;
                x = x.parent;
            } else {
                if (w.left == null || !w.left.color) {
                    w.right.color = false;
                    w.color = true;
                    leftRotate(w);
                    w = x.parent.left;
                }
                w.color = x.parent.color;
                x.parent.color = false;
                w.left.color = false;
                rightRotate(x.parent);
                x = root;
            }
        }
    }
    x.color = false;
}
// Number of key-value mappings in this map.
@Override
public int size() {
    return size;
}
// View of the portion of this map with keys in [startKey, endKey); rejects
// a start key that orders after the end key.
@Override
public TSortedMap<K, V> subMap(K startKey, K endKey) {
    int order = comparator == null
            ? toComparable(startKey).compareTo(endKey)
            : comparator.compare(startKey, endKey);
    if (order <= 0) {
        return new SubMap<>(startKey, this, endKey);
    }
    throw new TIllegalArgumentException();
}
// View of the portion of this map whose keys are greater than or equal to
// startKey. The self-comparison below only validates that startKey is
// comparable under the map's ordering (throws early on a bad key).
@Override
public TSortedMap<K, V> tailMap(K startKey) {
    // Check for errors
    if (comparator == null) {
        toComparable(startKey).compareTo(startKey);
    } else {
        comparator.compare(startKey, startKey);
    }
    return new SubMap<>(startKey, this);
}
// Lazily creates and caches a live value-collection view backed by this map.
@Override
public TCollection<V> values() {
    if (cachedValues == null) {
        cachedValues = new TAbstractCollection<V>() {
            @Override
            public boolean contains(Object object) {
                return containsValue(object);
            }
            @Override
            public int size() {
                return size;
            }
            @Override
            public void clear() {
                TTreeMap.this.clear();
            }
            @Override
            public TIterator<V> iterator() {
                return new UnboundedValueIterator<>(TTreeMap.this);
            }
        };
    }
    return cachedValues;
}
}
|
Rewrite TreeMap
|
teavm-classlib/src/main/java/org/teavm/classlib/java/util/TTreeMap.java
|
Rewrite TreeMap
|
|
Java
|
apache-2.0
|
d184d35ef4800d9e7549cef7b258d4616b5a272e
| 0
|
iamironz/binaryprefs
|
package com.ironz.binaryprefs;
import android.content.SharedPreferences;
import com.ironz.binaryprefs.exception.ExceptionHandler;
import com.ironz.binaryprefs.files.FileAdapter;
import com.ironz.binaryprefs.name.KeyNameProvider;
import com.ironz.binaryprefs.util.Bits;
import java.util.*;
/**
 * SharedPreferences.Editor implementation that stages changes in memory and
 * flushes them to per-key binary files via a {@link FileAdapter} on
 * commit/apply. Listeners are notified once per touched key.
 */
final class BinaryPreferencesEditor implements SharedPreferences.Editor {

    /** Pending writes: serialized file name -> raw bytes to persist. */
    private final Map<String, byte[]> commitMap = new HashMap<>();
    /** Preference keys scheduled for removal. */
    private final Set<String> removeSet = new HashSet<>();

    private final FileAdapter fileAdapter;
    private final ExceptionHandler exceptionHandler;
    private final List<SharedPreferences.OnSharedPreferenceChangeListener> listeners;
    private final SharedPreferences preferences;
    private final KeyNameProvider keyNameProvider;

    /** When set, all stored values are wiped before removals and writes run. */
    private boolean clear;

    BinaryPreferencesEditor(FileAdapter fileAdapter,
                            ExceptionHandler exceptionHandler,
                            List<SharedPreferences.OnSharedPreferenceChangeListener> listeners,
                            SharedPreferences preferences,
                            KeyNameProvider keyNameProvider) {
        this.fileAdapter = fileAdapter;
        this.exceptionHandler = exceptionHandler;
        this.listeners = listeners;
        this.preferences = preferences;
        this.keyNameProvider = keyNameProvider;
    }

    /**
     * Queues a string value; a null value removes the key, matching the
     * SharedPreferences.Editor contract.
     */
    @Override
    public SharedPreferences.Editor putString(String key, String value) {
        if (value == null) {
            return remove(key);
        }
        String name = keyNameProvider.convertStringName(key);
        // NOTE(review): getBytes() uses the platform default charset -
        // confirm it matches the charset used when values are read back.
        byte[] bytes = value.getBytes();
        commitMap.put(name, bytes);
        return this;
    }

    /**
     * Queues each set element under an indexed per-element file name; a null
     * set removes the key.
     */
    @Override
    public SharedPreferences.Editor putStringSet(String key, Set<String> values) {
        if (values == null) {
            return remove(key);
        }
        int i = 0;
        for (String value : values) {
            String name = keyNameProvider.convertStringSetName(key, i);
            byte[] bytes = value.getBytes();
            commitMap.put(name, bytes);
            i++;
        }
        return this;
    }

    /** Queues an int value serialized via {@link Bits}. */
    @Override
    public SharedPreferences.Editor putInt(String key, int value) {
        String name = keyNameProvider.convertIntName(key);
        byte[] bytes = Bits.intToBytes(value);
        commitMap.put(name, bytes);
        return this;
    }

    /** Queues a long value serialized via {@link Bits}. */
    @Override
    public SharedPreferences.Editor putLong(String key, long value) {
        String name = keyNameProvider.convertLongName(key);
        byte[] bytes = Bits.longToBytes(value);
        commitMap.put(name, bytes);
        return this;
    }

    /** Queues a float value serialized via {@link Bits}. */
    @Override
    public SharedPreferences.Editor putFloat(String key, float value) {
        String name = keyNameProvider.convertFloatName(key);
        byte[] bytes = Bits.floatToBytes(value);
        commitMap.put(name, bytes);
        return this;
    }

    /** Queues a boolean value serialized via {@link Bits}. */
    @Override
    public SharedPreferences.Editor putBoolean(String key, boolean value) {
        String name = keyNameProvider.convertBooleanName(key);
        byte[] bytes = Bits.booleanToBytes(value);
        commitMap.put(name, bytes);
        return this;
    }

    /** Schedules the key for removal on commit/apply. */
    @Override
    public SharedPreferences.Editor remove(String key) {
        removeSet.add(key);
        return this;
    }

    /** Schedules a full wipe; it runs before queued removals and writes. */
    @Override
    public SharedPreferences.Editor clear() {
        clear = true;
        return this;
    }

    /**
     * Applies pending changes. Unlike {@link #commit()}, failures are not
     * routed through the exception handler and will propagate to the caller.
     */
    @Override
    public void apply() {
        performActions();
    }

    /**
     * Applies pending changes; failures are reported to the exception
     * handler and {@code false} is returned.
     */
    @Override
    public boolean commit() {
        return withCallBack();
    }

    private boolean withCallBack() {
        try {
            performActions();
            return true;
        } catch (Exception e) {
            exceptionHandler.handle(e);
        }
        return false;
    }

    // Order matters: wipe everything first, then removals, then writes.
    private void performActions() {
        tryClearAll();
        tryRemoveByKeys();
        tryStoreByKey();
    }

    private void tryClearAll() {
        if (clear) {
            fileAdapter.clear();
        }
    }

    // Deletes the backing file of every scheduled key, notifying listeners.
    private void tryRemoveByKeys() {
        for (String fileName : fileAdapter.names()) {
            String key = getKeyFromFileName(fileName);
            if (!removeSet.contains(key)) {
                continue;
            }
            fileAdapter.remove(fileName);
            notifyListeners(key);
        }
    }

    // Persists queued values. Iterates entrySet() instead of keySet()+get()
    // to avoid a second map lookup per key.
    private void tryStoreByKey() {
        for (Map.Entry<String, byte[]> entry : commitMap.entrySet()) {
            String fileName = entry.getKey();
            fileAdapter.save(fileName, entry.getValue());
            notifyListeners(getKeyFromFileName(fileName));
        }
    }

    private void notifyListeners(String key) {
        for (SharedPreferences.OnSharedPreferenceChangeListener listener : listeners) {
            listener.onSharedPreferenceChanged(preferences, key);
        }
    }

    // Returns the portion of the file name before the first '.' (the key).
    private String getKeyFromFileName(String fileName) {
        return fileName.split("\\.", 2)[0];
    }
}
|
library/src/main/java/com/ironz/binaryprefs/BinaryPreferencesEditor.java
|
package com.ironz.binaryprefs;
import android.content.SharedPreferences;
import com.ironz.binaryprefs.exception.ExceptionHandler;
import com.ironz.binaryprefs.files.FileAdapter;
import com.ironz.binaryprefs.name.KeyNameProvider;
import com.ironz.binaryprefs.util.Bits;
import java.util.*;
/**
 * {@link SharedPreferences.Editor} implementation that stages changes in memory
 * and persists each value as a separate file on {@link #apply()}/{@link #commit()}.
 */
final class BinaryPreferencesEditor implements SharedPreferences.Editor {
    /** Pending writes: type-tagged file name -> serialized bytes. */
    private final Map<String, byte[]> commitMap = new HashMap<>();
    /** Logical keys scheduled for removal. */
    private final Set<String> removeSet = new HashSet<>();
    private final FileAdapter fileAdapter;
    private final ExceptionHandler exceptionHandler;
    private final List<SharedPreferences.OnSharedPreferenceChangeListener> listeners;
    private final SharedPreferences preferences;
    private final KeyNameProvider keyNameProvider;
    /** When true, all persisted values are wiped before pending changes are applied. */
    private boolean clear;
    BinaryPreferencesEditor(FileAdapter fileAdapter,
                            ExceptionHandler exceptionHandler,
                            List<SharedPreferences.OnSharedPreferenceChangeListener> listeners,
                            SharedPreferences preferences,
                            KeyNameProvider keyNameProvider) {
        this.fileAdapter = fileAdapter;
        this.exceptionHandler = exceptionHandler;
        this.listeners = listeners;
        this.preferences = preferences;
        this.keyNameProvider = keyNameProvider;
    }
    @Override
    public SharedPreferences.Editor putString(String key, String value) {
        // Android contract: putting a null string is equivalent to removing the key.
        if (value == null) {
            return remove(key);
        }
        String name = keyNameProvider.convertStringName(key);
        byte[] bytes = value.getBytes();
        commitMap.put(name, bytes);
        return this;
    }
    @Override
    public SharedPreferences.Editor putStringSet(String key, Set<String> values) {
        if (values == null) {
            return remove(key);
        }
        // Each element is stored in its own indexed file.
        int i = 0;
        for (String value : values) {
            String name = keyNameProvider.convertStringSetName(key, i);
            byte[] bytes = value.getBytes();
            commitMap.put(name, bytes);
            i++;
        }
        return this;
    }
    @Override
    public SharedPreferences.Editor putInt(String key, int value) {
        String name = keyNameProvider.convertIntName(key);
        byte[] bytes = Bits.intToBytes(value);
        commitMap.put(name, bytes);
        return this;
    }
    @Override
    public SharedPreferences.Editor putLong(String key, long value) {
        String name = keyNameProvider.convertLongName(key);
        byte[] bytes = Bits.longToBytes(value);
        commitMap.put(name, bytes);
        return this;
    }
    @Override
    public SharedPreferences.Editor putFloat(String key, float value) {
        String name = keyNameProvider.convertFloatName(key);
        byte[] bytes = Bits.floatToBytes(value);
        commitMap.put(name, bytes);
        return this;
    }
    @Override
    public SharedPreferences.Editor putBoolean(String key, boolean value) {
        String name = keyNameProvider.convertBooleanName(key);
        byte[] bytes = Bits.booleanToBytes(value);
        commitMap.put(name, bytes);
        return this;
    }
    @Override
    public SharedPreferences.Editor remove(String key) {
        // Deferred: actual file removal happens in performActions().
        removeSet.add(key);
        return this;
    }
    @Override
    public SharedPreferences.Editor clear() {
        clear = true;
        return this;
    }
    @Override
    public void apply() {
        performActions();
    }
    @Override
    public boolean commit() {
        return withCallBack();
    }
    // Runs the pending actions, routing any failure to the exception handler.
    private boolean withCallBack() {
        try {
            performActions();
            return true;
        } catch (Exception e) {
            exceptionHandler.handle(e);
        }
        return false;
    }
    // Fixed order: wipe-all first, then removals, then writes.
    private void performActions() {
        tryClearAll();
        tryRemoveByKeys();
        tryStoreByKey();
    }
    private void tryClearAll() {
        if (clear) {
            fileAdapter.clear();
        }
    }
    private void tryRemoveByKeys() {
        for (String fileName : fileAdapter.names()) {
            String key = getKeyFromFileName(fileName);
            if (!removeSet.contains(key)) {
                continue;
            }
            fileAdapter.remove(fileName);
            notifyListeners(key);
        }
    }
    private void tryStoreByKey() {
        // entrySet() avoids a second map lookup per element.
        for (Map.Entry<String, byte[]> entry : commitMap.entrySet()) {
            String fileName = entry.getKey();
            fileAdapter.save(fileName, entry.getValue());
            notifyListeners(getKeyFromFileName(fileName));
        }
    }
    private void notifyListeners(String key) {
        for (SharedPreferences.OnSharedPreferenceChangeListener listener : listeners) {
            listener.onSharedPreferenceChanged(preferences, key);
        }
    }
    /**
     * Strips the type suffix from a stored file name (e.g. "age.int" -> "age").
     * Previously this was an identity function and both callers duplicated the
     * split inline; the suffix-stripping now lives in one place.
     */
    private String getKeyFromFileName(String fileName) {
        return fileName.split("\\.", 2)[0];
    }
}
|
Small fix for the pattern-matching method.
|
library/src/main/java/com/ironz/binaryprefs/BinaryPreferencesEditor.java
|
Small fix for the pattern-matching method.
|
|
Java
|
apache-2.0
|
1d894a15160cba0a835b2b33cd6a3c96773b1eca
| 0
|
apache/tomcat,apache/tomcat,Nickname0806/Test_Q4,apache/tomcat,Nickname0806/Test_Q4,Nickname0806/Test_Q4,apache/tomcat,Nickname0806/Test_Q4,apache/tomcat
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.catalina.util;
import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;
import org.apache.catalina.Lifecycle;
import org.apache.catalina.LifecycleEvent;
import org.apache.catalina.LifecycleException;
import org.apache.catalina.LifecycleListener;
import org.apache.catalina.LifecycleState;
import org.apache.juli.logging.Log;
import org.apache.juli.logging.LogFactory;
import org.apache.tomcat.util.ExceptionUtils;
import org.apache.tomcat.util.res.StringManager;
/**
* Base implementation of the {@link Lifecycle} interface that implements the
* state transition rules for {@link Lifecycle#start()} and
* {@link Lifecycle#stop()}
*/
public abstract class LifecycleBase implements Lifecycle {
    private static final Log log = LogFactory.getLog(LifecycleBase.class);
    private static final StringManager sm = StringManager.getManager(LifecycleBase.class);
    /**
     * The list of registered LifecycleListeners for event notifications.
     */
    private final List<LifecycleListener> lifecycleListeners = new CopyOnWriteArrayList<>();
    /**
     * The current state of the source component.
     */
    private volatile LifecycleState state = LifecycleState.NEW;
    /**
     * {@inheritDoc}
     */
    @Override
    public void addLifecycleListener(LifecycleListener listener) {
        lifecycleListeners.add(listener);
    }
    /**
     * {@inheritDoc}
     */
    @Override
    public LifecycleListener[] findLifecycleListeners() {
        return lifecycleListeners.toArray(new LifecycleListener[0]);
    }
    /**
     * {@inheritDoc}
     */
    @Override
    public void removeLifecycleListener(LifecycleListener listener) {
        lifecycleListeners.remove(listener);
    }
    /**
     * Allow sub classes to fire {@link Lifecycle} events.
     *
     * @param type  Event type
     * @param data  Data associated with event.
     */
    protected void fireLifecycleEvent(String type, Object data) {
        LifecycleEvent event = new LifecycleEvent(this, type, data);
        for (LifecycleListener listener : lifecycleListeners) {
            listener.lifecycleEvent(event);
        }
    }
    /**
     * Transitions the component NEW -> INITIALIZING -> INITIALIZED, delegating
     * the component-specific work to {@link #initInternal()}. On failure the
     * component is left in the FAILED state.
     */
    @Override
    public final synchronized void init() throws LifecycleException {
        if (!state.equals(LifecycleState.NEW)) {
            invalidTransition(Lifecycle.BEFORE_INIT_EVENT);
        }
        setStateInternal(LifecycleState.INITIALIZING, null, false);
        try {
            initInternal();
        } catch (Throwable t) {
            ExceptionUtils.handleThrowable(t);
            setStateInternal(LifecycleState.FAILED, null, false);
            throw new LifecycleException(
                    sm.getString("lifecycleBase.initFail",toString()), t);
        }
        setStateInternal(LifecycleState.INITIALIZED, null, false);
    }
    /**
     * Sub-class specific initialisation.
     *
     * @throws LifecycleException If the initialisation fails
     */
    protected abstract void initInternal() throws LifecycleException;
    /**
     * {@inheritDoc}
     */
    @Override
    public final synchronized void start() throws LifecycleException {
        if (LifecycleState.STARTING_PREP.equals(state) || LifecycleState.STARTING.equals(state) ||
                LifecycleState.STARTED.equals(state)) {
            if (log.isDebugEnabled()) {
                Exception e = new LifecycleException();
                log.debug(sm.getString("lifecycleBase.alreadyStarted", toString()), e);
            } else if (log.isInfoEnabled()) {
                log.info(sm.getString("lifecycleBase.alreadyStarted", toString()));
            }
            return;
        }
        if (state.equals(LifecycleState.NEW)) {
            init();
        } else if (state.equals(LifecycleState.FAILED)) {
            stop();
        } else if (!state.equals(LifecycleState.INITIALIZED) &&
                !state.equals(LifecycleState.STOPPED)) {
            invalidTransition(Lifecycle.BEFORE_START_EVENT);
        }
        setStateInternal(LifecycleState.STARTING_PREP, null, false);
        try {
            startInternal();
        } catch (Throwable t) {
            // This is an 'uncontrolled' failure so put the component into the
            // FAILED state and throw an exception.
            ExceptionUtils.handleThrowable(t);
            setStateInternal(LifecycleState.FAILED, null, false);
            throw new LifecycleException(sm.getString("lifecycleBase.startFail", toString()), t);
        }
        if (state.equals(LifecycleState.FAILED)) {
            // This is a 'controlled' failure. The component put itself into the
            // FAILED state so call stop() to complete the clean-up.
            stop();
        } else if (!state.equals(LifecycleState.STARTING)) {
            // Shouldn't be necessary but acts as a check that sub-classes are
            // doing what they are supposed to.
            invalidTransition(Lifecycle.AFTER_START_EVENT);
        } else {
            setStateInternal(LifecycleState.STARTED, null, false);
        }
    }
    /**
     * Sub-classes must ensure that the state is changed to
     * {@link LifecycleState#STARTING} during the execution of this method.
     * Changing state will trigger the {@link Lifecycle#START_EVENT} event.
     *
     * If a component fails to start it may either throw a
     * {@link LifecycleException} which will cause it's parent to fail to start
     * or it can place itself in the error state in which case {@link #stop()}
     * will be called on the failed component but the parent component will
     * continue to start normally.
     *
     * @throws LifecycleException Start error occurred
     */
    protected abstract void startInternal() throws LifecycleException;
    /**
     * {@inheritDoc}
     */
    @Override
    public final synchronized void stop() throws LifecycleException {
        if (LifecycleState.STOPPING_PREP.equals(state) || LifecycleState.STOPPING.equals(state) ||
                LifecycleState.STOPPED.equals(state)) {
            if (log.isDebugEnabled()) {
                Exception e = new LifecycleException();
                log.debug(sm.getString("lifecycleBase.alreadyStopped", toString()), e);
            } else if (log.isInfoEnabled()) {
                log.info(sm.getString("lifecycleBase.alreadyStopped", toString()));
            }
            return;
        }
        if (state.equals(LifecycleState.NEW)) {
            state = LifecycleState.STOPPED;
            return;
        }
        if (!state.equals(LifecycleState.STARTED) && !state.equals(LifecycleState.FAILED)) {
            invalidTransition(Lifecycle.BEFORE_STOP_EVENT);
        }
        if (state.equals(LifecycleState.FAILED)) {
            // Don't transition to STOPPING_PREP as that would briefly mark the
            // component as available but do ensure the BEFORE_STOP_EVENT is
            // fired
            fireLifecycleEvent(BEFORE_STOP_EVENT, null);
        } else {
            setStateInternal(LifecycleState.STOPPING_PREP, null, false);
        }
        try {
            stopInternal();
        } catch (Throwable t) {
            ExceptionUtils.handleThrowable(t);
            setStateInternal(LifecycleState.FAILED, null, false);
            throw new LifecycleException(sm.getString("lifecycleBase.stopFail",toString()), t);
        } finally {
            if (this instanceof Lifecycle.SingleUse) {
                // Complete stop process first
                setStateInternal(LifecycleState.STOPPED, null, false);
                destroy();
                return;
            }
        }
        // Shouldn't be necessary but acts as a check that sub-classes are
        // doing what they are supposed to.
        if (!state.equals(LifecycleState.STOPPING) && !state.equals(LifecycleState.FAILED)) {
            invalidTransition(Lifecycle.AFTER_STOP_EVENT);
        }
        setStateInternal(LifecycleState.STOPPED, null, false);
    }
    /**
     * Sub-classes must ensure that the state is changed to
     * {@link LifecycleState#STOPPING} during the execution of this method.
     * Changing state will trigger the {@link Lifecycle#STOP_EVENT} event.
     *
     * @throws LifecycleException Stop error occurred
     */
    protected abstract void stopInternal() throws LifecycleException;
    /**
     * Destroys the component. A FAILED component is first stopped to trigger
     * clean-up; repeated destroy() calls are logged and ignored.
     */
    @Override
    public final synchronized void destroy() throws LifecycleException {
        if (LifecycleState.FAILED.equals(state)) {
            try {
                // Triggers clean-up
                stop();
            } catch (LifecycleException e) {
                // Just log. Still want to destroy.
                log.warn(sm.getString(
                        "lifecycleBase.destroyStopFail", toString()), e);
            }
        }
        if (LifecycleState.DESTROYING.equals(state) ||
                LifecycleState.DESTROYED.equals(state)) {
            if (log.isDebugEnabled()) {
                Exception e = new LifecycleException();
                log.debug(sm.getString("lifecycleBase.alreadyDestroyed", toString()), e);
            } else if (log.isInfoEnabled() && !(this instanceof Lifecycle.SingleUse)) {
                // Rather than have every component that might need to call
                // destroy() check for SingleUse, don't log an info message if
                // multiple calls are made to destroy()
                log.info(sm.getString("lifecycleBase.alreadyDestroyed", toString()));
            }
            return;
        }
        if (!state.equals(LifecycleState.STOPPED) &&
                !state.equals(LifecycleState.FAILED) &&
                !state.equals(LifecycleState.NEW) &&
                !state.equals(LifecycleState.INITIALIZED)) {
            invalidTransition(Lifecycle.BEFORE_DESTROY_EVENT);
        }
        setStateInternal(LifecycleState.DESTROYING, null, false);
        try {
            destroyInternal();
        } catch (Throwable t) {
            ExceptionUtils.handleThrowable(t);
            setStateInternal(LifecycleState.FAILED, null, false);
            throw new LifecycleException(
                    sm.getString("lifecycleBase.destroyFail",toString()), t);
        }
        setStateInternal(LifecycleState.DESTROYED, null, false);
    }
    /**
     * Sub-class specific destruction.
     *
     * @throws LifecycleException If the destruction fails
     */
    protected abstract void destroyInternal() throws LifecycleException;
    /**
     * {@inheritDoc}
     */
    @Override
    public LifecycleState getState() {
        return state;
    }
    /**
     * {@inheritDoc}
     */
    @Override
    public String getStateName() {
        return getState().toString();
    }
    /**
     * Provides a mechanism for sub-classes to update the component state.
     * Calling this method will automatically fire any associated
     * {@link Lifecycle} event. It will also check that any attempted state
     * transition is valid for a sub-class.
     *
     * @param state The new state for this component
     * @throws LifecycleException when attempting to set an invalid state
     */
    protected synchronized void setState(LifecycleState state)
            throws LifecycleException {
        setStateInternal(state, null, true);
    }
    /**
     * Provides a mechanism for sub-classes to update the component state.
     * Calling this method will automatically fire any associated
     * {@link Lifecycle} event. It will also check that any attempted state
     * transition is valid for a sub-class.
     *
     * @param state The new state for this component
     * @param data  The data to pass to the associated {@link Lifecycle} event
     * @throws LifecycleException when attempting to set an invalid state
     */
    protected synchronized void setState(LifecycleState state, Object data)
            throws LifecycleException {
        setStateInternal(state, data, true);
    }
    // Central state-change routine. When check is true the transition is
    // validated against the sub-class-permitted transitions; internal callers
    // pass false because this class's own transitions are assumed correct.
    private synchronized void setStateInternal(LifecycleState state,
            Object data, boolean check) throws LifecycleException {
        if (log.isDebugEnabled()) {
            log.debug(sm.getString("lifecycleBase.setState", this, state));
        }
        if (check) {
            // Must have been triggered by one of the abstract methods (assume
            // code in this class is correct)
            // null is never a valid state
            if (state == null) {
                invalidTransition("null");
                // Unreachable code - here to stop eclipse complaining about
                // a possible NPE further down the method
                return;
            }
            // Any method can transition to failed
            // startInternal() permits STARTING_PREP to STARTING
            // stopInternal() permits STOPPING_PREP to STOPPING and FAILED to
            // STOPPING
            if (!(state == LifecycleState.FAILED ||
                    (this.state == LifecycleState.STARTING_PREP &&
                            state == LifecycleState.STARTING) ||
                    (this.state == LifecycleState.STOPPING_PREP &&
                            state == LifecycleState.STOPPING) ||
                    (this.state == LifecycleState.FAILED &&
                            state == LifecycleState.STOPPING))) {
                // No other transition permitted
                invalidTransition(state.name());
            }
        }
        this.state = state;
        String lifecycleEvent = state.getLifecycleEvent();
        if (lifecycleEvent != null) {
            fireLifecycleEvent(lifecycleEvent, data);
        }
    }
    // Reports an illegal lifecycle transition attempt for this component.
    private void invalidTransition(String type) throws LifecycleException {
        String msg = sm.getString("lifecycleBase.invalidTransition", type,
                toString(), state);
        throw new LifecycleException(msg);
    }
}
|
java/org/apache/catalina/util/LifecycleBase.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.catalina.util;
import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;
import org.apache.catalina.Lifecycle;
import org.apache.catalina.LifecycleEvent;
import org.apache.catalina.LifecycleException;
import org.apache.catalina.LifecycleListener;
import org.apache.catalina.LifecycleState;
import org.apache.juli.logging.Log;
import org.apache.juli.logging.LogFactory;
import org.apache.tomcat.util.ExceptionUtils;
import org.apache.tomcat.util.res.StringManager;
/**
* Base implementation of the {@link Lifecycle} interface that implements the
* state transition rules for {@link Lifecycle#start()} and
* {@link Lifecycle#stop()}
*/
public abstract class LifecycleBase implements Lifecycle {
    private static final Log log = LogFactory.getLog(LifecycleBase.class);
    private static final StringManager sm = StringManager.getManager(LifecycleBase.class);
    /**
     * The list of registered LifecycleListeners for event notifications.
     */
    private final List<LifecycleListener> lifecycleListeners = new CopyOnWriteArrayList<>();
    /**
     * The current state of the source component.
     */
    private volatile LifecycleState state = LifecycleState.NEW;
    /**
     * {@inheritDoc}
     */
    @Override
    public void addLifecycleListener(LifecycleListener listener) {
        lifecycleListeners.add(listener);
    }
    /**
     * {@inheritDoc}
     */
    @Override
    public LifecycleListener[] findLifecycleListeners() {
        return lifecycleListeners.toArray(new LifecycleListener[0]);
    }
    /**
     * {@inheritDoc}
     */
    @Override
    public void removeLifecycleListener(LifecycleListener listener) {
        lifecycleListeners.remove(listener);
    }
    /**
     * Allow sub classes to fire {@link Lifecycle} events.
     *
     * @param type Event type
     * @param data Data associated with event.
     */
    protected void fireLifecycleEvent(String type, Object data) {
        LifecycleEvent event = new LifecycleEvent(this, type, data);
        for (LifecycleListener listener : lifecycleListeners) {
            listener.lifecycleEvent(event);
        }
    }
    /**
     * Transitions the component NEW -> INITIALIZING -> INITIALIZED, delegating
     * the component-specific work to {@link #initInternal()}. On failure the
     * component is left in the FAILED state.
     */
    @Override
    public final synchronized void init() throws LifecycleException {
        if (!state.equals(LifecycleState.NEW)) {
            invalidTransition(Lifecycle.BEFORE_INIT_EVENT);
        }
        setStateInternal(LifecycleState.INITIALIZING, null, false);
        try {
            initInternal();
        } catch (Throwable t) {
            ExceptionUtils.handleThrowable(t);
            setStateInternal(LifecycleState.FAILED, null, false);
            throw new LifecycleException(
                    sm.getString("lifecycleBase.initFail",toString()), t);
        }
        setStateInternal(LifecycleState.INITIALIZED, null, false);
    }
    protected abstract void initInternal() throws LifecycleException;
    /**
     * {@inheritDoc}
     */
    @Override
    public final synchronized void start() throws LifecycleException {
        if (LifecycleState.STARTING_PREP.equals(state) || LifecycleState.STARTING.equals(state) ||
                LifecycleState.STARTED.equals(state)) {
            if (log.isDebugEnabled()) {
                Exception e = new LifecycleException();
                log.debug(sm.getString("lifecycleBase.alreadyStarted", toString()), e);
            } else if (log.isInfoEnabled()) {
                log.info(sm.getString("lifecycleBase.alreadyStarted", toString()));
            }
            return;
        }
        if (state.equals(LifecycleState.NEW)) {
            init();
        } else if (state.equals(LifecycleState.FAILED)) {
            stop();
        } else if (!state.equals(LifecycleState.INITIALIZED) &&
                !state.equals(LifecycleState.STOPPED)) {
            invalidTransition(Lifecycle.BEFORE_START_EVENT);
        }
        setStateInternal(LifecycleState.STARTING_PREP, null, false);
        try {
            startInternal();
        } catch (Throwable t) {
            // This is an 'uncontrolled' failure so put the component into the
            // FAILED state and throw an exception.
            ExceptionUtils.handleThrowable(t);
            setStateInternal(LifecycleState.FAILED, null, false);
            throw new LifecycleException(sm.getString("lifecycleBase.startFail", toString()), t);
        }
        // BUG FIX: a component that places itself in the FAILED state during
        // startInternal() (a 'controlled' failure, as documented on
        // startInternal()) was previously reported as an invalid transition
        // instead of being cleaned up. Call stop() to complete the clean-up.
        if (state.equals(LifecycleState.FAILED)) {
            stop();
        } else if (!state.equals(LifecycleState.STARTING)) {
            // Shouldn't be necessary but acts as a check that sub-classes are
            // doing what they are supposed to.
            invalidTransition(Lifecycle.AFTER_START_EVENT);
        } else {
            setStateInternal(LifecycleState.STARTED, null, false);
        }
    }
    /**
     * Sub-classes must ensure that the state is changed to
     * {@link LifecycleState#STARTING} during the execution of this method.
     * Changing state will trigger the {@link Lifecycle#START_EVENT} event.
     *
     * If a component fails to start it may either throw a
     * {@link LifecycleException} which will cause it's parent to fail to start
     * or it can place itself in the error state in which case {@link #stop()}
     * will be called on the failed component but the parent component will
     * continue to start normally.
     *
     * @throws LifecycleException Start error occurred
     */
    protected abstract void startInternal() throws LifecycleException;
    /**
     * {@inheritDoc}
     */
    @Override
    public final synchronized void stop() throws LifecycleException {
        if (LifecycleState.STOPPING_PREP.equals(state) || LifecycleState.STOPPING.equals(state) ||
                LifecycleState.STOPPED.equals(state)) {
            if (log.isDebugEnabled()) {
                Exception e = new LifecycleException();
                log.debug(sm.getString("lifecycleBase.alreadyStopped", toString()), e);
            } else if (log.isInfoEnabled()) {
                log.info(sm.getString("lifecycleBase.alreadyStopped", toString()));
            }
            return;
        }
        if (state.equals(LifecycleState.NEW)) {
            state = LifecycleState.STOPPED;
            return;
        }
        if (!state.equals(LifecycleState.STARTED) && !state.equals(LifecycleState.FAILED)) {
            invalidTransition(Lifecycle.BEFORE_STOP_EVENT);
        }
        if (state.equals(LifecycleState.FAILED)) {
            // Don't transition to STOPPING_PREP as that would briefly mark the
            // component as available but do ensure the BEFORE_STOP_EVENT is
            // fired
            fireLifecycleEvent(BEFORE_STOP_EVENT, null);
        } else {
            setStateInternal(LifecycleState.STOPPING_PREP, null, false);
        }
        try {
            stopInternal();
        } catch (Throwable t) {
            ExceptionUtils.handleThrowable(t);
            setStateInternal(LifecycleState.FAILED, null, false);
            throw new LifecycleException(sm.getString("lifecycleBase.stopFail",toString()), t);
        } finally {
            if (this instanceof Lifecycle.SingleUse) {
                // Complete stop process first
                setStateInternal(LifecycleState.STOPPED, null, false);
                destroy();
                return;
            }
        }
        // Shouldn't be necessary but acts as a check that sub-classes are
        // doing what they are supposed to.
        if (!state.equals(LifecycleState.STOPPING) && !state.equals(LifecycleState.FAILED)) {
            invalidTransition(Lifecycle.AFTER_STOP_EVENT);
        }
        setStateInternal(LifecycleState.STOPPED, null, false);
    }
    /**
     * Sub-classes must ensure that the state is changed to
     * {@link LifecycleState#STOPPING} during the execution of this method.
     * Changing state will trigger the {@link Lifecycle#STOP_EVENT} event.
     *
     * @throws LifecycleException Stop error occurred
     */
    protected abstract void stopInternal() throws LifecycleException;
    @Override
    public final synchronized void destroy() throws LifecycleException {
        if (LifecycleState.FAILED.equals(state)) {
            try {
                // Triggers clean-up
                stop();
            } catch (LifecycleException e) {
                // Just log. Still want to destroy.
                log.warn(sm.getString(
                        "lifecycleBase.destroyStopFail", toString()), e);
            }
        }
        if (LifecycleState.DESTROYING.equals(state) ||
                LifecycleState.DESTROYED.equals(state)) {
            if (log.isDebugEnabled()) {
                Exception e = new LifecycleException();
                log.debug(sm.getString("lifecycleBase.alreadyDestroyed", toString()), e);
            } else if (log.isInfoEnabled() && !(this instanceof Lifecycle.SingleUse)) {
                // Rather than have every component that might need to call
                // destroy() check for SingleUse, don't log an info message if
                // multiple calls are made to destroy()
                log.info(sm.getString("lifecycleBase.alreadyDestroyed", toString()));
            }
            return;
        }
        if (!state.equals(LifecycleState.STOPPED) &&
                !state.equals(LifecycleState.FAILED) &&
                !state.equals(LifecycleState.NEW) &&
                !state.equals(LifecycleState.INITIALIZED)) {
            invalidTransition(Lifecycle.BEFORE_DESTROY_EVENT);
        }
        setStateInternal(LifecycleState.DESTROYING, null, false);
        try {
            destroyInternal();
        } catch (Throwable t) {
            ExceptionUtils.handleThrowable(t);
            setStateInternal(LifecycleState.FAILED, null, false);
            throw new LifecycleException(
                    sm.getString("lifecycleBase.destroyFail",toString()), t);
        }
        setStateInternal(LifecycleState.DESTROYED, null, false);
    }
    protected abstract void destroyInternal() throws LifecycleException;
    /**
     * {@inheritDoc}
     */
    @Override
    public LifecycleState getState() {
        return state;
    }
    /**
     * {@inheritDoc}
     */
    @Override
    public String getStateName() {
        return getState().toString();
    }
    /**
     * Provides a mechanism for sub-classes to update the component state.
     * Calling this method will automatically fire any associated
     * {@link Lifecycle} event. It will also check that any attempted state
     * transition is valid for a sub-class.
     *
     * @param state The new state for this component
     * @throws LifecycleException when attempting to set an invalid state
     */
    protected synchronized void setState(LifecycleState state)
            throws LifecycleException {
        setStateInternal(state, null, true);
    }
    /**
     * Provides a mechanism for sub-classes to update the component state.
     * Calling this method will automatically fire any associated
     * {@link Lifecycle} event. It will also check that any attempted state
     * transition is valid for a sub-class.
     *
     * @param state The new state for this component
     * @param data  The data to pass to the associated {@link Lifecycle} event
     * @throws LifecycleException when attempting to set an invalid state
     */
    protected synchronized void setState(LifecycleState state, Object data)
            throws LifecycleException {
        setStateInternal(state, data, true);
    }
    // Central state-change routine; validates sub-class transitions when
    // check is true, then updates state and fires the associated event.
    private synchronized void setStateInternal(LifecycleState state,
            Object data, boolean check) throws LifecycleException {
        if (log.isDebugEnabled()) {
            log.debug(sm.getString("lifecycleBase.setState", this, state));
        }
        if (check) {
            // Must have been triggered by one of the abstract methods (assume
            // code in this class is correct)
            // null is never a valid state
            if (state == null) {
                invalidTransition("null");
                // Unreachable code - here to stop eclipse complaining about
                // a possible NPE further down the method
                return;
            }
            // Any method can transition to failed
            // startInternal() permits STARTING_PREP to STARTING
            // stopInternal() permits STOPPING_PREP to STOPPING and FAILED to
            // STOPPING
            if (!(state == LifecycleState.FAILED ||
                    (this.state == LifecycleState.STARTING_PREP &&
                            state == LifecycleState.STARTING) ||
                    (this.state == LifecycleState.STOPPING_PREP &&
                            state == LifecycleState.STOPPING) ||
                    (this.state == LifecycleState.FAILED &&
                            state == LifecycleState.STOPPING))) {
                // No other transition permitted
                invalidTransition(state.name());
            }
        }
        this.state = state;
        String lifecycleEvent = state.getLifecycleEvent();
        if (lifecycleEvent != null) {
            fireLifecycleEvent(lifecycleEvent, data);
        }
    }
    // Reports an illegal lifecycle transition attempt for this component.
    private void invalidTransition(String type) throws LifecycleException {
        String msg = sm.getString("lifecycleBase.invalidTransition", type,
                toString(), state);
        throw new LifecycleException(msg);
    }
}
|
Further fix to regression in r1725599
Trigger a call to stop() if a component puts itself into the FAILED
state during start().
git-svn-id: 79cef5a5a257cc9dbe40a45ac190115b4780e2d0@1725694 13f79535-47bb-0310-9956-ffa450edef68
|
java/org/apache/catalina/util/LifecycleBase.java
|
Further fix to regression in r1725599 Trigger a call to stop() if a component puts itself into the FAILED state during start().
|
|
Java
|
apache-2.0
|
7fdd7fe92a4748161df5dda3267789af1f820883
| 0
|
finmath/finmath-lib,finmath/finmath-lib
|
/*
* (c) Copyright Christian P. Fries, Germany. Contact: email@christian-fries.de.
*
* Created on 09.02.2004
*/
package net.finmath.montecarlo.interestrate;
import net.finmath.montecarlo.interestrate.products.AbstractLIBORMonteCarloProduct;
import net.finmath.stochastic.RandomVariable;
import net.finmath.stochastic.Scalar;
/**
* A class for calibration products, that is a triple (P,V,w) where P is a product, V is a target value and w is a weight.
*
* @author Christian Fries
*/
public class CalibrationProduct {
    private final String name;
    private final AbstractLIBORMonteCarloProduct product;
    private final RandomVariable targetValue;
    private final double weight;
    /**
     * Construct a calibration product. A calibration product consists of a product implementing {@link AbstractLIBORMonteCarloProduct}, a target value
     * given as {@link RandomVariable} and a weight. In addition you may give a short name which may be printed by the logger.
     *
     * @param name A short name (for example a "SYMBOL" representing the product.
     * @param product The product.
     * @param targetValue The target value.
     * @param weight The calibration weight.
     */
    public CalibrationProduct(String name, AbstractLIBORMonteCarloProduct product, RandomVariable targetValue, double weight) {
        super();
        this.name = name;
        this.product = product;
        this.targetValue = targetValue;
        this.weight = weight;
    }
    // Convenience constructor taking the target value as a plain double.
    public CalibrationProduct(String name, AbstractLIBORMonteCarloProduct product, double targetValue, double weight) {
        this(name, product, new Scalar(targetValue), weight);
    }
    // Convenience constructor without a name; getName() falls back to the product.
    public CalibrationProduct(AbstractLIBORMonteCarloProduct product, RandomVariable targetValue, double weight) {
        this(null, product, targetValue, weight);
    }
    // Convenience constructor without a name, taking the target value as a double.
    public CalibrationProduct(AbstractLIBORMonteCarloProduct product, double targetValue, double weight) {
        this(product, new Scalar(targetValue), weight);
    }
    /**
     * The method returns a short name for this calibration product. If no short name was given, the method returns <code>getProduct().toString()</code>.
     *
     * @return A short name for this calibration product. If no short name was given, the method returns <code>getProduct().toString()</code>
     */
    public String getName() {
        return name != null ? name : product.toString();
    }
    /**
     * @return the product.
     */
    public AbstractLIBORMonteCarloProduct getProduct() {
        return product;
    }
    /**
     * @return the target value.
     */
    public RandomVariable getTargetValue() {
        return targetValue;
    }
    /**
     * @return the calibrationWeight
     */
    public double getWeight() {
        return weight;
    }
    @Override
    public String toString() {
        StringBuilder text = new StringBuilder("CalibrationProduct [product=");
        text.append(getProduct());
        text.append(", targetValue=").append(getTargetValue());
        text.append(", weight=").append(getWeight()).append("]");
        return text.toString();
    }
}
|
src/main/java/net/finmath/montecarlo/interestrate/CalibrationProduct.java
|
/*
* (c) Copyright Christian P. Fries, Germany. Contact: email@christian-fries.de.
*
* Created on 09.02.2004
*/
package net.finmath.montecarlo.interestrate;
import net.finmath.montecarlo.interestrate.products.AbstractLIBORMonteCarloProduct;
import net.finmath.stochastic.RandomVariable;
import net.finmath.stochastic.Scalar;
/**
 * A class for calibration products, that is a triple (P,V,w) where P is a product, V is a target value and w is a weight.
 *
 * @author Christian Fries
 */
public class CalibrationProduct {
    private final AbstractLIBORMonteCarloProduct product;
    private final RandomVariable targetValue;
    private final double weight;
    /**
     * Construct a calibration product from a product, a target value and a weight.
     *
     * @param product The product.
     * @param targetValue The target value.
     * @param weight The calibration weight.
     */
    public CalibrationProduct(AbstractLIBORMonteCarloProduct product, RandomVariable targetValue, double weight) {
        super();
        this.product = product;
        this.targetValue = targetValue;
        this.weight = weight;
    }
    /**
     * Construct a calibration product, giving the target value as a plain double.
     *
     * @param product The product.
     * @param targetValue The target value.
     * @param weight The calibration weight.
     */
    public CalibrationProduct(AbstractLIBORMonteCarloProduct product, double targetValue, double weight) {
        // Delegate instead of duplicating the field assignments.
        this(product, new Scalar(targetValue), weight);
    }
    /**
     * @return the product.
     */
    public AbstractLIBORMonteCarloProduct getProduct() {
        return product;
    }
    /**
     * @return the target value.
     */
    public RandomVariable getTargetValue() {
        return targetValue;
    }
    /**
     * @return the calibrationWeight
     */
    public double getWeight() {
        return weight;
    }
    @Override
    public String toString() {
        return "CalibrationProduct [product=" + getProduct()
                + ", targetValue=" + getTargetValue()
                + ", weight=" + getWeight() + "]";
    }
}
|
Added name.
|
src/main/java/net/finmath/montecarlo/interestrate/CalibrationProduct.java
|
Added name.
|
|
Java
|
apache-2.0
|
c83f5919085272101ef8ba7d53088b53f2dd701f
| 0
|
callMeDimit/commons-digester,callMeDimit/commons-digester,callMeDimit/commons-digester,mohanaraosv/commons-digester,apache/commons-digester,mohanaraosv/commons-digester,apache/commons-digester,mohanaraosv/commons-digester,apache/commons-digester
|
package org.apache.commons.digester3.binder;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import static java.lang.String.format;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.digester3.RuleSet;
/**
* The default Digester EDSL implementation.
*
* @since 3.0
*/
final class DefaultRulesBinder
implements RulesBinder
{
/**
* Errors that can occur during binding time or rules creation.
*/
private final List<ErrorMessage> errors = new ArrayList<ErrorMessage>();
/**
*
*/
private final FromBinderRuleSet fromBinderRuleSet = new FromBinderRuleSet();
/**
*
*/
private ClassLoader classLoader;
/**
*
*
* @param classLoader
*/
void initialize( ClassLoader classLoader )
{
this.classLoader = classLoader;
fromBinderRuleSet.clear();
errors.clear();
}
/**
* {@inheritDoc}
*/
public ClassLoader getContextClassLoader()
{
return this.classLoader;
}
/**
* {@inheritDoc}
*/
public void addError( String messagePattern, Object... arguments )
{
StackTraceElement[] stackTrace = new Exception().getStackTrace();
StackTraceElement element = null;
int stackIndex = stackTrace.length - 1;
while ( element == null && stackIndex > 0 ) // O(n) there's no better way
{
Class<?> moduleClass;
try
{
// check if the set ClassLoader resolves the Class in the StackTrace
moduleClass = Class.forName( stackTrace[stackIndex].getClassName(), false, this.classLoader );
}
catch ( ClassNotFoundException e )
{
try
{
// try otherwise with current ClassLoader
moduleClass =
Class.forName( stackTrace[stackIndex].getClassName(), false, this.getClass().getClassLoader() );
}
catch ( ClassNotFoundException e1 )
{
// Class in the StackTrace can't be found, don't write the file name:line number detail in the
// message
moduleClass = null;
}
}
if ( moduleClass != null && RulesModule.class.isAssignableFrom( moduleClass ) )
{
element = stackTrace[stackIndex];
}
stackIndex--;
}
if ( element != null )
{
messagePattern = format( "%s (%s:%s)", messagePattern, element.getFileName(), element.getLineNumber() );
}
addError( new ErrorMessage( messagePattern, arguments ) );
}
/**
* {@inheritDoc}
*/
public void addError( Throwable t )
{
String message = "An exception was caught and reported. Message: " + t.getMessage();
addError( new ErrorMessage( message, t ) );
}
/**
* Records an error, the full details of which will be logged, and the message of which will be presented to the
* user at a later time.
*
* @param errorMessage The error to record.
*/
private void addError( ErrorMessage errorMessage )
{
this.errors.add( errorMessage );
}
/**
* {@inheritDoc}
*/
public void install( RulesModule rulesModule )
{
rulesModule.configure( this );
}
/**
* {@inheritDoc}
*/
public LinkedRuleBuilder forPattern( String pattern )
{
final String keyPattern;
if ( pattern == null || pattern.length() == 0 )
{
addError( "Null or empty pattern is not valid" );
keyPattern = null;
}
else
{
if ( pattern.endsWith( "/" ) )
{
// to help users who accidently add '/' to the end of their patterns
keyPattern = pattern.substring( 0, pattern.length() - 1 );
}
else
{
keyPattern = pattern;
}
}
return new LinkedRuleBuilder( this, fromBinderRuleSet, classLoader, keyPattern );
}
/**
*
*
* @return
*/
boolean hasError()
{
return !errors.isEmpty();
}
/**
*
*
* @return
*/
int errorsSize()
{
return errors.size();
}
/**
*
*
* @return
*/
Iterable<ErrorMessage> getErrors()
{
return errors;
}
/**
*
*
* @return
*/
RuleSet getFromBinderRuleSet()
{
return fromBinderRuleSet;
}
}
|
src/main/java/org/apache/commons/digester3/binder/DefaultRulesBinder.java
|
package org.apache.commons.digester3.binder;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import static java.lang.String.format;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.digester3.RuleSet;
/**
* The default Digester EDSL implementation.
*
* @since 3.0
*/
final class DefaultRulesBinder
implements RulesBinder
{
/**
* Errors that can occur during binding time or rules creation.
*/
private final List<ErrorMessage> errors = new ArrayList<ErrorMessage>();
/**
*
*/
private final FromBinderRuleSet fromBinderRuleSet = new FromBinderRuleSet();
/**
*
*/
private ClassLoader classLoader;
/**
*
*
* @param classLoader
*/
void initialize( ClassLoader classLoader )
{
this.classLoader = classLoader;
fromBinderRuleSet.clear();
errors.clear();
}
/**
* {@inheritDoc}
*/
public ClassLoader getContextClassLoader()
{
return this.classLoader;
}
/**
* {@inheritDoc}
*/
public void addError( String messagePattern, Object... arguments )
{
StackTraceElement[] stackTrace = new Exception().getStackTrace();
StackTraceElement element = null;
int stackIndex = stackTrace.length - 1;
while ( element == null && stackIndex > 0 ) // O(n) there's no better way
{
Class<?> moduleClass;
try
{
// check if the set ClassLoader resolves the Class in the StackTrace
moduleClass = Class.forName( stackTrace[stackIndex].getClassName(), false, this.classLoader );
}
catch ( ClassNotFoundException e )
{
try
{
// try otherwise with current ClassLoader
moduleClass =
Class.forName( stackTrace[stackIndex].getClassName(), false, this.getClass().getClassLoader() );
}
catch ( ClassNotFoundException e1 )
{
// Class in the StackTrace can't be found, don't write the file name:line number detail in the
// message
moduleClass = null;
}
}
if ( moduleClass != null )
{
if ( RulesModule.class.isAssignableFrom( moduleClass ) )
{
element = stackTrace[stackIndex];
}
}
stackIndex--;
}
if ( element != null )
{
messagePattern = format( "%s (%s:%s)", messagePattern, element.getFileName(), element.getLineNumber() );
}
addError( new ErrorMessage( messagePattern, arguments ) );
}
/**
* {@inheritDoc}
*/
public void addError( Throwable t )
{
String message = "An exception was caught and reported. Message: " + t.getMessage();
addError( new ErrorMessage( message, t ) );
}
/**
* Records an error, the full details of which will be logged, and the message of which will be presented to the
* user at a later time.
*
* @param errorMessage The error to record.
*/
private void addError( ErrorMessage errorMessage )
{
this.errors.add( errorMessage );
}
/**
* {@inheritDoc}
*/
public void install( RulesModule rulesModule )
{
rulesModule.configure( this );
}
/**
* {@inheritDoc}
*/
public LinkedRuleBuilder forPattern( String pattern )
{
final String keyPattern;
if ( pattern == null || pattern.length() == 0 )
{
addError( "Null or empty pattern is not valid" );
keyPattern = null;
}
else
{
if ( pattern.endsWith( "/" ) )
{
// to help users who accidently add '/' to the end of their patterns
keyPattern = pattern.substring( 0, pattern.length() - 1 );
}
else
{
keyPattern = pattern;
}
}
return new LinkedRuleBuilder( this, fromBinderRuleSet, classLoader, keyPattern );
}
/**
*
*
* @return
*/
boolean hasError()
{
return !errors.isEmpty();
}
/**
*
*
* @return
*/
int errorsSize()
{
return errors.size();
}
/**
*
*
* @return
*/
Iterable<ErrorMessage> getErrors()
{
return errors;
}
/**
*
*
* @return
*/
RuleSet getFromBinderRuleSet()
{
return fromBinderRuleSet;
}
}
|
fixed PMD violation: These nested if statements could be combined
git-svn-id: 871f264856ddff118359d15337bbbf32ea57c748@1187569 13f79535-47bb-0310-9956-ffa450edef68
|
src/main/java/org/apache/commons/digester3/binder/DefaultRulesBinder.java
|
fixed PMD violation: These nested if statements could be combined
|
|
Java
|
apache-2.0
|
6d581ccb9dd146646d0f8cb23b0a8ffcce6f687c
| 0
|
josephw/maven,atanasenko/maven,wangyuesong0/maven,kidaa/maven-1,changbai1980/maven,wangyuesong0/maven,trajano/maven,apache/maven,dsyer/maven,Mounika-Chirukuri/maven,skitt/maven,Tibor17/maven,gorcz/maven,Distrotech/maven,mcculls/maven,lbndev/maven,barthel/maven,ChristianSchulte/maven,ChristianSchulte/maven,cstamas/maven,stephenc/maven,xasx/maven,lbndev/maven,atanasenko/maven,aheritier/maven,changbai1980/maven,njuneau/maven,keith-turner/maven,Mounika-Chirukuri/maven,cstamas/maven,dsyer/maven,olamy/maven,skitt/maven,lbndev/maven,karthikjaps/maven,njuneau/maven,wangyuesong/maven,likaiwalkman/maven,josephw/maven,stephenc/maven,vedmishr/demo1,likaiwalkman/maven,barthel/maven,pkozelka/maven,wangyuesong/maven,xasx/maven,olamy/maven,rogerchina/maven,trajano/maven,keith-turner/maven,mizdebsk/maven,skitt/maven,wangyuesong/maven,Distrotech/maven,ChristianSchulte/maven,aheritier/maven,apache/maven,kidaa/maven-1,kidaa/maven-1,barthel/maven,apache/maven,rogerchina/maven,keith-turner/maven,Mounika-Chirukuri/maven,cstamas/maven,wangyuesong0/maven,njuneau/maven,vedmishr/demo1,mizdebsk/maven,likaiwalkman/maven,aheritier/maven,pkozelka/maven,mizdebsk/maven,josephw/maven,rogerchina/maven,changbai1980/maven,Tibor17/maven,runepeter/maven-deploy-plugin-2.8.1,karthikjaps/maven,gorcz/maven,stephenc/maven,xasx/maven,atanasenko/maven,vedmishr/demo1,pkozelka/maven,karthikjaps/maven,gorcz/maven,mcculls/maven,dsyer/maven,mcculls/maven,olamy/maven,trajano/maven,runepeter/maven-deploy-plugin-2.8.1
|
package org.apache.maven.plugin;
/*
* Copyright 2001-2004 The Apache Software Foundation. Licensed under the Apache
* License, Version 2.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law
* or agreed to in writing, software distributed under the License is
* distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the specific language
* governing permissions and limitations under the License.
*/
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.DefaultArtifact;
import org.apache.maven.artifact.MavenMetadataSource;
import org.apache.maven.artifact.handler.manager.ArtifactHandlerManager;
import org.apache.maven.artifact.repository.ArtifactRepository;
import org.apache.maven.artifact.resolver.ArtifactResolver;
import org.apache.maven.artifact.resolver.filter.ArtifactFilter;
import org.apache.maven.artifact.resolver.filter.ExclusionSetFilter;
import org.apache.maven.plugin.descriptor.MojoDescriptor;
import org.apache.maven.plugin.descriptor.PluginDescriptor;
import org.apache.maven.plugin.descriptor.PluginDescriptorBuilder;
import org.codehaus.plexus.ArtifactEnabledContainer;
import org.codehaus.plexus.PlexusConstants;
import org.codehaus.plexus.PlexusContainer;
import org.codehaus.plexus.component.discovery.ComponentDiscoveryEvent;
import org.codehaus.plexus.component.discovery.ComponentDiscoveryListener;
import org.codehaus.plexus.component.repository.ComponentSetDescriptor;
import org.codehaus.plexus.context.Context;
import org.codehaus.plexus.context.ContextException;
import org.codehaus.plexus.logging.AbstractLogEnabled;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.Contextualizable;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.Initializable;
import org.codehaus.plexus.util.dag.CycleDetectedException;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
public class DefaultPluginManager
extends AbstractLogEnabled
implements PluginManager, ComponentDiscoveryListener, Initializable, Contextualizable
{
static String MAVEN_PLUGIN = "maven-plugin";
protected Map mojoDescriptors;
protected Map pluginDescriptors;
protected ArtifactResolver artifactResolver;
protected ArtifactHandlerManager artifactHandlerManager;
protected PlexusContainer container;
protected PluginDescriptorBuilder pluginDescriptorBuilder;
protected Set remotePluginRepositories;
protected ArtifactRepository localRepository;
protected ArtifactFilter artifactFilter;
public DefaultPluginManager()
{
mojoDescriptors = new HashMap();
pluginDescriptors = new HashMap();
pluginDescriptorBuilder = new PluginDescriptorBuilder();
}
// ----------------------------------------------------------------------
// Goal descriptors
// ----------------------------------------------------------------------
public Map getMojoDescriptors()
{
return mojoDescriptors;
}
public MojoDescriptor getMojoDescriptor( String name )
{
return (MojoDescriptor) mojoDescriptors.get( name );
}
// ----------------------------------------------------------------------
//
// ----------------------------------------------------------------------
private Set pluginsInProcess = new HashSet();
public void processPluginDescriptor( MavenPluginDescriptor mavenPluginDescriptor )
throws CycleDetectedException
{
if ( pluginsInProcess.contains( mavenPluginDescriptor.getPluginId() ) )
{
return;
}
pluginsInProcess.add( mavenPluginDescriptor.getPluginId() );
PluginDescriptor pluginDescriptor = mavenPluginDescriptor.getPluginDescriptor();
for ( Iterator it = mavenPluginDescriptor.getMavenMojoDescriptors().iterator(); it.hasNext(); )
{
MavenMojoDescriptor mavenMojoDescriptor = (MavenMojoDescriptor) it.next();
MojoDescriptor mojoDescriptor = mavenMojoDescriptor.getMojoDescriptor();
mojoDescriptors.put( mojoDescriptor.getId(), mojoDescriptor );
pluginDescriptors.put( pluginDescriptor.getId(), pluginDescriptor );
}
}
// ----------------------------------------------------------------------
// Plugin discovery
// ----------------------------------------------------------------------
public void componentDiscovered( ComponentDiscoveryEvent event )
{
ComponentSetDescriptor componentSetDescriptor = event.getComponentSetDescriptor();
if ( !( componentSetDescriptor instanceof MavenPluginDescriptor ) )
{
return;
}
MavenPluginDescriptor pluginDescriptor = (MavenPluginDescriptor) componentSetDescriptor;
try
{
processPluginDescriptor( pluginDescriptor );
}
catch ( CycleDetectedException e )
{
getLogger().error( "A cycle was detected in the goal graph: ", e );
}
}
// ----------------------------------------------------------------------
//
// ----------------------------------------------------------------------
public boolean isPluginInstalled( String pluginId )
{
return pluginDescriptors.containsKey( pluginId );
}
private String getPluginId( String goalName )
{
if ( goalName.indexOf( ":" ) > 0 )
{
return goalName.substring( 0, goalName.indexOf( ":" ) );
}
return goalName;
}
public void verifyPluginForGoal( String goalName ) throws Exception
{
String pluginId = getPluginId( goalName );
if ( !isPluginInstalled( pluginId ) )
{
//!! This is entirely crappy. We need a better naming for plugin
// artifact ids and
// we definitely need better version extraction support.
String artifactId = "maven-" + pluginId + "-plugin";
String version = "1.0-SNAPSHOT";
Artifact pluginArtifact = new DefaultArtifact( "maven", artifactId, version, "plugin", "jar" );
addPlugin( pluginArtifact );
// Now, we need to resolve the plugins for this goal's prereqs.
MojoDescriptor mojoDescriptor = getMojoDescriptor( goalName );
List prereqs = mojoDescriptor.getPrereqs();
if ( prereqs != null )
{
for ( Iterator it = prereqs.iterator(); it.hasNext(); )
{
String prereq = (String) it.next();
verifyPluginForGoal( prereq );
}
}
}
}
public void addPlugin( Artifact pluginArtifact )
throws Exception
{
artifactResolver = (ArtifactResolver) container.lookup( ArtifactResolver.ROLE );
MavenMetadataSource metadataSource = new MavenMetadataSource( remotePluginRepositories,
localRepository,
artifactResolver );
( (ArtifactEnabledContainer) container ).addComponent( pluginArtifact,
artifactResolver,
remotePluginRepositories,
localRepository,
metadataSource,
artifactFilter );
}
public void contextualize( Context context )
throws ContextException
{
container = (ArtifactEnabledContainer) context.get( PlexusConstants.PLEXUS_KEY );
}
public void initialize()
throws Exception
{
artifactFilter = new ExclusionSetFilter( new String[]
{
"maven-core",
"maven-artifact",
"maven-model",
"maven-plugin",
"plexus",
"xpp3",
"classworlds",
"ognl"
} );
// TODO: move this to be configurable from the Maven component
remotePluginRepositories = new HashSet();
// TODO: needs to be configured from the POM element
remotePluginRepositories.add( new ArtifactRepository( "plugin-repository", "http://repo1.maven.org" ) );
}
// TODO: is this needed or can it be found from the session?
public ArtifactRepository getLocalRepository()
{
return localRepository;
}
// TODO: is this needed or can it be found from the session? It is currently set from the session
public void setLocalRepository( ArtifactRepository localRepository )
{
this.localRepository = localRepository;
}
}
|
maven-core/src/main/java/org/apache/maven/plugin/DefaultPluginManager.java
|
package org.apache.maven.plugin;
/*
* Copyright 2001-2004 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.DefaultArtifact;
import org.apache.maven.artifact.MavenMetadataSource;
import org.apache.maven.artifact.handler.manager.ArtifactHandlerManager;
import org.apache.maven.artifact.repository.ArtifactRepository;
import org.apache.maven.artifact.resolver.ArtifactResolver;
import org.apache.maven.artifact.resolver.filter.ArtifactFilter;
import org.apache.maven.artifact.resolver.filter.ExclusionSetFilter;
import org.apache.maven.plugin.descriptor.MojoDescriptor;
import org.apache.maven.plugin.descriptor.PluginDescriptor;
import org.apache.maven.plugin.descriptor.PluginDescriptorBuilder;
import org.codehaus.plexus.ArtifactEnabledContainer;
import org.codehaus.plexus.PlexusConstants;
import org.codehaus.plexus.PlexusContainer;
import org.codehaus.plexus.component.discovery.ComponentDiscoveryEvent;
import org.codehaus.plexus.component.discovery.ComponentDiscoveryListener;
import org.codehaus.plexus.component.repository.ComponentSetDescriptor;
import org.codehaus.plexus.context.Context;
import org.codehaus.plexus.context.ContextException;
import org.codehaus.plexus.logging.AbstractLogEnabled;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.Contextualizable;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.Initializable;
import org.codehaus.plexus.util.dag.CycleDetectedException;
public class DefaultPluginManager
extends AbstractLogEnabled
implements PluginManager, ComponentDiscoveryListener, Initializable, Contextualizable
{
static String MAVEN_PLUGIN = "maven-plugin";
protected Map mojoDescriptors;
protected Map pluginDescriptors;
protected ArtifactResolver artifactResolver;
protected ArtifactHandlerManager artifactHandlerManager;
protected PlexusContainer container;
protected PluginDescriptorBuilder pluginDescriptorBuilder;
protected Set remotePluginRepositories;
protected ArtifactRepository localRepository;
protected ArtifactFilter artifactFilter;
public DefaultPluginManager()
{
mojoDescriptors = new HashMap();
pluginDescriptors = new HashMap();
pluginDescriptorBuilder = new PluginDescriptorBuilder();
}
// ----------------------------------------------------------------------
// Goal descriptors
// ----------------------------------------------------------------------
public Map getMojoDescriptors()
{
return mojoDescriptors;
}
public MojoDescriptor getMojoDescriptor( String name )
{
return (MojoDescriptor) mojoDescriptors.get( name );
}
// ----------------------------------------------------------------------
//
// ----------------------------------------------------------------------
private Set pluginsInProcess = new HashSet();
public void processPluginDescriptor( MavenPluginDescriptor mavenPluginDescriptor )
throws CycleDetectedException
{
if ( pluginsInProcess.contains( mavenPluginDescriptor.getPluginId() ) )
{
return;
}
pluginsInProcess.add( mavenPluginDescriptor.getPluginId() );
PluginDescriptor pluginDescriptor = mavenPluginDescriptor.getPluginDescriptor();
for ( Iterator it = mavenPluginDescriptor.getMavenMojoDescriptors().iterator(); it.hasNext(); )
{
MavenMojoDescriptor mavenMojoDescriptor = (MavenMojoDescriptor) it.next();
MojoDescriptor mojoDescriptor = mavenMojoDescriptor.getMojoDescriptor();
mojoDescriptors.put( mojoDescriptor.getId(), mojoDescriptor );
pluginDescriptors.put( pluginDescriptor.getId(), pluginDescriptor );
}
}
// ----------------------------------------------------------------------
// Plugin discovery
// ----------------------------------------------------------------------
public void componentDiscovered( ComponentDiscoveryEvent event )
{
ComponentSetDescriptor componentSetDescriptor = event.getComponentSetDescriptor();
if ( !( componentSetDescriptor instanceof MavenPluginDescriptor ) )
{
return;
}
MavenPluginDescriptor pluginDescriptor = (MavenPluginDescriptor) componentSetDescriptor;
try
{
processPluginDescriptor( pluginDescriptor );
}
catch ( CycleDetectedException e )
{
getLogger().error( "A cycle was detected in the goal graph: ", e );
}
}
// ----------------------------------------------------------------------
//
// ----------------------------------------------------------------------
public boolean isPluginInstalled( String pluginId )
{
return pluginDescriptors.containsKey( pluginId );
}
private String getPluginId( String goalName )
{
if ( goalName.indexOf( ":" ) > 0 )
{
return goalName.substring( 0, goalName.indexOf( ":" ) );
}
return goalName;
}
public void verifyPluginForGoal( String goalName ) throws Exception
{
String pluginId = getPluginId( goalName );
if ( !isPluginInstalled( pluginId ) )
{
//!! This is entirely crappy. We need a better naming for plugin
// artifact ids and
// we definitely need better version extraction support.
String artifactId = "maven-" + pluginId + "-plugin";
String version = "1.0-SNAPSHOT";
Artifact pluginArtifact = new DefaultArtifact( "maven", artifactId, version, "plugin", "jar" );
addPlugin( pluginArtifact );
// Now, we need to resolve the plugins for this goal's prereqs.
MojoDescriptor mojoDescriptor = getMojoDescriptor( goalName );
if ( mojoDescriptor == null )
{
throw new Exception( "Could not find a mojo descriptor for goal: '" + goalName + "'." );
}
List prereqs = mojoDescriptor.getPrereqs();
if ( prereqs != null )
{
for ( Iterator it = prereqs.iterator(); it.hasNext(); )
{
String prereq = (String) it.next();
verifyPluginForGoal( prereq );
}
}
}
}
public void addPlugin( Artifact pluginArtifact )
throws Exception
{
artifactResolver = (ArtifactResolver) container.lookup( ArtifactResolver.ROLE );
MavenMetadataSource metadataSource = new MavenMetadataSource( remotePluginRepositories,
localRepository,
artifactResolver );
( (ArtifactEnabledContainer) container ).addComponent( pluginArtifact,
artifactResolver,
remotePluginRepositories,
localRepository,
metadataSource,
artifactFilter );
}
public void contextualize( Context context )
throws ContextException
{
container = (ArtifactEnabledContainer) context.get( PlexusConstants.PLEXUS_KEY );
}
public void initialize()
throws Exception
{
artifactFilter = new ExclusionSetFilter( new String[]
{
"maven-core",
"maven-artifact",
"maven-model",
"maven-plugin",
"plexus",
"xstream",
"xpp3",
"classworlds",
"ognl"
} );
// TODO: move this to be configurable from the Maven component
remotePluginRepositories = new HashSet();
// TODO: needs to be configured from the POM element
remotePluginRepositories.add( new ArtifactRepository( "plugin-repository", "http://repo1.maven.org" ) );
}
// TODO: is this needed or can it be found from the session?
public ArtifactRepository getLocalRepository()
{
return localRepository;
}
// TODO: is this needed or can it be found from the session? It is currently set from the session
public void setLocalRepository( ArtifactRepository localRepository )
{
this.localRepository = localRepository;
}
}
|
o don't need to block out xstream anymore, it's not a core dep any longer
git-svn-id: 2c527eb49caa05e19d6b2be874bf74fa9d7ea670@163218 13f79535-47bb-0310-9956-ffa450edef68
|
maven-core/src/main/java/org/apache/maven/plugin/DefaultPluginManager.java
|
o don't need to block out xstream anymore, it's not a core dep any longer
|
|
Java
|
bsd-3-clause
|
8531559baad1751c586a82c516c1f1706367a8a9
| 0
|
wdv4758h/ZipPy,wdv4758h/ZipPy,wdv4758h/ZipPy,wdv4758h/ZipPy,wdv4758h/ZipPy,wdv4758h/ZipPy,wdv4758h/ZipPy,wdv4758h/ZipPy
|
/*
* Copyright (c) 2011, 2014, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package com.oracle.graal.replacements;
import static com.oracle.graal.api.code.MemoryBarriers.*;
import static com.oracle.graal.api.meta.DeoptimizationAction.*;
import static com.oracle.graal.api.meta.DeoptimizationReason.*;
import static com.oracle.graal.api.meta.LocationIdentity.*;
import static com.oracle.graal.nodes.java.ArrayLengthNode.*;
import java.util.*;
import com.oracle.graal.api.code.*;
import com.oracle.graal.api.meta.*;
import com.oracle.graal.api.replacements.*;
import com.oracle.graal.asm.*;
import com.oracle.graal.compiler.common.*;
import com.oracle.graal.compiler.common.type.*;
import com.oracle.graal.graph.*;
import com.oracle.graal.nodes.*;
import com.oracle.graal.nodes.HeapAccess.BarrierType;
import com.oracle.graal.nodes.calc.*;
import com.oracle.graal.nodes.extended.*;
import com.oracle.graal.nodes.java.*;
import com.oracle.graal.nodes.spi.*;
import com.oracle.graal.nodes.type.*;
import com.oracle.graal.nodes.virtual.*;
import com.oracle.graal.phases.util.*;
/**
* VM-independent lowerings for standard Java nodes. VM-specific methods are abstract and must be
* implemented by VM-specific subclasses.
*/
public abstract class DefaultJavaLoweringProvider implements LoweringProvider {
protected final MetaAccessProvider metaAccess;
protected final TargetDescription target;
private BoxingSnippets.Templates boxingSnippets;
public DefaultJavaLoweringProvider(MetaAccessProvider metaAccess, TargetDescription target) {
this.metaAccess = metaAccess;
this.target = target;
}
public void initialize(Providers providers, SnippetReflectionProvider snippetReflection) {
boxingSnippets = new BoxingSnippets.Templates(providers, snippetReflection, target);
}
@Override
public void lower(Node n, LoweringTool tool) {
if (n instanceof LoadFieldNode) {
lowerLoadFieldNode((LoadFieldNode) n, tool);
} else if (n instanceof StoreFieldNode) {
lowerStoreFieldNode((StoreFieldNode) n, tool);
} else if (n instanceof LoadIndexedNode) {
lowerLoadIndexedNode((LoadIndexedNode) n, tool);
} else if (n instanceof StoreIndexedNode) {
lowerStoreIndexedNode((StoreIndexedNode) n, tool);
} else if (n instanceof ArrayLengthNode) {
lowerArrayLengthNode((ArrayLengthNode) n, tool);
} else if (n instanceof LoadHubNode) {
lowerLoadHubNode((LoadHubNode) n);
} else if (n instanceof CompareAndSwapNode) {
lowerCompareAndSwapNode((CompareAndSwapNode) n);
} else if (n instanceof AtomicReadAndWriteNode) {
lowerAtomicReadAndWriteNode((AtomicReadAndWriteNode) n);
} else if (n instanceof UnsafeLoadNode) {
lowerUnsafeLoadNode((UnsafeLoadNode) n, tool);
} else if (n instanceof UnsafeStoreNode) {
lowerUnsafeStoreNode((UnsafeStoreNode) n);
} else if (n instanceof JavaReadNode) {
lowerJavaReadNode((JavaReadNode) n);
} else if (n instanceof JavaWriteNode) {
lowerJavaWriteNode((JavaWriteNode) n);
} else if (n instanceof CommitAllocationNode) {
lowerCommitAllocationNode((CommitAllocationNode) n, tool);
} else if (n instanceof BoxNode) {
boxingSnippets.lower((BoxNode) n, tool);
} else if (n instanceof UnboxNode) {
boxingSnippets.lower((UnboxNode) n, tool);
} else {
throw GraalInternalError.shouldNotReachHere("Node implementing Lowerable not handled: " + n);
}
}
protected void lowerLoadFieldNode(LoadFieldNode loadField, LoweringTool tool) {
assert loadField.getKind() != Kind.Illegal;
StructuredGraph graph = loadField.graph();
ResolvedJavaField field = loadField.field();
ValueNode object = loadField.isStatic() ? staticFieldBase(graph, field) : loadField.object();
Stamp loadStamp = loadStamp(loadField.stamp(), field.getKind(), true);
ConstantLocationNode location = createFieldLocation(graph, field, false);
assert location != null : "Field that is loaded must not be eliminated";
ReadNode memoryRead = graph.add(new ReadNode(object, location, loadStamp, fieldLoadBarrierType(field)));
ValueNode readValue = implicitLoadConvert(graph, field.getKind(), memoryRead);
loadField.replaceAtUsages(readValue);
graph.replaceFixed(loadField, memoryRead);
memoryRead.setGuard(createNullCheck(object, memoryRead, tool));
if (loadField.isVolatile()) {
MembarNode preMembar = graph.add(new MembarNode(JMM_PRE_VOLATILE_READ));
graph.addBeforeFixed(memoryRead, preMembar);
MembarNode postMembar = graph.add(new MembarNode(JMM_POST_VOLATILE_READ));
graph.addAfterFixed(memoryRead, postMembar);
}
}
protected void lowerStoreFieldNode(StoreFieldNode storeField, LoweringTool tool) {
StructuredGraph graph = storeField.graph();
ResolvedJavaField field = storeField.field();
ValueNode object = storeField.isStatic() ? staticFieldBase(graph, field) : storeField.object();
ValueNode value = implicitStoreConvert(graph, storeField.field().getKind(), storeField.value());
ConstantLocationNode location = createFieldLocation(graph, field, false);
if (location == null) {
/* Field has been eliminated, so no write necessary. */
assert !storeField.isVolatile() : "missing memory barriers";
graph.removeFixed(storeField);
return;
}
WriteNode memoryWrite = graph.add(new WriteNode(object, value, location, fieldStoreBarrierType(storeField.field())));
memoryWrite.setStateAfter(storeField.stateAfter());
graph.replaceFixedWithFixed(storeField, memoryWrite);
memoryWrite.setGuard(createNullCheck(object, memoryWrite, tool));
if (storeField.isVolatile()) {
MembarNode preMembar = graph.add(new MembarNode(JMM_PRE_VOLATILE_WRITE));
graph.addBeforeFixed(memoryWrite, preMembar);
MembarNode postMembar = graph.add(new MembarNode(JMM_POST_VOLATILE_WRITE));
graph.addAfterFixed(memoryWrite, postMembar);
}
}
/**
 * Lowers a {@link LoadIndexedNode} to a raw {@link ReadNode} at the element's indexed
 * location, guarded by an array bounds check; the loaded value is widened back to stack kind.
 */
protected void lowerLoadIndexedNode(LoadIndexedNode loadIndexed, LoweringTool tool) {
StructuredGraph graph = loadIndexed.graph();
Kind elementKind = loadIndexed.elementKind();
LocationNode location = createArrayLocation(graph, elementKind, loadIndexed.index(), false);
// Narrow the stamp to the element's storage width for sub-int kinds.
Stamp loadStamp = loadStamp(loadIndexed.stamp(), elementKind, true);
ReadNode memoryRead = graph.add(new ReadNode(loadIndexed.array(), location, loadStamp, BarrierType.NONE));
ValueNode readValue = implicitLoadConvert(graph, elementKind, memoryRead);
memoryRead.setGuard(createBoundsCheck(loadIndexed, tool));
loadIndexed.replaceAtUsages(readValue);
graph.replaceFixed(loadIndexed, memoryRead);
}
/**
 * Lowers a {@link StoreIndexedNode} to a raw {@link WriteNode} guarded by a bounds check.
 * For object arrays whose stored value is not provably null, an array store check is
 * inserted: a static {@link CheckCastNode} when the exact array type is known, otherwise a
 * dynamic check against the array's component hub read at runtime.
 */
protected void lowerStoreIndexedNode(StoreIndexedNode storeIndexed, LoweringTool tool) {
StructuredGraph graph = storeIndexed.graph();
GuardingNode boundsCheck = createBoundsCheck(storeIndexed, tool);
Kind elementKind = storeIndexed.elementKind();
LocationNode location = createArrayLocation(graph, elementKind, storeIndexed.index(), false);
ValueNode value = storeIndexed.value();
ValueNode array = storeIndexed.array();
FixedWithNextNode checkCastNode = null;
if (elementKind == Kind.Object && !StampTool.isObjectAlwaysNull(value)) {
/* Array store check. */
ResolvedJavaType arrayType = StampTool.typeOrNull(array);
if (arrayType != null && StampTool.isExactType(array)) {
ResolvedJavaType elementType = arrayType.getComponentType();
// Storing into Object[] can never fail the store check, so skip it.
if (!MetaUtil.isJavaLangObject(elementType)) {
checkCastNode = graph.add(new CheckCastNode(elementType, value, null, true));
graph.addBeforeFixed(storeIndexed, checkCastNode);
value = checkCastNode;
}
} else {
// Exact type unknown: read the component hub at runtime and check dynamically.
ValueNode arrayClass = createReadHub(graph, array, boundsCheck);
ValueNode componentHub = createReadArrayComponentHub(graph, arrayClass, storeIndexed);
checkCastNode = graph.add(new CheckCastDynamicNode(componentHub, value, true));
graph.addBeforeFixed(storeIndexed, checkCastNode);
value = checkCastNode;
}
}
WriteNode memoryWrite = graph.add(new WriteNode(array, implicitStoreConvert(graph, elementKind, value), location, arrayStoreBarrierType(storeIndexed.elementKind())));
memoryWrite.setGuard(boundsCheck);
memoryWrite.setStateAfter(storeIndexed.stateAfter());
graph.replaceFixedWithFixed(storeIndexed, memoryWrite);
if (checkCastNode instanceof Lowerable) {
/* Recursive lowering of the store check node. */
((Lowerable) checkCastNode).lower(tool);
}
}
/**
 * Lowers an {@link ArrayLengthNode} to a raw read of the length field at the VM-specific
 * array-length offset, guarded by a null check on the array.
 */
protected void lowerArrayLengthNode(ArrayLengthNode arrayLengthNode, LoweringTool tool) {
StructuredGraph graph = arrayLengthNode.graph();
ValueNode array = arrayLengthNode.array();
ConstantLocationNode location = ConstantLocationNode.create(ARRAY_LENGTH_LOCATION, Kind.Int, arrayLengthOffset(), graph);
// Array lengths are always non-negative, so use a positive-int stamp.
ReadNode arrayLengthRead = graph.add(new ReadNode(array, location, StampFactory.positiveInt(), BarrierType.NONE));
arrayLengthRead.setGuard(createNullCheck(array, arrayLengthNode, tool));
graph.replaceFixedWithFixed(arrayLengthNode, arrayLengthRead);
}
/**
 * Lowers a {@link LoadHubNode} to a VM-specific hub read. Deferred until guards are fixed
 * (FIXED_DEOPTS or later) so the read can be anchored correctly.
 */
protected void lowerLoadHubNode(LoadHubNode loadHub) {
StructuredGraph graph = loadHub.graph();
if (graph.getGuardsStage().ordinal() < StructuredGraph.GuardsStage.FIXED_DEOPTS.ordinal()) {
// Too early: leave the node for a later lowering round.
return;
}
ValueNode hub = createReadHub(graph, loadHub.object(), loadHub.getGuard());
graph.replaceFloating(loadHub, hub);
}
/**
 * Lowers a {@link CompareAndSwapNode} to a {@link LoweredCompareAndSwapNode} with an
 * explicit location and the appropriate store barrier; expected and new values are narrowed
 * to storage width first.
 */
protected void lowerCompareAndSwapNode(CompareAndSwapNode cas) {
StructuredGraph graph = cas.graph();
Kind valueKind = cas.getValueKind();
LocationNode location = createLocation(cas.offset(), cas.getLocationIdentity(), valueKind);
ValueNode expectedValue = implicitStoreConvert(graph, valueKind, cas.expected());
ValueNode newValue = implicitStoreConvert(graph, valueKind, cas.newValue());
LoweredCompareAndSwapNode atomicNode = graph.add(new LoweredCompareAndSwapNode(cas.object(), location, expectedValue, newValue, compareAndSwapBarrierType(cas)));
atomicNode.setStateAfter(cas.stateAfter());
graph.replaceFixedWithFixed(cas, atomicNode);
}
/**
 * Lowers an {@link AtomicReadAndWriteNode} (atomic exchange) to a
 * {@link LoweredAtomicReadAndWriteNode}; the new value is narrowed before the write and the
 * previous value is widened back for the node's usages.
 */
protected void lowerAtomicReadAndWriteNode(AtomicReadAndWriteNode n) {
StructuredGraph graph = n.graph();
Kind valueKind = n.getValueKind();
LocationNode location = IndexedLocationNode.create(n.getLocationIdentity(), valueKind, 0, n.offset(), graph, 1);
ValueNode newValue = implicitStoreConvert(graph, valueKind, n.newValue());
LoweredAtomicReadAndWriteNode memoryRead = graph.add(new LoweredAtomicReadAndWriteNode(n.object(), location, newValue, atomicReadAndWriteBarrierType(n)));
memoryRead.setStateAfter(n.stateAfter());
ValueNode readValue = implicitLoadConvert(graph, valueKind, memoryRead);
n.replaceAtUsages(readValue);
graph.replaceFixedWithFixed(n, memoryRead);
}
/**
 * Lowers an {@link UnsafeLoadNode} to a raw {@link ReadNode}. If the load carries a guarding
 * condition, the read is anchored behind a {@link ConditionAnchorNode}; otherwise lowering
 * waits until guards are fixed so the read can be pinned to its block.
 */
protected void lowerUnsafeLoadNode(UnsafeLoadNode load, @SuppressWarnings("unused") LoweringTool tool) {
StructuredGraph graph = load.graph();
if (load.getGuardingCondition() != null) {
ConditionAnchorNode valueAnchorNode = graph.add(new ConditionAnchorNode(load.getGuardingCondition()));
ReadNode memoryRead = createUnsafeRead(graph, load, valueAnchorNode);
graph.replaceFixedWithFixed(load, valueAnchorNode);
graph.addAfterFixed(valueAnchorNode, memoryRead);
} else if (graph.getGuardsStage().ordinal() > StructuredGraph.GuardsStage.FLOATING_GUARDS.ordinal()) {
assert load.getKind() != Kind.Illegal;
ReadNode memoryRead = createUnsafeRead(graph, load, null);
// An unsafe read must not float outside its block otherwise
// it may float above an explicit null check on its object.
memoryRead.setGuard(BeginNode.prevBegin(load));
graph.replaceFixedWithFixed(load, memoryRead);
}
}
/**
 * Creates (but does not insert) the raw {@link ReadNode} for an unsafe load and redirects the
 * load's usages to the converted value.
 *
 * @param guard guarding node for the new read, may be null
 * @return the read node; the caller is responsible for placing it in the fixed node list
 */
protected ReadNode createUnsafeRead(StructuredGraph graph, UnsafeLoadNode load, GuardingNode guard) {
// Object-kind reads may be subject to oop compression in VM-specific subclasses.
boolean compressible = load.accessKind() == Kind.Object;
Kind readKind = load.accessKind();
LocationNode location = createLocation(load);
Stamp loadStamp = loadStamp(load.stamp(), readKind, compressible);
ReadNode memoryRead = graph.add(new ReadNode(load.object(), location, loadStamp, guard, BarrierType.NONE));
ValueNode readValue = implicitLoadConvert(graph, readKind, memoryRead, compressible);
load.replaceAtUsages(readValue);
return memoryRead;
}
/**
 * Lowers an {@link UnsafeStoreNode} to a raw {@link WriteNode} with the store barrier
 * appropriate for the (object, value) pair; the value is narrowed to storage width first.
 */
protected void lowerUnsafeStoreNode(UnsafeStoreNode store) {
StructuredGraph graph = store.graph();
LocationNode location = createLocation(store);
ValueNode object = store.object();
// Compressibility is decided by the value's kind, not the declared access kind.
boolean compressible = store.value().getKind() == Kind.Object;
Kind valueKind = store.accessKind();
ValueNode value = implicitStoreConvert(graph, valueKind, store.value(), compressible);
WriteNode write = graph.add(new WriteNode(object, value, location, unsafeStoreBarrierType(store)));
write.setStateAfter(store.stateAfter());
graph.replaceFixedWithFixed(store, write);
}
/**
 * Lowers a {@link JavaReadNode} — a read that already carries its location, barrier type and
 * guard — to a raw {@link ReadNode}, widening the result back to stack kind.
 */
protected void lowerJavaReadNode(JavaReadNode read) {
StructuredGraph graph = read.graph();
Kind valueKind = read.location().getValueKind();
Stamp loadStamp = loadStamp(read.stamp(), valueKind, read.isCompressible());
ReadNode memoryRead = graph.add(new ReadNode(read.object(), read.location(), loadStamp, read.getBarrierType()));
ValueNode readValue = implicitLoadConvert(graph, valueKind, memoryRead, read.isCompressible());
memoryRead.setGuard(read.getGuard());
read.replaceAtUsages(readValue);
graph.replaceFixed(read, memoryRead);
}
/**
 * Lowers a {@link JavaWriteNode} — a write that already carries its location, barrier type
 * and guard — to a raw {@link WriteNode}, narrowing the value to storage width.
 */
protected void lowerJavaWriteNode(JavaWriteNode write) {
StructuredGraph graph = write.graph();
Kind valueKind = write.location().getValueKind();
ValueNode value = implicitStoreConvert(graph, valueKind, write.value(), write.isCompressible());
WriteNode memoryWrite = graph.add(new WriteNode(write.object(), value, write.location(), write.getBarrierType(), write.isInitialization()));
memoryWrite.setStateAfter(write.stateAfter());
graph.replaceFixedWithFixed(write, memoryWrite);
memoryWrite.setGuard(write.getGuard());
}
/**
 * Materializes the virtual objects of a {@link CommitAllocationNode} (produced by escape
 * analysis) into real allocations plus initializing writes.
 *
 * Works in two phases: phase 1 allocates each virtual object and writes all entries whose
 * values are already available, recording in {@code omittedValues} the entries that refer to
 * a virtual object not yet allocated; phase 2 fills in exactly those deferred object
 * references once every allocation exists. Finally locks are re-entered and usages rewired
 * via {@link #finishAllocatedObjects}.
 */
protected void lowerCommitAllocationNode(CommitAllocationNode commit, LoweringTool tool) {
StructuredGraph graph = commit.graph();
if (graph.getGuardsStage() == StructuredGraph.GuardsStage.FIXED_DEOPTS) {
List<AbstractNewObjectNode> recursiveLowerings = new ArrayList<>();
// allocations[i] is the materialized node for virtual object i.
ValueNode[] allocations = new ValueNode[commit.getVirtualObjects().size()];
// Entry positions whose write must be deferred to phase 2.
BitSet omittedValues = new BitSet();
int valuePos = 0;
// Phase 1: allocate objects and emit writes for non-deferred entries.
for (int objIndex = 0; objIndex < commit.getVirtualObjects().size(); objIndex++) {
VirtualObjectNode virtual = commit.getVirtualObjects().get(objIndex);
int entryCount = virtual.entryCount();
AbstractNewObjectNode newObject;
if (virtual instanceof VirtualInstanceNode) {
newObject = graph.add(new NewInstanceNode(virtual.type(), true));
} else {
newObject = graph.add(new NewArrayNode(((VirtualArrayNode) virtual).componentType(), ConstantNode.forInt(entryCount, graph), true));
}
recursiveLowerings.add(newObject);
graph.addBeforeFixed(commit, newObject);
allocations[objIndex] = newObject;
for (int i = 0; i < entryCount; i++) {
ValueNode value = commit.getValues().get(valuePos);
if (value instanceof VirtualObjectNode) {
// May be null if the referenced object has not been allocated yet.
value = allocations[commit.getVirtualObjects().indexOf(value)];
}
if (value == null) {
omittedValues.set(valuePos);
} else if (!(value.isConstant() && value.asConstant().isDefaultForKind())) {
// Constant.illegal is always the defaultForKind, so it is skipped
Kind valueKind = value.getKind();
Kind entryKind = virtual.entryKind(i);
// Truffle requires some leniency in terms of what can be put where:
Kind accessKind = valueKind.getStackKind() == entryKind.getStackKind() ? entryKind : valueKind;
assert valueKind.getStackKind() == entryKind.getStackKind() ||
(valueKind == Kind.Long || valueKind == Kind.Double || (valueKind == Kind.Int && virtual instanceof VirtualArrayNode));
ConstantLocationNode location = null;
BarrierType barrierType = null;
if (virtual instanceof VirtualInstanceNode) {
ResolvedJavaField field = ((VirtualInstanceNode) virtual).field(i);
long offset = fieldOffset(field);
// Negative offset means the field has been eliminated; skip the write.
if (offset >= 0) {
location = ConstantLocationNode.create(initLocationIdentity(), accessKind, offset, graph);
barrierType = fieldInitializationBarrier(entryKind);
}
} else {
location = ConstantLocationNode.create(initLocationIdentity(), accessKind, arrayBaseOffset(entryKind) + i * arrayScalingFactor(entryKind), graph);
barrierType = arrayInitializationBarrier(entryKind);
}
if (location != null) {
WriteNode write = new WriteNode(newObject, implicitStoreConvert(graph, entryKind, value), location, barrierType);
graph.addAfterFixed(newObject, graph.add(write));
}
}
valuePos++;
}
}
valuePos = 0;
// Phase 2: write the deferred object references now that all allocations exist.
for (int objIndex = 0; objIndex < commit.getVirtualObjects().size(); objIndex++) {
VirtualObjectNode virtual = commit.getVirtualObjects().get(objIndex);
int entryCount = virtual.entryCount();
ValueNode newObject = allocations[objIndex];
for (int i = 0; i < entryCount; i++) {
if (omittedValues.get(valuePos)) {
ValueNode value = commit.getValues().get(valuePos);
assert value instanceof VirtualObjectNode;
ValueNode allocValue = allocations[commit.getVirtualObjects().indexOf(value)];
if (!(allocValue.isConstant() && allocValue.asConstant().isDefaultForKind())) {
assert virtual.entryKind(i) == Kind.Object && allocValue.getKind() == Kind.Object;
LocationNode location;
BarrierType barrierType;
if (virtual instanceof VirtualInstanceNode) {
VirtualInstanceNode virtualInstance = (VirtualInstanceNode) virtual;
location = createFieldLocation(graph, virtualInstance.field(i), true);
barrierType = BarrierType.IMPRECISE;
} else {
location = createArrayLocation(graph, virtual.entryKind(i), ConstantNode.forInt(i, graph), true);
barrierType = BarrierType.PRECISE;
}
if (location != null) {
WriteNode write = new WriteNode(newObject, implicitStoreConvert(graph, Kind.Object, allocValue), location, barrierType);
graph.addBeforeFixed(commit, graph.add(write));
}
}
}
valuePos++;
}
}
finishAllocatedObjects(tool, commit, allocations);
graph.removeFixed(commit);
// Lower the NewInstance/NewArray nodes we just created.
for (AbstractNewObjectNode recursiveLowering : recursiveLowerings) {
recursiveLowering.lower(tool);
}
}
}
/**
 * Completes commit-allocation lowering: anchors each allocation so it cannot float, re-enters
 * the monitors that were held on the virtual objects, and rewires every
 * {@link AllocatedObjectNode} usage of the commit to the corresponding real allocation.
 */
public static void finishAllocatedObjects(LoweringTool tool, CommitAllocationNode commit, ValueNode[] allocations) {
StructuredGraph graph = commit.graph();
for (int objIndex = 0; objIndex < commit.getVirtualObjects().size(); objIndex++) {
FixedValueAnchorNode anchor = graph.add(new FixedValueAnchorNode(allocations[objIndex]));
allocations[objIndex] = anchor;
graph.addBeforeFixed(commit, anchor);
}
for (int objIndex = 0; objIndex < commit.getVirtualObjects().size(); objIndex++) {
// Re-acquire locks that escape analysis had virtualized away.
for (MonitorIdNode monitorId : commit.getLocks(objIndex)) {
MonitorEnterNode enter = graph.add(new MonitorEnterNode(allocations[objIndex], monitorId));
graph.addBeforeFixed(commit, enter);
enter.lower(tool);
}
}
for (Node usage : commit.usages().snapshot()) {
AllocatedObjectNode addObject = (AllocatedObjectNode) usage;
int index = commit.getVirtualObjects().indexOf(addObject.getVirtualObject());
graph.replaceFloating(addObject, allocations[index]);
}
}
/**
 * Read barrier for a field load; no barrier by default. Hook for VM-specific subclasses.
 */
protected BarrierType fieldLoadBarrierType(@SuppressWarnings("unused") ResolvedJavaField field) {
return BarrierType.NONE;
}
/**
 * Write barrier for a field store: object fields need an imprecise card mark, primitive
 * fields need none.
 */
protected BarrierType fieldStoreBarrierType(ResolvedJavaField field) {
    return field.getKind() == Kind.Object ? BarrierType.IMPRECISE : BarrierType.NONE;
}
/**
 * Write barrier for an array element store: object elements need a precise card mark,
 * primitive elements need none.
 */
protected BarrierType arrayStoreBarrierType(Kind elementKind) {
    return elementKind == Kind.Object ? BarrierType.PRECISE : BarrierType.NONE;
}
/**
 * Write barrier for initializing a field of a freshly allocated object.
 */
protected BarrierType fieldInitializationBarrier(Kind entryKind) {
    if (entryKind == Kind.Object) {
        return BarrierType.IMPRECISE;
    }
    return BarrierType.NONE;
}
/**
 * Write barrier for initializing an element of a freshly allocated array.
 */
protected BarrierType arrayInitializationBarrier(Kind entryKind) {
    if (entryKind == Kind.Object) {
        return BarrierType.PRECISE;
    }
    return BarrierType.NONE;
}
/**
 * Write barrier for an unsafe store, derived from the target object and stored value.
 */
protected BarrierType unsafeStoreBarrierType(UnsafeStoreNode store) {
return storeBarrierType(store.object(), store.value());
}
/**
 * Write barrier for a compare-and-swap; the expected value's kind determines whether an
 * object barrier is needed.
 */
protected BarrierType compareAndSwapBarrierType(CompareAndSwapNode cas) {
return storeBarrierType(cas.object(), cas.expected());
}
/**
 * Write barrier for an atomic read-and-write (exchange), derived from the target object and
 * the value being written.
 */
protected BarrierType atomicReadAndWriteBarrierType(AtomicReadAndWriteNode n) {
return storeBarrierType(n.object(), n.newValue());
}
/**
 * Barrier needed when {@code value} is stored into {@code object}: none for primitives; an
 * imprecise barrier when the target is statically known not to be an array, otherwise a
 * precise one.
 */
protected BarrierType storeBarrierType(ValueNode object, ValueNode value) {
    if (value.getKind() != Kind.Object) {
        // Primitive stores never need a card mark.
        return BarrierType.NONE;
    }
    ResolvedJavaType type = StampTool.typeOrNull(object);
    boolean knownNonArray = type != null && !type.isArray();
    return knownNonArray ? BarrierType.IMPRECISE : BarrierType.PRECISE;
}
/** VM-specific byte offset of {@code field}; negative if the field has been eliminated. */
protected abstract int fieldOffset(ResolvedJavaField field);
/** VM-specific base object used to address static fields of {@code field}'s holder. */
protected abstract ValueNode staticFieldBase(StructuredGraph graph, ResolvedJavaField field);
/** VM-specific byte offset of the length word in an array object. */
protected abstract int arrayLengthOffset();
/** VM-specific byte offset of the first element of an array of {@code elementKind}. */
protected abstract int arrayBaseOffset(Kind elementKind);
/**
 * Byte size of one array element of {@code elementKind} on the compilation target.
 */
public int arrayScalingFactor(Kind elementKind) {
return target.getSizeInBytes(elementKind);
}
/** Location identity used for writes that initialize a freshly allocated object. */
protected abstract LocationIdentity initLocationIdentity();
/**
 * Narrows {@code stamp} to the in-memory width of {@code kind} (8 bits for boolean/byte,
 * 16 for char/short); all other kinds keep their stack-width stamp.
 *
 * @param compressible unused here; hook for VM subclasses that compress object pointers
 */
protected Stamp loadStamp(Stamp stamp, Kind kind, @SuppressWarnings("unused") boolean compressible) {
switch (kind) {
case Boolean:
case Byte:
return StampTool.narrowingConversion(stamp, 8);
case Char:
case Short:
return StampTool.narrowingConversion(stamp, 16);
}
return stamp;
}
/**
 * Convenience overload of {@link #implicitLoadConvert(StructuredGraph, Kind, ValueNode, boolean)}
 * that treats the value as compressible.
 */
public ValueNode implicitLoadConvert(StructuredGraph graph, Kind kind, ValueNode value) {
return implicitLoadConvert(graph, kind, value, true);
}
/**
 * Widens a just-loaded sub-int value back to its 32-bit stack representation: sign-extend
 * byte/short, zero-extend boolean/char; other kinds pass through unchanged.
 *
 * @param compressible unused here; hook for VM subclasses that compress object pointers
 */
protected ValueNode implicitLoadConvert(StructuredGraph graph, Kind kind, ValueNode value, @SuppressWarnings("unused") boolean compressible) {
    if (kind == Kind.Byte || kind == Kind.Short) {
        return graph.unique(new SignExtendNode(value, 32));
    }
    if (kind == Kind.Boolean || kind == Kind.Char) {
        return graph.unique(new ZeroExtendNode(value, 32));
    }
    return value;
}
/**
 * Convenience overload of {@link #implicitStoreConvert(StructuredGraph, Kind, ValueNode, boolean)}
 * that treats the value as compressible.
 */
public ValueNode implicitStoreConvert(StructuredGraph graph, Kind kind, ValueNode value) {
return implicitStoreConvert(graph, kind, value, true);
}
/**
 * Narrows a stack-width value to its in-memory width before a store: 8 bits for
 * boolean/byte, 16 for char/short; other kinds pass through unchanged.
 *
 * @param compressible unused here; hook for VM subclasses that compress object pointers
 */
protected ValueNode implicitStoreConvert(StructuredGraph graph, Kind kind, ValueNode value, @SuppressWarnings("unused") boolean compressible) {
    if (kind == Kind.Boolean || kind == Kind.Byte) {
        return graph.unique(new NarrowNode(value, 8));
    }
    if (kind == Kind.Char || kind == Kind.Short) {
        return graph.unique(new NarrowNode(value, 16));
    }
    return value;
}
/** VM-specific read of {@code object}'s hub (class pointer), anchored by {@code guard}. */
protected abstract ValueNode createReadHub(StructuredGraph graph, ValueNode object, GuardingNode guard);
/** VM-specific read of the component-type hub out of an array hub, anchored at {@code anchor}. */
protected abstract ValueNode createReadArrayComponentHub(StructuredGraph graph, ValueNode arrayHub, FixedNode anchor);
/**
 * Builds the constant location for accessing {@code field}, or {@code null} if the field has
 * been eliminated by the VM (negative offset). Initializing writes use the init location
 * identity instead of the field's own identity.
 */
protected ConstantLocationNode createFieldLocation(StructuredGraph graph, ResolvedJavaField field, boolean initialization) {
    final int offset = fieldOffset(field);
    if (offset < 0) {
        // Negative offset: the field no longer exists in the object layout.
        return null;
    }
    LocationIdentity identity = initialization ? initLocationIdentity() : field;
    return ConstantLocationNode.create(identity, field.getKind(), offset, graph);
}
/**
 * Builds the location for an unsafe access from its offset, identity and access kind.
 */
protected LocationNode createLocation(UnsafeAccessNode access) {
return createLocation(access.offset(), access.getLocationIdentity(), access.accessKind());
}
/**
 * Builds a location node from an offset expression, recognizing common addressing shapes so
 * they can be folded into the location: a constant offset becomes a
 * {@link ConstantLocationNode}; otherwise a sign extension wrapper, an added constant
 * displacement, and a left shift by 1-3 (index scaling of 2/4/8) are peeled off in that
 * order and encoded into an {@link IndexedLocationNode}.
 */
protected LocationNode createLocation(ValueNode offsetNode, LocationIdentity locationIdentity, Kind accessKind) {
ValueNode offset = offsetNode;
if (offset.isConstant()) {
long offsetValue = offset.asConstant().asLong();
return ConstantLocationNode.create(locationIdentity, accessKind, offsetValue, offset.graph());
}
long displacement = 0;
int indexScaling = 1;
boolean signExtend = false;
// Peel off an int->long sign extension so the patterns below can match the 32-bit form.
if (offset instanceof SignExtendNode) {
SignExtendNode extend = (SignExtendNode) offset;
if (extend.getResultBits() == 64) {
signExtend = true;
offset = extend.getInput();
}
}
// Fold "base + constant" into the location's displacement.
if (offset instanceof IntegerAddNode) {
IntegerAddNode integerAddNode = (IntegerAddNode) offset;
if (integerAddNode.y() instanceof ConstantNode) {
displacement = integerAddNode.y().asConstant().asLong();
offset = integerAddNode.x();
}
}
// Fold "index << {1,2,3}" into the location's index scaling (2, 4 or 8 bytes).
if (offset instanceof LeftShiftNode) {
LeftShiftNode leftShiftNode = (LeftShiftNode) offset;
if (leftShiftNode.y() instanceof ConstantNode) {
long shift = leftShiftNode.y().asConstant().asLong();
if (shift >= 1 && shift <= 3) {
if (shift == 1) {
indexScaling = 2;
} else if (shift == 2) {
indexScaling = 4;
} else {
indexScaling = 8;
}
offset = leftShiftNode.x();
}
}
}
if (signExtend) {
// If we were using sign extended values before restore the sign extension.
offset = offset.graph().addOrUnique(new SignExtendNode(offset, 64));
}
return IndexedLocationNode.create(locationIdentity, accessKind, displacement, offset, offset.graph(), indexScaling);
}
/**
 * Builds the indexed location for element {@code index} of an array of {@code elementKind},
 * using the VM's array base offset and per-element scaling. Initializing writes use the init
 * location identity instead of the per-kind array location.
 */
public IndexedLocationNode createArrayLocation(Graph graph, Kind elementKind, ValueNode index, boolean initialization) {
LocationIdentity loc = initialization ? initLocationIdentity() : NamedLocationIdentity.getArrayLocation(elementKind);
return IndexedLocationNode.create(loc, elementKind, arrayBaseOffset(elementKind), index, graph, arrayScalingFactor(elementKind));
}
/**
 * Creates the bounds-check guard for an indexed array access, reading the array length
 * (null-checked) if it is not already available; returns {@code null} when length and index
 * are constants that prove the access in range. Deopts with BoundsCheckException otherwise.
 */
protected GuardingNode createBoundsCheck(AccessIndexedNode n, LoweringTool tool) {
StructuredGraph graph = n.graph();
ValueNode array = n.array();
ValueNode arrayLength = readArrayLength(n.graph(), array, tool.getConstantReflection());
if (arrayLength == null) {
// Length not statically known: emit an explicit length read before the access.
Stamp stamp = StampFactory.positiveInt();
ReadNode readArrayLength = graph.add(new ReadNode(array, ConstantLocationNode.create(ARRAY_LENGTH_LOCATION, Kind.Int, arrayLengthOffset(), graph), stamp, BarrierType.NONE));
graph.addBeforeFixed(n, readArrayLength);
readArrayLength.setGuard(createNullCheck(array, readArrayLength, tool));
arrayLength = readArrayLength;
}
if (arrayLength.isConstant() && n.index().isConstant()) {
int l = arrayLength.asConstant().asInt();
int i = n.index().asConstant().asInt();
if (i >= 0 && i < l) {
// unneeded range check
return null;
}
}
// Unsigned below-than also rejects negative indices in one comparison.
return tool.createGuard(n, graph.unique(new IntegerBelowThanNode(n.index(), arrayLength)), BoundsCheckException, InvalidateReprofile);
}
/**
 * Creates a null-check guard for {@code object} anchored at {@code before}, or {@code null}
 * when the stamp already proves the object non-null. The guard is negated (deopts when the
 * is-null condition holds).
 */
protected GuardingNode createNullCheck(ValueNode object, FixedNode before, LoweringTool tool) {
if (StampTool.isObjectNonNull(object)) {
return null;
}
return tool.createGuard(before, before.graph().unique(new IsNullNode(object)), DeoptimizationReason.NullCheckException, DeoptimizationAction.InvalidateReprofile, true);
}
/**
 * Inverts {@link #createArrayLocation}: recovers the element index expression from an array
 * location node by subtracting the array base offset and dividing by the element size. The
 * asserts document the invariants an array location must satisfy (matching identity and
 * scaling, base offset aligned to the element size).
 */
@Override
public ValueNode reconstructArrayIndex(LocationNode location) {
Kind elementKind = location.getValueKind();
assert location.getLocationIdentity().equals(NamedLocationIdentity.getArrayLocation(elementKind));
long base;
ValueNode index;
int scale = arrayScalingFactor(elementKind);
if (location instanceof ConstantLocationNode) {
// Constant location: the whole index is folded into the displacement.
base = ((ConstantLocationNode) location).getDisplacement();
index = null;
} else if (location instanceof IndexedLocationNode) {
IndexedLocationNode indexedLocation = (IndexedLocationNode) location;
assert indexedLocation.getIndexScaling() == scale;
base = indexedLocation.getDisplacement();
index = indexedLocation.getIndex();
} else {
throw GraalInternalError.shouldNotReachHere();
}
// Convert the byte displacement to an element index.
base -= arrayBaseOffset(elementKind);
assert base >= 0 && base % scale == 0;
base /= scale;
assert NumUtil.isInt(base);
StructuredGraph graph = location.graph();
if (index == null) {
return ConstantNode.forInt((int) base, graph);
} else {
if (base == 0) {
return index;
} else {
return IntegerArithmeticNode.add(graph, ConstantNode.forInt((int) base, graph), index);
}
}
}
}
|
graal/com.oracle.graal.replacements/src/com/oracle/graal/replacements/DefaultJavaLoweringProvider.java
|
/*
* Copyright (c) 2011, 2014, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package com.oracle.graal.replacements;
import static com.oracle.graal.api.code.MemoryBarriers.*;
import static com.oracle.graal.api.meta.DeoptimizationAction.*;
import static com.oracle.graal.api.meta.DeoptimizationReason.*;
import static com.oracle.graal.api.meta.LocationIdentity.*;
import static com.oracle.graal.nodes.java.ArrayLengthNode.*;
import java.util.*;
import com.oracle.graal.api.code.*;
import com.oracle.graal.api.meta.*;
import com.oracle.graal.api.replacements.*;
import com.oracle.graal.asm.*;
import com.oracle.graal.compiler.common.*;
import com.oracle.graal.compiler.common.type.*;
import com.oracle.graal.graph.*;
import com.oracle.graal.nodes.*;
import com.oracle.graal.nodes.HeapAccess.BarrierType;
import com.oracle.graal.nodes.calc.*;
import com.oracle.graal.nodes.extended.*;
import com.oracle.graal.nodes.java.*;
import com.oracle.graal.nodes.spi.*;
import com.oracle.graal.nodes.type.*;
import com.oracle.graal.nodes.virtual.*;
import com.oracle.graal.phases.util.*;
/**
* VM-independent lowerings for standard Java nodes. VM-specific methods are abstract and must be
* implemented by VM-specific subclasses.
*/
public abstract class DefaultJavaLoweringProvider implements LoweringProvider {
// Meta access for resolving Java types/fields during lowering.
protected final MetaAccessProvider metaAccess;
// Compilation target; supplies element sizes for array scaling.
protected final TargetDescription target;
// Installed lazily via initialize(); lowers Box/Unbox nodes.
private BoxingSnippets.Templates boxingSnippets;
/**
 * @param metaAccess provider for resolved Java types and fields
 * @param target description of the compilation target (word size, element sizes)
 */
public DefaultJavaLoweringProvider(MetaAccessProvider metaAccess, TargetDescription target) {
this.metaAccess = metaAccess;
this.target = target;
}
/**
 * Completes construction once the full provider set exists; installs the boxing snippet
 * templates used by {@code lower} for Box/Unbox nodes.
 */
public void initialize(Providers providers, SnippetReflectionProvider snippetReflection) {
boxingSnippets = new BoxingSnippets.Templates(providers, snippetReflection, target);
}
/**
 * Dispatches a {@link Lowerable} node to its specific lowering routine; throws for any node
 * type this provider does not handle so unhandled nodes fail loudly instead of surviving to
 * code generation.
 */
@Override
public void lower(Node n, LoweringTool tool) {
if (n instanceof LoadFieldNode) {
lowerLoadFieldNode((LoadFieldNode) n, tool);
} else if (n instanceof StoreFieldNode) {
lowerStoreFieldNode((StoreFieldNode) n, tool);
} else if (n instanceof LoadIndexedNode) {
lowerLoadIndexedNode((LoadIndexedNode) n, tool);
} else if (n instanceof StoreIndexedNode) {
lowerStoreIndexedNode((StoreIndexedNode) n, tool);
} else if (n instanceof ArrayLengthNode) {
lowerArrayLengthNode((ArrayLengthNode) n, tool);
} else if (n instanceof LoadHubNode) {
lowerLoadHubNode((LoadHubNode) n);
} else if (n instanceof CompareAndSwapNode) {
lowerCompareAndSwapNode((CompareAndSwapNode) n);
} else if (n instanceof AtomicReadAndWriteNode) {
lowerAtomicReadAndWriteNode((AtomicReadAndWriteNode) n);
} else if (n instanceof UnsafeLoadNode) {
lowerUnsafeLoadNode((UnsafeLoadNode) n, tool);
} else if (n instanceof UnsafeStoreNode) {
lowerUnsafeStoreNode((UnsafeStoreNode) n);
} else if (n instanceof JavaReadNode) {
lowerJavaReadNode((JavaReadNode) n);
} else if (n instanceof JavaWriteNode) {
lowerJavaWriteNode((JavaWriteNode) n);
} else if (n instanceof CommitAllocationNode) {
lowerCommitAllocationNode((CommitAllocationNode) n, tool);
} else if (n instanceof BoxNode) {
boxingSnippets.lower((BoxNode) n, tool);
} else if (n instanceof UnboxNode) {
boxingSnippets.lower((UnboxNode) n, tool);
} else {
throw GraalInternalError.shouldNotReachHere("Node implementing Lowerable not handled: " + n);
}
}
/**
 * Lowers a {@link LoadFieldNode} to a raw {@link ReadNode} at the field's constant location,
 * guarded by a null check on the receiver; volatile loads are bracketed with pre/post memory
 * barriers per the Java Memory Model.
 */
protected void lowerLoadFieldNode(LoadFieldNode loadField, LoweringTool tool) {
assert loadField.getKind() != Kind.Illegal;
StructuredGraph graph = loadField.graph();
ResolvedJavaField field = loadField.field();
// Static fields are addressed relative to a VM-specific static-field base object.
ValueNode object = loadField.isStatic() ? staticFieldBase(graph, field) : loadField.object();
Stamp loadStamp = loadStamp(loadField.stamp(), field.getKind(), true);
ConstantLocationNode location = createFieldLocation(graph, field, false);
assert location != null : "Field that is loaded must not be eliminated";
ReadNode memoryRead = graph.add(new ReadNode(object, location, loadStamp, fieldLoadBarrierType(field)));
ValueNode readValue = implicitLoadConvert(graph, field.getKind(), memoryRead);
loadField.replaceAtUsages(readValue);
graph.replaceFixed(loadField, memoryRead);
memoryRead.setGuard(createNullCheck(object, memoryRead, tool));
if (loadField.isVolatile()) {
// JMM: fence before and after a volatile read.
MembarNode preMembar = graph.add(new MembarNode(JMM_PRE_VOLATILE_READ));
graph.addBeforeFixed(memoryRead, preMembar);
MembarNode postMembar = graph.add(new MembarNode(JMM_POST_VOLATILE_READ));
graph.addAfterFixed(memoryRead, postMembar);
}
}
/**
 * Lowers a {@link StoreFieldNode} to a raw {@link WriteNode} with the field's constant
 * location, store barrier and null check; volatile stores get pre/post membars per the JMM.
 */
protected void lowerStoreFieldNode(StoreFieldNode storeField, LoweringTool tool) {
StructuredGraph graph = storeField.graph();
ResolvedJavaField field = storeField.field();
ValueNode object = storeField.isStatic() ? staticFieldBase(graph, field) : storeField.object();
ValueNode value = implicitStoreConvert(graph, storeField.field().getKind(), storeField.value());
ConstantLocationNode location = createFieldLocation(graph, field, false);
if (location == null) {
/* Field has been eliminated, so no write necessary. */
assert !storeField.isVolatile() : "missing memory barriers";
graph.removeFixed(storeField);
return;
}
WriteNode memoryWrite = graph.add(new WriteNode(object, value, location, fieldStoreBarrierType(storeField.field())));
memoryWrite.setStateAfter(storeField.stateAfter());
graph.replaceFixedWithFixed(storeField, memoryWrite);
memoryWrite.setGuard(createNullCheck(object, memoryWrite, tool));
if (storeField.isVolatile()) {
MembarNode preMembar = graph.add(new MembarNode(JMM_PRE_VOLATILE_WRITE));
graph.addBeforeFixed(memoryWrite, preMembar);
MembarNode postMembar = graph.add(new MembarNode(JMM_POST_VOLATILE_WRITE));
graph.addAfterFixed(memoryWrite, postMembar);
}
}
/**
 * Lowers a {@link LoadIndexedNode} to a bounds-checked raw {@link ReadNode} at the element's
 * indexed location, widening the result back to stack kind.
 */
protected void lowerLoadIndexedNode(LoadIndexedNode loadIndexed, LoweringTool tool) {
StructuredGraph graph = loadIndexed.graph();
Kind elementKind = loadIndexed.elementKind();
LocationNode location = createArrayLocation(graph, elementKind, loadIndexed.index(), false);
Stamp loadStamp = loadStamp(loadIndexed.stamp(), elementKind, true);
ReadNode memoryRead = graph.add(new ReadNode(loadIndexed.array(), location, loadStamp, BarrierType.NONE));
ValueNode readValue = implicitLoadConvert(graph, elementKind, memoryRead);
memoryRead.setGuard(createBoundsCheck(loadIndexed, tool));
loadIndexed.replaceAtUsages(readValue);
graph.replaceFixed(loadIndexed, memoryRead);
}
/**
 * Lowers a {@link StoreIndexedNode} to a bounds-checked raw {@link WriteNode}; object stores
 * that are not provably null get an array store check (static cast when the exact array type
 * is known, dynamic hub check otherwise).
 */
protected void lowerStoreIndexedNode(StoreIndexedNode storeIndexed, LoweringTool tool) {
StructuredGraph graph = storeIndexed.graph();
GuardingNode boundsCheck = createBoundsCheck(storeIndexed, tool);
Kind elementKind = storeIndexed.elementKind();
LocationNode location = createArrayLocation(graph, elementKind, storeIndexed.index(), false);
ValueNode value = storeIndexed.value();
ValueNode array = storeIndexed.array();
FixedWithNextNode checkCastNode = null;
if (elementKind == Kind.Object && !StampTool.isObjectAlwaysNull(value)) {
/* Array store check. */
ResolvedJavaType arrayType = StampTool.typeOrNull(array);
if (arrayType != null && StampTool.isExactType(array)) {
ResolvedJavaType elementType = arrayType.getComponentType();
// Storing into Object[] can never fail the store check, so skip it.
if (!MetaUtil.isJavaLangObject(elementType)) {
checkCastNode = graph.add(new CheckCastNode(elementType, value, null, true));
graph.addBeforeFixed(storeIndexed, checkCastNode);
value = checkCastNode;
}
} else {
ValueNode arrayClass = createReadHub(graph, array, boundsCheck);
ValueNode componentHub = createReadArrayComponentHub(graph, arrayClass, storeIndexed);
checkCastNode = graph.add(new CheckCastDynamicNode(componentHub, value, true));
graph.addBeforeFixed(storeIndexed, checkCastNode);
value = checkCastNode;
}
}
WriteNode memoryWrite = graph.add(new WriteNode(array, implicitStoreConvert(graph, elementKind, value), location, arrayStoreBarrierType(storeIndexed.elementKind())));
memoryWrite.setGuard(boundsCheck);
memoryWrite.setStateAfter(storeIndexed.stateAfter());
graph.replaceFixedWithFixed(storeIndexed, memoryWrite);
if (checkCastNode instanceof Lowerable) {
/* Recursive lowering of the store check node. */
((Lowerable) checkCastNode).lower(tool);
}
}
/**
 * Lowers an {@link ArrayLengthNode} to a null-checked raw read of the array length word.
 */
protected void lowerArrayLengthNode(ArrayLengthNode arrayLengthNode, LoweringTool tool) {
StructuredGraph graph = arrayLengthNode.graph();
ValueNode array = arrayLengthNode.array();
ConstantLocationNode location = ConstantLocationNode.create(ARRAY_LENGTH_LOCATION, Kind.Int, arrayLengthOffset(), graph);
ReadNode arrayLengthRead = graph.add(new ReadNode(array, location, StampFactory.positiveInt(), BarrierType.NONE));
arrayLengthRead.setGuard(createNullCheck(array, arrayLengthNode, tool));
graph.replaceFixedWithFixed(arrayLengthNode, arrayLengthRead);
}
/**
 * Lowers a {@link LoadHubNode} to a VM-specific hub read, deferred until guards are fixed.
 */
protected void lowerLoadHubNode(LoadHubNode loadHub) {
StructuredGraph graph = loadHub.graph();
if (graph.getGuardsStage().ordinal() < StructuredGraph.GuardsStage.FIXED_DEOPTS.ordinal()) {
// Too early: leave the node for a later lowering round.
return;
}
ValueNode hub = createReadHub(graph, loadHub.object(), loadHub.getGuard());
graph.replaceFloating(loadHub, hub);
}
/**
 * Lowers a {@link CompareAndSwapNode} to a {@link LoweredCompareAndSwapNode} with explicit
 * location, narrowed operands and the appropriate store barrier.
 */
protected void lowerCompareAndSwapNode(CompareAndSwapNode cas) {
StructuredGraph graph = cas.graph();
Kind valueKind = cas.getValueKind();
LocationNode location = createLocation(cas.offset(), cas.getLocationIdentity(), valueKind);
ValueNode expectedValue = implicitStoreConvert(graph, valueKind, cas.expected());
ValueNode newValue = implicitStoreConvert(graph, valueKind, cas.newValue());
LoweredCompareAndSwapNode atomicNode = graph.add(new LoweredCompareAndSwapNode(cas.object(), location, expectedValue, newValue, compareAndSwapBarrierType(cas)));
atomicNode.setStateAfter(cas.stateAfter());
graph.replaceFixedWithFixed(cas, atomicNode);
}
/**
 * Lowers an {@link AtomicReadAndWriteNode} (atomic exchange) to its lowered form; the new
 * value is narrowed before the write and the previous value widened for usages.
 */
protected void lowerAtomicReadAndWriteNode(AtomicReadAndWriteNode n) {
StructuredGraph graph = n.graph();
Kind valueKind = n.getValueKind();
LocationNode location = IndexedLocationNode.create(n.getLocationIdentity(), valueKind, 0, n.offset(), graph, 1);
ValueNode newValue = implicitStoreConvert(graph, valueKind, n.newValue());
LoweredAtomicReadAndWriteNode memoryRead = graph.add(new LoweredAtomicReadAndWriteNode(n.object(), location, newValue, atomicReadAndWriteBarrierType(n)));
memoryRead.setStateAfter(n.stateAfter());
ValueNode readValue = implicitLoadConvert(graph, valueKind, memoryRead);
n.replaceAtUsages(readValue);
graph.replaceFixedWithFixed(n, memoryRead);
}
/**
 * Lowers an {@link UnsafeLoadNode} to a {@link ReadNode}. A guarded load is anchored to a
 * {@link ConditionAnchorNode}; an unguarded load is only lowered after the floating-guards
 * stage and is pinned to its block.
 */
protected void lowerUnsafeLoadNode(UnsafeLoadNode load, @SuppressWarnings("unused") LoweringTool tool) {
    StructuredGraph graph = load.graph();
    if (load.getGuardingCondition() != null) {
        // Anchor the read below the condition so it cannot float above the check.
        ConditionAnchorNode valueAnchorNode = graph.add(new ConditionAnchorNode(load.getGuardingCondition()));
        ReadNode memoryRead = createUnsafeRead(graph, load, valueAnchorNode);
        graph.replaceFixedWithFixed(load, valueAnchorNode);
        graph.addAfterFixed(valueAnchorNode, memoryRead);
    } else if (graph.getGuardsStage().ordinal() > StructuredGraph.GuardsStage.FLOATING_GUARDS.ordinal()) {
        assert load.getKind() != Kind.Illegal;
        ReadNode memoryRead = createUnsafeRead(graph, load, null);
        // An unsafe read must not float outside its block otherwise
        // it may float above an explicit null check on its object.
        memoryRead.setGuard(BeginNode.prevBegin(load));
        graph.replaceFixedWithFixed(load, memoryRead);
    }
}
/**
 * Builds the lowered {@link ReadNode} for an unsafe load and rewires the load's usages
 * to the (possibly converted) read value.
 *
 * Object-kind accesses are always treated as compressible so that the implicit load
 * conversion inserts the uncompress operation. Previously a load whose base object was
 * the null constant skipped the uncompress, leaving a raw compressed oop as the result;
 * the uncompress must be inserted for null-constant bases as well.
 */
protected ReadNode createUnsafeRead(StructuredGraph graph, UnsafeLoadNode load, GuardingNode guard) {
    boolean compressible = load.accessKind() == Kind.Object;
    Kind readKind = load.accessKind();
    LocationNode location = createLocation(load);
    Stamp loadStamp = loadStamp(load.stamp(), readKind, compressible);
    ReadNode memoryRead = graph.add(new ReadNode(load.object(), location, loadStamp, guard, BarrierType.NONE));
    ValueNode readValue = implicitLoadConvert(graph, readKind, memoryRead, compressible);
    load.replaceAtUsages(readValue);
    return memoryRead;
}
/**
 * Lowers an {@link UnsafeStoreNode} to a {@link WriteNode} with an explicit location,
 * an implicitly converted value, and the appropriate GC barrier.
 */
protected void lowerUnsafeStoreNode(UnsafeStoreNode store) {
    StructuredGraph graph = store.graph();
    LocationNode location = createLocation(store);
    ValueNode object = store.object();
    // NOTE(review): compressibility here is derived from the value's kind, whereas the
    // unsafe-load path derives it from accessKind() - confirm the asymmetry is intended.
    boolean compressible = store.value().getKind() == Kind.Object;
    Kind valueKind = store.accessKind();
    ValueNode value = implicitStoreConvert(graph, valueKind, store.value(), compressible);
    WriteNode write = graph.add(new WriteNode(object, value, location, unsafeStoreBarrierType(store)));
    // Preserve the frame state for deoptimization after the store.
    write.setStateAfter(store.stateAfter());
    graph.replaceFixedWithFixed(store, write);
}
/**
 * Lowers a {@link JavaReadNode} - a read that already carries its location and barrier -
 * to a plain {@link ReadNode}, inserting the implicit load conversion for its usages.
 */
protected void lowerJavaReadNode(JavaReadNode read) {
    StructuredGraph graph = read.graph();
    Kind valueKind = read.location().getValueKind();
    Stamp loadStamp = loadStamp(read.stamp(), valueKind, read.isCompressible());
    ReadNode memoryRead = graph.add(new ReadNode(read.object(), read.location(), loadStamp, read.getBarrierType()));
    ValueNode readValue = implicitLoadConvert(graph, valueKind, memoryRead, read.isCompressible());
    // Carry the original guard over to the new read.
    memoryRead.setGuard(read.getGuard());
    read.replaceAtUsages(readValue);
    graph.replaceFixed(read, memoryRead);
}
/**
 * Lowers a {@link JavaWriteNode} to a plain {@link WriteNode}, applying the implicit
 * store conversion and preserving barrier type, initialization flag, state and guard.
 */
protected void lowerJavaWriteNode(JavaWriteNode write) {
    StructuredGraph graph = write.graph();
    Kind valueKind = write.location().getValueKind();
    ValueNode value = implicitStoreConvert(graph, valueKind, write.value(), write.isCompressible());
    WriteNode memoryWrite = graph.add(new WriteNode(write.object(), value, write.location(), write.getBarrierType(), write.isInitialization()));
    memoryWrite.setStateAfter(write.stateAfter());
    graph.replaceFixedWithFixed(write, memoryWrite);
    // NOTE(review): the guard is transferred after the fixed-node replacement, unlike
    // lowerJavaReadNode which sets it before - confirm the ordering is intentional.
    memoryWrite.setGuard(write.getGuard());
}
/**
 * Lowers a {@link CommitAllocationNode} (materialization point produced by partial escape
 * analysis) into concrete allocation nodes plus initializing writes.
 *
 * Two passes over the virtual objects are made: the first allocates each object and writes
 * all entries whose values are already available; entries that reference another virtual
 * object not yet allocated are recorded in {@code omittedValues}. The second pass fills in
 * those deferred object references (which need real GC barriers, not init barriers).
 * Finally locks are re-entered, usages rewired, and the new allocations recursively lowered.
 */
protected void lowerCommitAllocationNode(CommitAllocationNode commit, LoweringTool tool) {
    StructuredGraph graph = commit.graph();
    if (graph.getGuardsStage() == StructuredGraph.GuardsStage.FIXED_DEOPTS) {
        List<AbstractNewObjectNode> recursiveLowerings = new ArrayList<>();
        ValueNode[] allocations = new ValueNode[commit.getVirtualObjects().size()];
        // Positions in commit.getValues() whose write had to be deferred to pass two.
        BitSet omittedValues = new BitSet();
        int valuePos = 0;
        // Pass 1: allocate every virtual object and write non-deferred entries.
        for (int objIndex = 0; objIndex < commit.getVirtualObjects().size(); objIndex++) {
            VirtualObjectNode virtual = commit.getVirtualObjects().get(objIndex);
            int entryCount = virtual.entryCount();
            AbstractNewObjectNode newObject;
            if (virtual instanceof VirtualInstanceNode) {
                newObject = graph.add(new NewInstanceNode(virtual.type(), true));
            } else {
                newObject = graph.add(new NewArrayNode(((VirtualArrayNode) virtual).componentType(), ConstantNode.forInt(entryCount, graph), true));
            }
            recursiveLowerings.add(newObject);
            graph.addBeforeFixed(commit, newObject);
            allocations[objIndex] = newObject;
            for (int i = 0; i < entryCount; i++) {
                ValueNode value = commit.getValues().get(valuePos);
                if (value instanceof VirtualObjectNode) {
                    // May be null if the referenced virtual object is allocated later.
                    value = allocations[commit.getVirtualObjects().indexOf(value)];
                }
                if (value == null) {
                    omittedValues.set(valuePos);
                } else if (!(value.isConstant() && value.asConstant().isDefaultForKind())) {
                    // Constant.illegal is always the defaultForKind, so it is skipped
                    Kind valueKind = value.getKind();
                    Kind entryKind = virtual.entryKind(i);
                    // Truffle requires some leniency in terms of what can be put where:
                    Kind accessKind = valueKind.getStackKind() == entryKind.getStackKind() ? entryKind : valueKind;
                    assert valueKind.getStackKind() == entryKind.getStackKind() ||
                                    (valueKind == Kind.Long || valueKind == Kind.Double || (valueKind == Kind.Int && virtual instanceof VirtualArrayNode));
                    ConstantLocationNode location = null;
                    BarrierType barrierType = null;
                    if (virtual instanceof VirtualInstanceNode) {
                        ResolvedJavaField field = ((VirtualInstanceNode) virtual).field(i);
                        long offset = fieldOffset(field);
                        if (offset >= 0) {
                            location = ConstantLocationNode.create(initLocationIdentity(), accessKind, offset, graph);
                            barrierType = fieldInitializationBarrier(entryKind);
                        }
                    } else {
                        location = ConstantLocationNode.create(initLocationIdentity(), accessKind, arrayBaseOffset(entryKind) + i * arrayScalingFactor(entryKind), graph);
                        barrierType = arrayInitializationBarrier(entryKind);
                    }
                    if (location != null) {
                        WriteNode write = new WriteNode(newObject, implicitStoreConvert(graph, entryKind, value), location, barrierType);
                        graph.addAfterFixed(newObject, graph.add(write));
                    }
                }
                valuePos++;
            }
        }
        valuePos = 0;
        // Pass 2: write entries deferred above (cyclic/forward virtual-object references).
        for (int objIndex = 0; objIndex < commit.getVirtualObjects().size(); objIndex++) {
            VirtualObjectNode virtual = commit.getVirtualObjects().get(objIndex);
            int entryCount = virtual.entryCount();
            ValueNode newObject = allocations[objIndex];
            for (int i = 0; i < entryCount; i++) {
                if (omittedValues.get(valuePos)) {
                    ValueNode value = commit.getValues().get(valuePos);
                    assert value instanceof VirtualObjectNode;
                    ValueNode allocValue = allocations[commit.getVirtualObjects().indexOf(value)];
                    if (!(allocValue.isConstant() && allocValue.asConstant().isDefaultForKind())) {
                        assert virtual.entryKind(i) == Kind.Object && allocValue.getKind() == Kind.Object;
                        LocationNode location;
                        BarrierType barrierType;
                        if (virtual instanceof VirtualInstanceNode) {
                            VirtualInstanceNode virtualInstance = (VirtualInstanceNode) virtual;
                            location = createFieldLocation(graph, virtualInstance.field(i), true);
                            barrierType = BarrierType.IMPRECISE;
                        } else {
                            location = createArrayLocation(graph, virtual.entryKind(i), ConstantNode.forInt(i, graph), true);
                            barrierType = BarrierType.PRECISE;
                        }
                        if (location != null) {
                            WriteNode write = new WriteNode(newObject, implicitStoreConvert(graph, Kind.Object, allocValue), location, barrierType);
                            graph.addBeforeFixed(commit, graph.add(write));
                        }
                    }
                }
                valuePos++;
            }
        }
        finishAllocatedObjects(tool, commit, allocations);
        graph.removeFixed(commit);
        // Lower the freshly created allocation nodes themselves.
        for (AbstractNewObjectNode recursiveLowering : recursiveLowerings) {
            recursiveLowering.lower(tool);
        }
    }
}
/**
 * Completes lowering of a {@link CommitAllocationNode}'s allocations: anchors each new
 * object (so later uses see a fixed value), re-acquires any locks that were held on the
 * virtual objects, and rewires {@link AllocatedObjectNode} usages to the real allocations.
 */
public static void finishAllocatedObjects(LoweringTool tool, CommitAllocationNode commit, ValueNode[] allocations) {
    StructuredGraph graph = commit.graph();
    // Anchor every allocation; the anchor replaces the raw allocation in the array.
    for (int objIndex = 0; objIndex < commit.getVirtualObjects().size(); objIndex++) {
        FixedValueAnchorNode anchor = graph.add(new FixedValueAnchorNode(allocations[objIndex]));
        allocations[objIndex] = anchor;
        graph.addBeforeFixed(commit, anchor);
    }
    // Re-enter monitors recorded for each virtual object and lower them immediately.
    for (int objIndex = 0; objIndex < commit.getVirtualObjects().size(); objIndex++) {
        for (MonitorIdNode monitorId : commit.getLocks(objIndex)) {
            MonitorEnterNode enter = graph.add(new MonitorEnterNode(allocations[objIndex], monitorId));
            graph.addBeforeFixed(commit, enter);
            enter.lower(tool);
        }
    }
    // All remaining usages of the commit are AllocatedObjectNodes; point them at the anchors.
    for (Node usage : commit.usages().snapshot()) {
        AllocatedObjectNode addObject = (AllocatedObjectNode) usage;
        int index = commit.getVirtualObjects().indexOf(addObject.getVirtualObject());
        graph.replaceFloating(addObject, allocations[index]);
    }
}
/** Field loads never need a GC barrier by default; subclasses may override. */
protected BarrierType fieldLoadBarrierType(@SuppressWarnings("unused") ResolvedJavaField field) {
    return BarrierType.NONE;
}
/**
 * Stores into object-kind fields need an imprecise (card-marking) barrier;
 * primitive fields need none.
 */
protected BarrierType fieldStoreBarrierType(ResolvedJavaField field) {
    return field.getKind() == Kind.Object ? BarrierType.IMPRECISE : BarrierType.NONE;
}
/**
 * Stores into object arrays need a precise barrier (exact element location);
 * primitive arrays need none.
 */
protected BarrierType arrayStoreBarrierType(Kind elementKind) {
    return elementKind == Kind.Object ? BarrierType.PRECISE : BarrierType.NONE;
}
/** Barrier for a field write that initializes a freshly allocated object. */
protected BarrierType fieldInitializationBarrier(Kind entryKind) {
    if (entryKind == Kind.Object) {
        return BarrierType.IMPRECISE;
    }
    return BarrierType.NONE;
}
/** Barrier for an array-element write that initializes a freshly allocated array. */
protected BarrierType arrayInitializationBarrier(Kind entryKind) {
    if (entryKind == Kind.Object) {
        return BarrierType.PRECISE;
    }
    return BarrierType.NONE;
}
/** Barrier for an unsafe store: delegates to the generic object/value rule. */
protected BarrierType unsafeStoreBarrierType(UnsafeStoreNode store) {
    return storeBarrierType(store.object(), store.value());
}
/** Barrier for a compare-and-swap: based on the expected value's kind. */
protected BarrierType compareAndSwapBarrierType(CompareAndSwapNode cas) {
    return storeBarrierType(cas.object(), cas.expected());
}
/** Barrier for an atomic read-and-write: based on the new value's kind. */
protected BarrierType atomicReadAndWriteBarrierType(AtomicReadAndWriteNode n) {
    return storeBarrierType(n.object(), n.newValue());
}
/**
 * Selects the GC barrier for storing {@code value} into {@code object}: no barrier for
 * primitive values; an imprecise barrier when the base is statically known to be a
 * non-array object; otherwise a precise barrier (array or unknown base).
 */
protected BarrierType storeBarrierType(ValueNode object, ValueNode value) {
    if (value.getKind() != Kind.Object) {
        return BarrierType.NONE;
    }
    ResolvedJavaType type = StampTool.typeOrNull(object);
    boolean knownNonArray = type != null && !type.isArray();
    return knownNonArray ? BarrierType.IMPRECISE : BarrierType.PRECISE;
}
/** Byte offset of {@code field} within its holder, or a negative value if unavailable. */
protected abstract int fieldOffset(ResolvedJavaField field);
/** Base node from which a static field of {@code field}'s holder is addressed. */
protected abstract ValueNode staticFieldBase(StructuredGraph graph, ResolvedJavaField field);
/** Byte offset of the length word in an array object. */
protected abstract int arrayLengthOffset();
/** Byte offset of element 0 for arrays with the given element kind. */
protected abstract int arrayBaseOffset(Kind elementKind);
/** Size in bytes of one array element of the given kind (per-element stride). */
public int arrayScalingFactor(Kind elementKind) {
    return target.getSizeInBytes(elementKind);
}
/** Location identity used for writes that initialize freshly allocated memory. */
protected abstract LocationIdentity initLocationIdentity();
/**
 * Adjusts the stamp of a lowered read for sub-int kinds: boolean/byte reads are
 * narrowed to 8 bits and char/short reads to 16 bits; all other kinds keep the
 * stamp unchanged.
 */
protected Stamp loadStamp(Stamp stamp, Kind kind, @SuppressWarnings("unused") boolean compressible) {
    if (kind == Kind.Boolean || kind == Kind.Byte) {
        return StampTool.narrowingConversion(stamp, 8);
    }
    if (kind == Kind.Char || kind == Kind.Short) {
        return StampTool.narrowingConversion(stamp, 16);
    }
    return stamp;
}
/** Convenience overload: implicit load conversion with {@code compressible = true}. */
public ValueNode implicitLoadConvert(StructuredGraph graph, Kind kind, ValueNode value) {
    return implicitLoadConvert(graph, kind, value, true);
}
/**
 * Extends a sub-int value loaded from memory back to 32 bits: sign-extension for
 * byte/short, zero-extension for boolean/char. Other kinds pass through untouched.
 */
protected ValueNode implicitLoadConvert(StructuredGraph graph, Kind kind, ValueNode value, @SuppressWarnings("unused") boolean compressible) {
    if (kind == Kind.Byte || kind == Kind.Short) {
        return graph.unique(new SignExtendNode(value, 32));
    }
    if (kind == Kind.Boolean || kind == Kind.Char) {
        return graph.unique(new ZeroExtendNode(value, 32));
    }
    return value;
}
/** Convenience overload: implicit store conversion with {@code compressible = true}. */
public ValueNode implicitStoreConvert(StructuredGraph graph, Kind kind, ValueNode value) {
    return implicitStoreConvert(graph, kind, value, true);
}
/**
 * Narrows a 32-bit stack value down to the memory width of a sub-int kind before it is
 * stored: 8 bits for boolean/byte, 16 bits for char/short. Other kinds pass through.
 */
protected ValueNode implicitStoreConvert(StructuredGraph graph, Kind kind, ValueNode value, @SuppressWarnings("unused") boolean compressible) {
    if (kind == Kind.Boolean || kind == Kind.Byte) {
        return graph.unique(new NarrowNode(value, 8));
    }
    if (kind == Kind.Char || kind == Kind.Short) {
        return graph.unique(new NarrowNode(value, 16));
    }
    return value;
}
/** Platform-specific read of an object's hub (class pointer), protected by {@code guard}. */
protected abstract ValueNode createReadHub(StructuredGraph graph, ValueNode object, GuardingNode guard);
/** Platform-specific read of an array hub's component hub, anchored at {@code anchor}. */
protected abstract ValueNode createReadArrayComponentHub(StructuredGraph graph, ValueNode arrayHub, FixedNode anchor);
/**
 * Creates the constant location for accessing {@code field}, or null when the field's
 * offset is unavailable (negative). Initializing writes use the init location identity
 * instead of the field identity so they do not alias regular field accesses.
 */
protected ConstantLocationNode createFieldLocation(StructuredGraph graph, ResolvedJavaField field, boolean initialization) {
    int offset = fieldOffset(field);
    if (offset >= 0) {
        LocationIdentity loc = initialization ? initLocationIdentity() : field;
        return ConstantLocationNode.create(loc, field.getKind(), offset, graph);
    } else {
        return null;
    }
}
/** Location for an unsafe access, derived from its offset, identity and access kind. */
protected LocationNode createLocation(UnsafeAccessNode access) {
    return createLocation(access.offset(), access.getLocationIdentity(), access.accessKind());
}
/**
 * Builds a {@link LocationNode} for an access at {@code offsetNode}. A constant offset
 * yields a {@link ConstantLocationNode}. Otherwise the offset expression is pattern-matched
 * to recover displacement (added constant) and index scaling (left shift by 1-3) so an
 * {@link IndexedLocationNode} with an addressing-mode-friendly shape can be produced.
 */
protected LocationNode createLocation(ValueNode offsetNode, LocationIdentity locationIdentity, Kind accessKind) {
    ValueNode offset = offsetNode;
    if (offset.isConstant()) {
        long offsetValue = offset.asConstant().asLong();
        return ConstantLocationNode.create(locationIdentity, accessKind, offsetValue, offset.graph());
    }
    long displacement = 0;
    int indexScaling = 1;
    boolean signExtend = false;
    // Peel a widening sign-extension so the patterns below can match the 32-bit expression.
    if (offset instanceof SignExtendNode) {
        SignExtendNode extend = (SignExtendNode) offset;
        if (extend.getResultBits() == 64) {
            signExtend = true;
            offset = extend.getInput();
        }
    }
    // Pattern: index + constant  ->  constant becomes the displacement.
    if (offset instanceof IntegerAddNode) {
        IntegerAddNode integerAddNode = (IntegerAddNode) offset;
        if (integerAddNode.y() instanceof ConstantNode) {
            displacement = integerAddNode.y().asConstant().asLong();
            offset = integerAddNode.x();
        }
    }
    // Pattern: index << {1,2,3}  ->  shift becomes the index scaling factor (2, 4 or 8).
    if (offset instanceof LeftShiftNode) {
        LeftShiftNode leftShiftNode = (LeftShiftNode) offset;
        if (leftShiftNode.y() instanceof ConstantNode) {
            long shift = leftShiftNode.y().asConstant().asLong();
            if (shift >= 1 && shift <= 3) {
                if (shift == 1) {
                    indexScaling = 2;
                } else if (shift == 2) {
                    indexScaling = 4;
                } else {
                    indexScaling = 8;
                }
                offset = leftShiftNode.x();
            }
        }
    }
    if (signExtend) {
        // If we were using sign extended values before restore the sign extension.
        offset = offset.graph().addOrUnique(new SignExtendNode(offset, 64));
    }
    return IndexedLocationNode.create(locationIdentity, accessKind, displacement, offset, offset.graph(), indexScaling);
}
/**
 * Indexed location for array element {@code index} of the given kind: base offset plus
 * index scaled by the element size. Initializing writes use the init location identity.
 */
public IndexedLocationNode createArrayLocation(Graph graph, Kind elementKind, ValueNode index, boolean initialization) {
    LocationIdentity loc = initialization ? initLocationIdentity() : NamedLocationIdentity.getArrayLocation(elementKind);
    return IndexedLocationNode.create(loc, elementKind, arrayBaseOffset(elementKind), index, graph, arrayScalingFactor(elementKind));
}
/**
 * Creates the bounds-check guard for an indexed array access, or null when both the
 * length and the index are constants and the access is provably in range. When the
 * length is not statically known, an explicit (null-checked) array-length read is added.
 */
protected GuardingNode createBoundsCheck(AccessIndexedNode n, LoweringTool tool) {
    StructuredGraph graph = n.graph();
    ValueNode array = n.array();
    ValueNode arrayLength = readArrayLength(n.graph(), array, tool.getConstantReflection());
    if (arrayLength == null) {
        // Length unknown: read it from the array object, guarded by a null check.
        Stamp stamp = StampFactory.positiveInt();
        ReadNode readArrayLength = graph.add(new ReadNode(array, ConstantLocationNode.create(ARRAY_LENGTH_LOCATION, Kind.Int, arrayLengthOffset(), graph), stamp, BarrierType.NONE));
        graph.addBeforeFixed(n, readArrayLength);
        readArrayLength.setGuard(createNullCheck(array, readArrayLength, tool));
        arrayLength = readArrayLength;
    }
    if (arrayLength.isConstant() && n.index().isConstant()) {
        int l = arrayLength.asConstant().asInt();
        int i = n.index().asConstant().asInt();
        if (i >= 0 && i < l) {
            // unneeded range check
            return null;
        }
    }
    // Unsigned compare index < length also rejects negative indices in one test.
    return tool.createGuard(n, graph.unique(new IntegerBelowThanNode(n.index(), arrayLength)), BoundsCheckException, InvalidateReprofile);
}
/**
 * Creates a null-check guard for {@code object} anchored before {@code before}, or null
 * when the stamp already proves the object non-null.
 */
protected GuardingNode createNullCheck(ValueNode object, FixedNode before, LoweringTool tool) {
    if (StampTool.isObjectNonNull(object)) {
        // Statically non-null: no guard required.
        return null;
    }
    IsNullNode condition = before.graph().unique(new IsNullNode(object));
    return tool.createGuard(before, condition, DeoptimizationReason.NullCheckException, DeoptimizationAction.InvalidateReprofile, true);
}
@Override
public ValueNode reconstructArrayIndex(LocationNode location) {
    // Inverts createArrayLocation: recovers the element index from a lowered array
    // location by undoing the base-offset displacement and element-size scaling.
    Kind elementKind = location.getValueKind();
    assert location.getLocationIdentity().equals(NamedLocationIdentity.getArrayLocation(elementKind));
    long base;
    ValueNode index;
    int scale = arrayScalingFactor(elementKind);
    if (location instanceof ConstantLocationNode) {
        // Fully constant address: the whole index is folded into the displacement.
        base = ((ConstantLocationNode) location).getDisplacement();
        index = null;
    } else if (location instanceof IndexedLocationNode) {
        IndexedLocationNode indexedLocation = (IndexedLocationNode) location;
        assert indexedLocation.getIndexScaling() == scale;
        base = indexedLocation.getDisplacement();
        index = indexedLocation.getIndex();
    } else {
        throw GraalInternalError.shouldNotReachHere();
    }
    // Strip the array base offset, then convert the remaining byte displacement to elements.
    base -= arrayBaseOffset(elementKind);
    assert base >= 0 && base % scale == 0;
    base /= scale;
    assert NumUtil.isInt(base);
    StructuredGraph graph = location.graph();
    if (index == null) {
        return ConstantNode.forInt((int) base, graph);
    } else {
        if (base == 0) {
            return index;
        } else {
            // Index was split as (constant element offset + dynamic index); re-add them.
            return IntegerArithmeticNode.add(graph, ConstantNode.forInt((int) base, graph), index);
        }
    }
}
}
|
UnsafeLoad: insert uncompress operation also for null constant
|
graal/com.oracle.graal.replacements/src/com/oracle/graal/replacements/DefaultJavaLoweringProvider.java
|
UnsafeLoad: insert uncompress operation also for null constant
|
|
Java
|
bsd-3-clause
|
c4c056558c24b1992b7b02de07cef305baf379aa
| 0
|
NCIP/c3pr,NCIP/c3pr,NCIP/c3pr
|
package edu.duke.cabig.c3pr.domain;
import gov.nih.nci.cabig.ctms.domain.AbstractMutableDomainObject;
import org.hibernate.annotations.Cascade;
import org.hibernate.annotations.CascadeType;
import org.hibernate.annotations.GenericGenerator;
import org.hibernate.annotations.Parameter;
import javax.persistence.*;
/**
* @author Vinay Gangoli
*/
@Entity
@Table (name = "strat_criterion_answer")
@GenericGenerator(name="id-generator", strategy = "native",
    parameters = {
        @Parameter(name="sequence", value="STRAT_CRIT_ANSWER_ID_SEQ")
    }
)
/**
 * Pairs a stratification criterion (question) with one of its permissible answers.
 * Equality is defined by the question text / answer text combination.
 *
 * @author Vinay Gangoli
 */
public class StratificationCriterionAnswerCombination extends AbstractMutableDomainObject {

    private StratificationCriterion stratificationCriterion;

    private StratificationCriterionPermissibleAnswer stratificationCriterionPermissibleAnswer;

    public StratificationCriterionAnswerCombination(){
    }

    /** Builds the combination captured by a subject's stratification answer. */
    public StratificationCriterionAnswerCombination(SubjectStratificationAnswer ssa){
        this.stratificationCriterion = ssa.getStratificationCriterion();
        this.stratificationCriterionPermissibleAnswer = ssa.getStratificationCriterionAnswer();
    }

    @ManyToOne (fetch=FetchType.LAZY)
    @JoinColumn(name = "sc_id")
    @Cascade(value = { CascadeType.SAVE_UPDATE, CascadeType.MERGE})
    public StratificationCriterion getStratificationCriterion() {
        return stratificationCriterion;
    }

    public void setStratificationCriterion(
            StratificationCriterion stratificationCriterion) {
        this.stratificationCriterion = stratificationCriterion;
    }

    @ManyToOne (fetch=FetchType.LAZY)
    @JoinColumn(name = "scpa_id")
    @Cascade(value = { CascadeType.SAVE_UPDATE, CascadeType.MERGE})
    public StratificationCriterionPermissibleAnswer getStratificationCriterionPermissibleAnswer() {
        return stratificationCriterionPermissibleAnswer;
    }

    public void setStratificationCriterionPermissibleAnswer(
            StratificationCriterionPermissibleAnswer stratificationCriterionPermissibleAnswer) {
        this.stratificationCriterionPermissibleAnswer = stratificationCriterionPermissibleAnswer;
    }

    /**
     * Hash code based solely on the question text and permissible answer, consistent
     * with {@link #equals(Object)}. The previous version mixed in super.hashCode(),
     * so two equal combinations could hash differently and be lost in hash-based
     * collections; it also threw NullPointerException on partially populated objects.
     */
    @Override
    public int hashCode() {
        final int PRIME = 31;
        int result = 1;
        result = PRIME * result
                + (stratificationCriterion == null || stratificationCriterion.getQuestionText() == null
                        ? 0 : stratificationCriterion.getQuestionText().hashCode());
        result = PRIME * result
                + (stratificationCriterionPermissibleAnswer == null || stratificationCriterionPermissibleAnswer.getPermissibleAnswer() == null
                        ? 0 : stratificationCriterionPermissibleAnswer.getPermissibleAnswer().hashCode());
        return result;
    }

    /*
     * NOTE: As per this method two Stratum Groups are considered equal if they have the same question/answer combination.
     * In other words if they have the same stratification_cri_ans_combination.
     */
    @Override
    public boolean equals(Object obj){
        if (this == obj)
            return true;
        // equals(null) must return false per the Object contract; the previous
        // version dereferenced obj first and threw a NullPointerException.
        if (obj == null || getClass() != obj.getClass())
            return false;
        StratificationCriterionAnswerCombination scac = (StratificationCriterionAnswerCombination) obj;
        return scac.getStratificationCriterion().getQuestionText().equals(this.getStratificationCriterion().getQuestionText())
                && scac.getStratificationCriterionPermissibleAnswer().getPermissibleAnswer().equals(this.getStratificationCriterionPermissibleAnswer().getPermissibleAnswer());
    }
}
|
codebase/projects/core/src/java/edu/duke/cabig/c3pr/domain/StratificationCriterionAnswerCombination.java
|
package edu.duke.cabig.c3pr.domain;
import gov.nih.nci.cabig.ctms.domain.AbstractMutableDomainObject;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;
import javax.persistence.Table;
import org.hibernate.annotations.Cascade;
import org.hibernate.annotations.CascadeType;
import org.hibernate.annotations.GenericGenerator;
import org.hibernate.annotations.Parameter;
/**
* @author Vinay Gangoli
*/
@Entity
@Table (name = "stratification_criterion_answer_combination")
@GenericGenerator(name="id-generator", strategy = "native",
    parameters = {
        @Parameter(name="sequence", value="STRATIFICATION_CRITERION_ANSWER_COMBINATION_ID_SEQ")
    }
)
/**
 * Pairs a stratification criterion (question) with one of its permissible answers.
 * Equality is defined by the question text / answer text combination.
 *
 * @author Vinay Gangoli
 */
public class StratificationCriterionAnswerCombination extends AbstractMutableDomainObject {

    private StratificationCriterion stratificationCriterion;

    private StratificationCriterionPermissibleAnswer stratificationCriterionPermissibleAnswer;

    public StratificationCriterionAnswerCombination(){
    }

    /** Builds the combination captured by a subject's stratification answer. */
    public StratificationCriterionAnswerCombination(SubjectStratificationAnswer ssa){
        this.stratificationCriterion = ssa.getStratificationCriterion();
        this.stratificationCriterionPermissibleAnswer = ssa.getStratificationCriterionAnswer();
    }

    @ManyToOne (fetch=FetchType.LAZY)
    @JoinColumn(name = "sc_id")
    @Cascade(value = { CascadeType.SAVE_UPDATE, CascadeType.MERGE})
    public StratificationCriterion getStratificationCriterion() {
        return stratificationCriterion;
    }

    public void setStratificationCriterion(
            StratificationCriterion stratificationCriterion) {
        this.stratificationCriterion = stratificationCriterion;
    }

    @ManyToOne (fetch=FetchType.LAZY)
    @JoinColumn(name = "scpa_id")
    @Cascade(value = { CascadeType.SAVE_UPDATE, CascadeType.MERGE})
    public StratificationCriterionPermissibleAnswer getStratificationCriterionPermissibleAnswer() {
        return stratificationCriterionPermissibleAnswer;
    }

    public void setStratificationCriterionPermissibleAnswer(
            StratificationCriterionPermissibleAnswer stratificationCriterionPermissibleAnswer) {
        this.stratificationCriterionPermissibleAnswer = stratificationCriterionPermissibleAnswer;
    }

    /**
     * Hash code based solely on the question text and permissible answer, consistent
     * with {@link #equals(Object)}. The previous version mixed in super.hashCode(),
     * so two equal combinations could hash differently and be lost in hash-based
     * collections; it also threw NullPointerException on partially populated objects.
     */
    @Override
    public int hashCode() {
        final int PRIME = 31;
        int result = 1;
        result = PRIME * result
                + (stratificationCriterion == null || stratificationCriterion.getQuestionText() == null
                        ? 0 : stratificationCriterion.getQuestionText().hashCode());
        result = PRIME * result
                + (stratificationCriterionPermissibleAnswer == null || stratificationCriterionPermissibleAnswer.getPermissibleAnswer() == null
                        ? 0 : stratificationCriterionPermissibleAnswer.getPermissibleAnswer().hashCode());
        return result;
    }

    /*
     * NOTE: As per this method two Stratum Groups are considered equal if they have the same question/answer combination.
     * In other words if they have the same stratification_cri_ans_combination.
     */
    @Override
    public boolean equals(Object obj){
        if (this == obj)
            return true;
        // equals(null) must return false per the Object contract; the previous
        // version dereferenced obj first and threw a NullPointerException.
        if (obj == null || getClass() != obj.getClass())
            return false;
        StratificationCriterionAnswerCombination scac = (StratificationCriterionAnswerCombination) obj;
        return scac.getStratificationCriterion().getQuestionText().equals(this.getStratificationCriterion().getQuestionText())
                && scac.getStratificationCriterionPermissibleAnswer().getPermissibleAnswer().equals(this.getStratificationCriterionPermissibleAnswer().getPermissibleAnswer());
    }
}
|
refactor table name
|
codebase/projects/core/src/java/edu/duke/cabig/c3pr/domain/StratificationCriterionAnswerCombination.java
|
refactor table name
|
|
Java
|
bsd-3-clause
|
cafb2b588d086d7051854b456f7b02bf79341318
| 0
|
NCIP/camod,NCIP/camod,NCIP/camod,NCIP/camod
|
/**
* @author sguruswami
*
* $Id: ViewModelAction.java,v 1.69 2009-06-01 17:02:46 pandyas Exp $
*
* $Log: not supported by cvs2svn $
* Revision 1.68 2009/05/20 17:16:34 pandyas
* modified for gforge #17325 Upgrade caMOD to use caBIO 4.x and EVS 4.x to get data
*
* Revision 1.67 2009/03/25 16:24:58 pandyas
* modified for #17833 Make sure all references to Tranplantation are properly named
*
* Revision 1.66 2009/03/13 17:03:46 pandyas
* modified for #19205 Sort therapies in the order they are entered
*
* Revision 1.65 2008/08/14 17:07:03 pandyas
* remove debug line
*
* Revision 1.64 2008/08/14 17:01:42 pandyas
* modified debug line to use log
*
* Revision 1.63 2008/08/01 14:15:10 pandyas
* Modifed to prevent SQL inject - added HTTP Header clean
* App scan performed on July 30, 2008
*
* Revision 1.62 2008/07/28 17:19:02 pandyas
* Modifed to prevent SQL inject - added HTTP Header
* App scan performed on July 24, 2008
*
* Revision 1.61 2008/07/21 18:08:31 pandyas
* Modified to prevent SQL injection
* Scan performed on July 21, 2008
*
* Revision 1.60 2008/07/17 19:05:26 pandyas
* Modified to clean header to prevent SQL injection/Cross-Site Scripting
* Scan performed on July 16, 2008 by IRT
*
* Revision 1.59 2008/06/30 18:18:28 pandyas
* Removed code originally added for security scan when it caused null pointer errors
*
* Revision 1.58 2008/06/30 15:29:05 pandyas
* Modified to prevent Cross-Site Scripting
* Cleaned parameter name before proceeding
* Fixed code added in previous version
*
* Revision 1.57 2008/05/27 14:36:40 pandyas
* Modified to prevent SQL injection
* Cleaned HTTP Header before proceeding
* Re: Apps Scan run 05/23/2008
*
* Revision 1.56 2008/02/05 17:10:09 pandyas
* Removed debug statement for build to dev
*
* Revision 1.55 2008/02/05 17:09:34 pandyas
* Removed debug statement for build to dev
*
* Revision 1.54 2008/01/31 22:27:52 pandyas
* remove log printouts now that bug is resolved
*
* Revision 1.53 2008/01/31 22:23:22 pandyas
* remove log printouts now that bug is resolved
*
* Revision 1.52 2008/01/31 17:09:54 pandyas
* Modified to send new gene identifier (entrez gene id) to caBIO from new object location
*
* Revision 1.51 2008/01/28 18:45:18 pandyas
* Modified to debug caBIO data not returning to caMOD on dev
*
* Revision 1.50 2008/01/16 20:09:31 pandyas
* removed caBIO logging so the page renders when connection to caBIO fails
*
* Revision 1.49 2008/01/16 18:29:57 pandyas
* Renamed value to Transplant for #8290
*
* Revision 1.48 2008/01/10 15:55:01 pandyas
* modify output for final dev deployment
*
* Revision 1.47 2008/01/02 17:57:44 pandyas
* modified for #816 Connection to caELMIR - retrieve data for therapy search page
*
* Revision 1.46 2007/12/27 22:32:33 pandyas
* Modified for feature #8816 Connection to caELMIR - retrieve data for therapy search page
* Also added code to display Therapy link when only caELMIR data is available for a study
*
* Revision 1.45 2007/12/27 21:44:00 pandyas
* re-commit - changes did not show up in project
*
* Revision 1.44 2007/12/18 13:31:32 pandyas
* Added populate method for study data from caELMIRE for integration of Therapy study data
*
* Revision 1.43 2007/12/17 18:03:22 pandyas
* Removed * in searchFilter used for getting e-mail from LDAP
* Apps Support ticket was submitted (31169 - incorrect e-mail associated with my caMOD account) stating:
*
* Cheryl Marks submitted a ticket to NCICB Application Support in which she requested that the e-mail address associated with her account in the "User Settings" screen in caMOD be corrected. She has attempted to correct it herself, but because the program queries the LDAP Server for the e-mail address, her corrections were not retained.
*
* Revision 1.42 2007/12/04 13:49:19 pandyas
* Modified code for #8816 Connection to caELMIR - retrieve data for therapy search page
*
* Revision 1.41 2007/11/25 23:34:23 pandyas
* Initial version for feature #8816 Connection to caELMIR - retrieve data for therapy search page
*
* Revision 1.40 2007/10/31 18:39:30 pandyas
* Fixed #8188 Rename UnctrlVocab items to text entries
* Fixed #8290 Rename graft object into transplant object
*
* Revision 1.39 2007/09/14 18:53:37 pandyas
* Fixed Bug #8954: link to invivo detail page does not work
*
* Revision 1.38 2007/09/12 19:36:40 pandyas
* modified debug statements for build to stage tier
*
* Revision 1.37 2007/08/07 19:49:46 pandyas
* Removed reference to Transplant as per VCDE comments and after modification to object definition for CDE
*
* Revision 1.36 2007/08/07 18:26:20 pandyas
* Renamed to GRAFT as per VCDE comments
*
* Revision 1.35 2007/07/31 12:02:55 pandyas
* VCDE silver level and caMOD 2.3 changes
*
* Revision 1.34 2007/06/19 20:42:59 pandyas
* Users not logged in can not access the session property to check the model species. Therefore, we must show the attribute for all models.
*
* Revision 1.33 2007/06/19 18:39:21 pandyas
* Constant for species common name needs to be set for viewModelCharacteristics so it shows up for Zebrafish models
*
* Revision 1.32 2006/08/17 18:10:44 pandyas
* Defect# 410: Externalize properties files - Code changes to get properties
*
* Revision 1.31 2006/05/24 18:37:27 georgeda
* Workaround for bug in caBIO
*
* Revision 1.30 2006/05/09 18:57:54 georgeda
* Changes for searching on transient interfaces
*
* Revision 1.29 2006/05/08 13:43:15 georgeda
* Reformat and clean up warnings
*
* Revision 1.28 2006/04/19 19:31:58 georgeda
* Fixed display issue w/ GeneDelivery
*
* Revision 1.27 2006/04/19 18:50:01 georgeda
* Fixed issue w/ engineered genes displaying
*
* Revision 1.26 2006/04/17 19:09:41 pandyas
* caMod 2.1 OM changes
*
* Revision 1.25 2005/11/21 18:38:31 georgeda
* Defect #35. Trim whitespace from items that are freeform text
*
* Revision 1.24 2005/11/15 22:13:46 georgeda
* Cleanup of drug screening
*
* Revision 1.23 2005/11/14 14:21:44 georgeda
* Added sorting and spontaneous mutation
*
* Revision 1.22 2005/11/11 18:39:30 georgeda
* Removed unneeded call
*
* Revision 1.21 2005/11/10 22:07:36 georgeda
* Fixed part of bug #21
*
* Revision 1.20 2005/11/10 18:12:23 georgeda
* Use constant
*
* Revision 1.19 2005/11/07 13:57:39 georgeda
* Minor tweaks
*
* Revision 1.18 2005/11/03 15:47:11 georgeda
* Fixed slow invivo results
*
* Revision 1.17 2005/10/27 18:13:48 guruswas
* Show all publications in the publications display page.
*
* Revision 1.16 2005/10/20 21:35:37 georgeda
* Fixed xenograft display bug
*
* Revision 1.15 2005/10/19 18:56:00 guruswas
* implemented invivo details page
*
* Revision 1.14 2005/10/11 18:15:25 georgeda
* More comment changes
*
* Revision 1.13 2005/10/10 14:12:24 georgeda
* Changes for comment curation
*
* Revision 1.12 2005/10/07 21:15:03 georgeda
* Added caarray variables
*
* Revision 1.11 2005/10/06 13:37:01 georgeda
* Removed informational message
*
* Revision 1.10 2005/09/30 18:42:24 guruswas
* intial implementation of drug screening search and display page
*
* Revision 1.9 2005/09/22 21:34:51 guruswas
* First stab at carcinogenic intervention pages
*
* Revision 1.8 2005/09/22 15:23:41 georgeda
* Cleaned up warnings
*
* Revision 1.7 2005/09/21 21:02:24 guruswas
* Display the organ, disease names from NCI Thesaurus
*
* Revision 1.6 2005/09/21 20:47:16 georgeda
* Cleaned up
*
* Revision 1.5 2005/09/16 19:30:00 guruswas
* Display invivo data (from DTP) in the therapuetic approaches page
*
* Revision 1.4 2005/09/16 15:52:56 georgeda
* Changes due to manager re-write
*
*
*/
package gov.nih.nci.camod.webapp.action;
import edu.wustl.common.util.CaElmirInterfaceManager;
import gov.nih.nci.cabio.domain.Gene;
import gov.nih.nci.camod.Constants;
import gov.nih.nci.camod.domain.Agent;
import gov.nih.nci.camod.domain.AnimalModel;
import gov.nih.nci.camod.domain.CaelmirStudyData;
import gov.nih.nci.camod.domain.CarcinogenExposure;
import gov.nih.nci.camod.domain.Comments;
import gov.nih.nci.camod.domain.EngineeredGene;
import gov.nih.nci.camod.domain.GeneIdentifier;
import gov.nih.nci.camod.domain.GenomicSegment;
import gov.nih.nci.camod.domain.Transplantation;
import gov.nih.nci.camod.domain.InducedMutation;
import gov.nih.nci.camod.domain.Person;
import gov.nih.nci.camod.domain.SpontaneousMutation;
import gov.nih.nci.camod.domain.TargetedModification;
import gov.nih.nci.camod.domain.Therapy;
import gov.nih.nci.camod.domain.Transgene;
import gov.nih.nci.camod.service.AgentManager;
import gov.nih.nci.camod.service.AnimalModelManager;
import gov.nih.nci.camod.service.CommentsManager;
import gov.nih.nci.camod.service.PersonManager;
import gov.nih.nci.camod.service.TransplantationManager;
import gov.nih.nci.camod.service.impl.QueryManagerSingleton;
import gov.nih.nci.camod.util.SafeHTMLUtil;
import gov.nih.nci.common.domain.DatabaseCrossReference;
import gov.nih.nci.system.applicationservice.CaBioApplicationService;
import gov.nih.nci.system.client.ApplicationServiceProvider;
import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.PrintWriter;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLConnection;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.Vector;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import net.sf.json.JSONArray;
import net.sf.json.JSONObject;
import org.apache.struts.action.ActionForm;
import org.apache.struts.action.ActionForward;
import org.apache.struts.action.ActionMapping;
public class ViewModelAction extends BaseAction
{
    /**
     * Logs and, when it differs from the expected dispatch method, sanitizes
     * the "unprotected_method" request parameter to guard against cross-site
     * scripting. Null-safe: a missing parameter is returned as null instead of
     * triggering a NullPointerException (the original copy-pasted checks
     * called equals() on a possibly-null value).
     *
     * @param request the httpRequest
     * @param expectedName the method name expected for this action
     * @return the (possibly cleaned) method name, or null if absent
     */
    private String cleanMethodName(HttpServletRequest request, String expectedName)
    {
        String methodName = request.getParameter("unprotected_method");
        log.debug("methodName: " + methodName);
        if (methodName != null && !methodName.equals(expectedName))
        {
            methodName = SafeHTMLUtil.clean(methodName);
            log.debug("cleaned methodName: " + methodName);
        }
        return methodName;
    }

    /**
     * Looks up the animal model named by the request's model id parameter and
     * stores it in the session.
     *
     * @param request the httpRequest
     */
    private void setCancerModel(HttpServletRequest request)
    {
        String modelID = request.getParameter(Constants.Parameters.MODELID);
        log.debug("<setCancerModel> modelID: " + modelID);
        AnimalModelManager animalModelManager = (AnimalModelManager) getBean("animalModelManager");
        AnimalModel am = null;
        try
        {
            am = animalModelManager.get(modelID);
        }
        catch (Exception e)
        {
            // Log with the throwable instead of printStackTrace() so the
            // failure ends up in the application log
            log.error("Unable to get cancer model in setCancerModel", e);
        }
        request.getSession().setAttribute(Constants.ANIMALMODEL, am);
        // Guard against a failed lookup: the original code dereferenced
        // am.getId() unconditionally and threw NPE after a swallowed exception
        if (am != null)
        {
            // Set model id to display on subViewModelMenu on left menu bar
            request.getSession().setAttribute(Constants.MODELID, am.getId().toString());
        }
    }

    /**
     * Loads the comments for the given page section into the request. When a
     * specific comments id is supplied only that comment is loaded; otherwise
     * all comments that are approved or owned by the current user are fetched.
     *
     * @param request the httpRequest
     * @param inSection the page section whose comments are wanted
     * @throws Exception propagated from the comment/person managers
     */
    private void setComments(HttpServletRequest request,
                             String inSection) throws Exception
    {
        String theCommentsId = request.getParameter(Constants.Parameters.COMMENTSID);
        CommentsManager theCommentsManager = (CommentsManager) getBean("commentsManager");
        log.debug("Comments id: " + theCommentsId);
        List<Comments> theCommentsList = new ArrayList<Comments>();
        if (theCommentsId != null && theCommentsId.length() > 0)
        {
            Comments theComments = theCommentsManager.get(theCommentsId);
            if (theComments != null)
            {
                log.debug("Found a comment: " + theComments.getRemark());
                theCommentsList.add(theComments);
            }
        }
        // Get all comments that are either approved or owned by this user
        else
        {
            PersonManager thePersonManager = (PersonManager) getBean("personManager");
            Person theCurrentUser = thePersonManager.getByUsername((String) request.getSession().getAttribute(Constants.CURRENTUSER));
            AnimalModel theAnimalModel = (AnimalModel) request.getSession().getAttribute(Constants.ANIMALMODEL);
            theCommentsList = theCommentsManager.getAllBySection(inSection, theCurrentUser, theAnimalModel);
        }
        request.setAttribute(Constants.Parameters.COMMENTSLIST, theCommentsList);
    }

    /**
     * Populates the session with the model-characteristics data and comments.
     *
     * @param mapping the struts action mapping
     * @param form the web form
     * @param request HTTPRequest
     * @param response HTTPResponse
     * @return forward to the model characteristics page
     * @throws Exception never; failures are logged and the forward returned
     */
    public ActionForward populateModelCharacteristics(ActionMapping mapping,
                                                      ActionForm form,
                                                      HttpServletRequest request,
                                                      HttpServletResponse response) throws Exception
    {
        request.getSession(true);
        try {
            // get and clean header to prevent SQL injection
            String sID = null;
            if (request.getHeader("X-Forwarded-For") != null){
                sID = request.getHeader("X-Forwarded-For");
                log.debug("cleaned X-Forwarded-For: " + sID);
                sID = SafeHTMLUtil.clean(sID);
            }
            // get and clean header to prevent SQL injection
            if (request.getHeader("Referer") != null){
                sID = request.getHeader("Referer");
                log.debug("cleaned Referer: " + sID);
                sID = SafeHTMLUtil.clean(sID);
            }
            // Clean all headers for security scan (careful about what chars you allow)
            for (Enumeration e = request.getHeaderNames(); e.hasMoreElements();){
                String headername = (String) e.nextElement();
                log.debug("populateModelCharacteristics headername: " + headername);
                String cleanHeaders = SafeHTMLUtil.clean(headername);
                // Fixed: log the cleaned value (the original logged the raw
                // header under the "cleaned" label)
                log.debug("populateModelCharacteristics cleaned headername: " + cleanHeaders);
            }
            // Get and clean method to prevent Cross-Site Scripting
            cleanMethodName(request, "populateModelCharacteristics");
            setCancerModel(request);
            setComments(request, Constants.Pages.MODEL_CHARACTERISTICS);
            // Call method so therapy link displays for models with caELMIR-only data
            //caELMIR server went down and we experienced performance issues trying to connect
            //populateCaelmirTherapyDetails(mapping, form, request, response);
        }
        catch (Exception e)
        {
            log.error("Error in populateModelCharacteristics", e);
        }
        return mapping.findForward("viewModelCharacteristics");
    }

    /**
     * Populates the session with the engineered-gene collections of a model,
     * partitioned by concrete subtype (transgene, genomic segment, targeted
     * modification, induced mutation), plus the caBIO Gene lookup results for
     * targeted modifications that carry an Entrez gene id.
     *
     * @param mapping the struts action mapping
     * @param form the web form
     * @param request HTTPRequest
     * @param response HTTPResponse
     * @return forward to the genetic description page
     * @throws Exception propagated from manager lookups
     */
    public ActionForward populateEngineeredGene(ActionMapping mapping,
                                                ActionForm form,
                                                HttpServletRequest request,
                                                HttpServletResponse response) throws Exception
    {
        log.debug("<populateEngineeredGene> modelID" + request.getParameter("aModelID"));
        // Get and clean method to prevent Cross-Site Scripting
        cleanMethodName(request, "populateEngineeredGene");
        String modelID = request.getParameter("aModelID");
        AnimalModelManager animalModelManager = (AnimalModelManager) getBean("animalModelManager");
        AnimalModel am = animalModelManager.get(modelID);
        final Set egc = am.getEngineeredGeneCollection();
        final int egcCnt = (egc != null) ? egc.size() : 0;
        final List<EngineeredGene> tgc = new ArrayList<EngineeredGene>(); // Transgene
        final List<EngineeredGene> gsc = new ArrayList<EngineeredGene>(); // GenomicSegment
        final List<EngineeredGene> tmc = new ArrayList<EngineeredGene>(); // TargetedModification
        final List<EngineeredGene> imc = new ArrayList<EngineeredGene>(); // InducedMutation
        final Map<Long, Gene> tmGeneMap = new HashMap<Long, Gene>();
        final List<SpontaneousMutation> smc = new ArrayList<SpontaneousMutation>(am.getSpontaneousMutationCollection());
        Iterator it = egc.iterator();
        while (it.hasNext())
        {
            EngineeredGene eg = (EngineeredGene) it.next();
            if (eg instanceof Transgene)
            {
                tgc.add(eg);
            }
            else if (eg instanceof GenomicSegment)
            {
                gsc.add(eg);
            }
            else if (eg instanceof TargetedModification)
            {
                tmc.add(eg);
                // now go to caBIO and query the gene object....
                TargetedModification tm = (TargetedModification) eg;
                GeneIdentifier geneIdentifier = tm.getGeneIdentifier();
                if (geneIdentifier != null)
                {
                    log.info("Connecting to caBIO to look up gene " + geneIdentifier);
                    // the geneId is available
                    try
                    {
                        CaBioApplicationService appService = (CaBioApplicationService) ApplicationServiceProvider.getApplicationService();
                        log.info("appService: " + appService.toString());
                        // Query caBIO by the LOCUS_LINK_ID (Entrez gene id)
                        // cross reference attached to the modification
                        DatabaseCrossReference dcr = new DatabaseCrossReference();
                        dcr.setCrossReferenceId(geneIdentifier.getEntrezGeneID());
                        dcr.setType("gov.nih.nci.cabio.domain.Gene");
                        dcr.setDataSourceName("LOCUS_LINK_ID");
                        List<DatabaseCrossReference> cfcoll = new ArrayList<DatabaseCrossReference>();
                        cfcoll.add(dcr);
                        log.info("cfcoll.size(): " + cfcoll.size());
                        Gene myGene = new Gene();
                        myGene.setDatabaseCrossReferenceCollection(cfcoll);
                        List resultList = appService.search(Gene.class, myGene);
                        log.info("resultList.size(): " + resultList.size());
                        final int geneCount = (resultList != null) ? resultList.size() : 0;
                        log.info("Got " + geneCount + " Gene Objects");
                        if (geneCount > 0)
                        {
                            myGene = (Gene) resultList.get(0);
                            log.info("Gene:" + geneIdentifier + " ==>" + myGene);
                            tmGeneMap.put(tm.getId(), myGene);
                        }
                    }
                    catch (Exception e)
                    {
                        // A caBIO outage must not break the page; render
                        // without the gene details
                        log.error("ViewModelAction Unable to get information from caBIO", e);
                    }
                }
            }
            else if (eg instanceof InducedMutation)
            {
                imc.add(eg);
            }
        }
        log.info("<populateEngineeredGene> " + "egcCnt=" + egcCnt + "tgc=" + tgc.size() + "gsc=" + gsc.size() + "tmc=" + tmc.size() + "imc=" + imc.size());
        request.getSession().setAttribute(Constants.ANIMALMODEL, am);
        request.getSession().setAttribute(Constants.TRANSGENE_COLL, tgc);
        request.getSession().setAttribute(Constants.GENOMIC_SEG_COLL, gsc);
        request.getSession().setAttribute(Constants.TARGETED_MOD_COLL, tmc);
        request.getSession().setAttribute(Constants.TARGETED_MOD_GENE_MAP, tmGeneMap);
        request.getSession().setAttribute(Constants.INDUCED_MUT_COLL, imc);
        request.getSession().setAttribute(Constants.SPONTANEOUS_MUT_COLL, smc);
        log.debug("<populateEngineeredGene> set attributes done.");
        setComments(request, Constants.Pages.GENETIC_DESCRIPTION);
        return mapping.findForward("viewGeneticDescription");
    }

    /**
     * Populates the session with the model's carcinogenic interventions,
     * grouped into a map keyed by intervention type.
     *
     * @param mapping the struts action mapping
     * @param form the web form
     * @param request HTTPRequest
     * @param response HTTPResponse
     * @return forward to the carcinogenic interventions page
     * @throws Exception propagated from manager lookups
     */
    public ActionForward populateCarcinogenicInterventions(ActionMapping mapping,
                                                           ActionForm form,
                                                           HttpServletRequest request,
                                                           HttpServletResponse response) throws Exception
    {
        // Get and clean method to prevent Cross-Site Scripting
        cleanMethodName(request, "populateCarcinogenicInterventions");
        setCancerModel(request);
        String modelID = request.getParameter(Constants.Parameters.MODELID);
        AnimalModelManager animalModelManager = (AnimalModelManager) getBean("animalModelManager");
        AnimalModel am = animalModelManager.get(modelID);
        final Set ceColl = am.getCarcinogenExposureCollection();
        Iterator it = ceColl.iterator();
        final Map<String, List<Object>> interventionTypeMap = new HashMap<String, List<Object>>();
        while (it.hasNext())
        {
            CarcinogenExposure ce = (CarcinogenExposure) it.next();
            if (ce != null)
            {
                log.debug("Checking agent:" + ce.getEnvironmentalFactor().getNscNumber());
                // Fall back: curated type, then free-text alternate entry,
                // then a "Not specified" bucket
                String theType = ce.getEnvironmentalFactor().getType();
                if (theType == null || theType.length() == 0)
                {
                    theType = ce.getEnvironmentalFactor().getTypeAlternEntry();
                    if (theType == null || theType.length() == 0)
                    {
                        theType = "Not specified";
                    }
                }
                List<Object> theTypeColl = interventionTypeMap.get(theType);
                if (theTypeColl == null)
                {
                    theTypeColl = new ArrayList<Object>();
                    interventionTypeMap.put(theType, theTypeColl);
                }
                theTypeColl.add(ce);
            }
        }
        if (am.getGeneDeliveryCollection().size() > 0)
        {
            List<Object> theGeneDeliveryCollection = new ArrayList<Object>(am.getGeneDeliveryCollection());
            interventionTypeMap.put("GeneDelivery", theGeneDeliveryCollection);
        }
        request.getSession().setAttribute(Constants.CARCINOGENIC_INTERVENTIONS_COLL, interventionTypeMap);
        setComments(request, Constants.Pages.CARCINOGENIC_INTERVENTION);
        return mapping.findForward("viewCarcinogenicInterventions");
    }

    /**
     * Populates the session with all publications for the model.
     *
     * @param mapping the struts action mapping
     * @param form the web form
     * @param request HTTPRequest
     * @param response HTTPResponse
     * @return forward to the publications page
     * @throws Exception propagated from setComments
     */
    public ActionForward populatePublications(ActionMapping mapping,
                                              ActionForm form,
                                              HttpServletRequest request,
                                              HttpServletResponse response) throws Exception
    {
        // Get and clean method to prevent Cross-Site Scripting
        cleanMethodName(request, "populatePublications");
        setCancerModel(request);
        String modelID = request.getParameter("aModelID");
        List pubs = null;
        try
        {
            pubs = QueryManagerSingleton.instance().getAllPublications(Long.valueOf(modelID).longValue());
            log.debug("pubs.size(): " + pubs.size());
        }
        catch (Exception e)
        {
            // Log with the throwable instead of printStackTrace()
            log.error("Unable to get publications", e);
        }
        request.getSession().setAttribute(Constants.PUBLICATIONS, pubs);
        setComments(request, Constants.Pages.PUBLICATIONS);
        return mapping.findForward("viewPublications");
    }

    /**
     * Populates the session with the data needed for the histopathology page.
     *
     * @param mapping the struts action mapping
     * @param form the web form
     * @param request HTTPRequest
     * @param response HTTPResponse
     * @return forward to the histopathology page
     * @throws Exception propagated from setComments
     */
    public ActionForward populateHistopathology(ActionMapping mapping,
                                                ActionForm form,
                                                HttpServletRequest request,
                                                HttpServletResponse response) throws Exception
    {
        // Get and clean method to prevent Cross-Site Scripting
        cleanMethodName(request, "populateHistopathology");
        setCancerModel(request);
        setComments(request, Constants.Pages.HISTOPATHOLOGY);
        return mapping.findForward("viewHistopathology");
    }

    /**
     * Populates the session with the model's therapies plus, per agent,
     * its clinical protocols, yeast screening results, and invivo
     * (transplantation) results looked up by NSC number.
     *
     * @param mapping the struts action mapping
     * @param form the web form
     * @param request HTTPRequest
     * @param response HTTPResponse
     * @return forward to the therapeutic approaches page
     * @throws Exception propagated from manager lookups
     */
    public ActionForward populateTherapeuticApproaches(ActionMapping mapping,
                                                       ActionForm form,
                                                       HttpServletRequest request,
                                                       HttpServletResponse response) throws Exception
    {
        log.info("<ViewModelAction> populateTherapeuticApproaches");
        // Get and clean method to prevent Cross-Site Scripting
        cleanMethodName(request, "populateTherapeuticApproaches");
        setCancerModel(request);
        //
        // query caBIO and load clinical protocols information
        // store clinicalProtocol info in a hashmap keyed by NSC#
        //
        final HashMap<Long, Collection> clinProtocols = new HashMap<Long, Collection>();
        final HashMap<Long, Collection> yeastResults = new HashMap<Long, Collection>();
        final HashMap<Long, Collection> invivoResults = new HashMap<Long, Collection>();
        final List<Therapy> therapeuticApprochesColl = new ArrayList<Therapy>();
        String modelID = request.getParameter(Constants.Parameters.MODELID);
        AnimalModelManager animalModelManager = (AnimalModelManager) getBean("animalModelManager");
        AnimalModel am = animalModelManager.get(modelID);
        final Set therapyColl = am.getTherapyCollection();
        Iterator it = therapyColl.iterator();
        final int cc = (therapyColl != null) ? therapyColl.size() : 0;
        log.info("Looking up clinical protocols for " + cc + " agents...");
        // Loop-invariant: fetch the manager once instead of per iteration
        AgentManager myAgentManager = (AgentManager) getBean("agentManager");
        while (it.hasNext())
        {
            Therapy t = (Therapy) it.next();
            if (t == null)
            {
                // Skip null entries entirely; the original dereferenced
                // t.getAgent() outside the null guard and could NPE
                continue;
            }
            therapeuticApprochesColl.add(t);
            Agent a = t.getAgent();
            if (a != null)
            {
                Long nscNumber = a.getNscNumber();
                if (nscNumber != null)
                {
                    log.info("nscNumber: " + nscNumber);
                    Collection protocols = myAgentManager.getClinicalProtocols(a);
                    clinProtocols.put(nscNumber, protocols);
                    log.info("clinProtocols.size(): " + clinProtocols.size());
                    // get the yeast data
                    log.info("ViewModelAction.populateThearapeuticApproaches() calls AgentManager to get yeast data with useNscNumber=true.");
                    List yeastStages = myAgentManager.getYeastResults(a, true);
                    log.info("yeastStages.size(): " + yeastStages.size());
                    if (yeastStages.size() > 0)
                    {
                        yeastResults.put(a.getId(), yeastStages);
                    }
                    // now get invivo/Transplantation data
                    log.info("ViewModelAction.populateThearapeuticApproaches() calls QueryManager....getInvivoResults() with useNscNumber=true.");
                    List transplantationResults = QueryManagerSingleton.instance().getInvivoResults(a, true);
                    log.info("transplantationResults.size(): " + transplantationResults.size());
                    invivoResults.put(a.getId(), transplantationResults);
                }
            }
        }
        // Sort therapy in order entered as requested by user. Done once after
        // the loop; the original re-sorted the whole list on every iteration.
        Collections.sort(therapeuticApprochesColl);
        log.info("therapeuticApprochesColl: " + therapeuticApprochesColl.toString());
        request.getSession().setAttribute(Constants.THERAPEUTIC_APPROACHES_COLL, therapeuticApprochesColl);
        request.getSession().setAttribute(Constants.CLINICAL_PROTOCOLS, clinProtocols);
        request.getSession().setAttribute(Constants.YEAST_DATA, yeastResults);
        request.getSession().setAttribute(Constants.INVIVO_DATA, invivoResults);
        setComments(request, Constants.Pages.THERAPEUTIC_APPROACHES);
        //caELMIR server went down and we experienced performance issues trying to connect
        //populateCaelmirTherapyDetails(mapping, form, request, response);
        return mapping.findForward("viewTherapeuticApproaches");
    }

    /**
     * Fetches caELMIR study data for the model over HTTP (JSON interface) and
     * stores it in the session and on the model, so the therapy link displays
     * for models that have only caELMIR data.
     *
     * @param mapping the struts action mapping
     * @param form the web form
     * @param request HTTPRequest
     * @param response HTTPResponse
     * @return forward to the therapeutic approaches page
     * @throws Exception propagated from manager lookups
     */
    public ActionForward populateCaelmirTherapyDetails(ActionMapping mapping,
                                                       ActionForm form, HttpServletRequest request,
                                                       HttpServletResponse response) throws Exception {
        log.debug("<ViewModelAction> populateCaelmirTherapyDetails Enter");
        // Get and clean method to prevent Cross-Site Scripting
        cleanMethodName(request, "populateCaelmirTherapyDetails");
        setCancerModel(request);
        JSONArray jsonArray = new JSONArray();
        JSONObject jobj = new JSONObject();
        ArrayList caelmirStudyData = new ArrayList();
        String modelID = request.getParameter(Constants.Parameters.MODELID);
        AnimalModelManager theAnimalModelManager = (AnimalModelManager) getBean("animalModelManager");
        AnimalModel theAnimalModel = theAnimalModelManager.get(modelID);
        // Declared outside the try so they can be closed in finally; the
        // original leaked both streams on any exception path
        PrintWriter out = null;
        BufferedReader bufferedReader = null;
        try {
            log.debug("<ViewModelAction> populateCaelmirTherapyDetails Enter try");
            // Link to the inteface provided by caElmir
            URL url = new URL("http://chichen-itza.compmed.ucdavis.edu:8080/"
                    + CaElmirInterfaceManager.getStudyInfoUrl());
            // set your proxy server and port
            //System.setProperty("proxyHost", "ptproxy.persistent.co.in");
            //System.setProperty("proxyPort", "8080");
            URLConnection urlConnection = url.openConnection();
            // needs to be set to True for writing to the output stream.This
            // allows to pass data to the url.
            urlConnection.setDoOutput(true);
            JSONObject jsonObj = new JSONObject();
            // setting the model id.
            jsonObj.put(CaElmirInterfaceManager.getModelIdParameter(), modelID);
            out = new PrintWriter(urlConnection.getOutputStream());
            out.write(jsonObj.toString());
            out.flush();
            // start reading the response
            bufferedReader = new BufferedReader(
                    new InputStreamReader(urlConnection.getInputStream()));
            String resultStr = (String) bufferedReader.readLine();
            jsonArray = new JSONArray(resultStr);
            String status = ((JSONObject) jsonArray.get(0)).get(
                    CaElmirInterfaceManager.getStatusMessageKey())
                    .toString();
            // Important: first check for the status
            if (!CaElmirInterfaceManager.getSuccessKey().equals(status)) {
                // prints the status
                log.info(status);
            }
            // start reading study data from index 1 (index 0 is the status)
            for (int i = 1; i < jsonArray.length(); i++) {
                jobj = (JSONObject) jsonArray.get(i);
                CaelmirStudyData studyData = new CaelmirStudyData();
                studyData.setDescription(jobj.getString(CaElmirInterfaceManager.getStudyDesrciptionKey()));
                studyData.setEmail(jobj.getString(CaElmirInterfaceManager.getEmailKey()));
                studyData.setHypothesis(jobj.getString(CaElmirInterfaceManager.getStudyHypothesisKey()));
                studyData.setInstitution(jobj.getString(CaElmirInterfaceManager.getInstitutionKey()));
                studyData.setInvestigatorName(jobj.getString(CaElmirInterfaceManager.getPrimaryInvestigatorKey()));
                studyData.setStudyName(jobj.getString(CaElmirInterfaceManager.getStudyName()));
                studyData.setUrl(jobj.getString(CaElmirInterfaceManager.getStudyUrlKey()));
                caelmirStudyData.add(studyData);
            }
        } catch (MalformedURLException me) {
            log.debug("MalformedURLException: " + me);
        } catch (IOException ioe) {
            log.debug("IOException: " + ioe);
        } finally {
            // Always release the connection streams
            if (out != null) {
                out.close();
            }
            if (bufferedReader != null) {
                try {
                    bufferedReader.close();
                } catch (IOException ignored) {
                    // best-effort close; nothing more to do
                }
            }
        }
        // Set collection so therapy link will display if caELMIR data is available
        // Needed for models with caELMIR data but no caMOD data
        theAnimalModel.setCaelmirStudyDataCollection(caelmirStudyData);
        request.getSession().setAttribute(Constants.CAELMIR_STUDY_DATA,
                caelmirStudyData);
        return mapping.findForward("viewTherapeuticApproaches");
    }

    /**
     * Populates the session with the data needed for the cell lines page.
     *
     * @param mapping the struts action mapping
     * @param form the web form
     * @param request HTTPRequest
     * @param response HTTPResponse
     * @return forward to the cell lines page
     * @throws Exception propagated from setComments
     */
    public ActionForward populateCellLines(ActionMapping mapping,
                                           ActionForm form,
                                           HttpServletRequest request,
                                           HttpServletResponse response) throws Exception
    {
        // Get and clean method to prevent Cross-Site Scripting
        cleanMethodName(request, "populateCellLines");
        setCancerModel(request);
        setComments(request, Constants.Pages.CELL_LINES);
        return mapping.findForward("viewCellLines");
    }

    /**
     * Populates the session with the data for the transient interference page.
     *
     * @param mapping the struts action mapping
     * @param form the web form
     * @param request HTTPRequest
     * @param response HTTPResponse
     * @return forward to the transient interference page
     * @throws Exception propagated from setComments
     */
    public ActionForward populateTransientInterference(ActionMapping mapping,
                                                       ActionForm form,
                                                       HttpServletRequest request,
                                                       HttpServletResponse response) throws Exception
    {
        // Get and clean method to prevent Cross-Site Scripting
        cleanMethodName(request, "populateTransientInterference");
        setCancerModel(request);
        setComments(request, Constants.Pages.TRANSIENT_INTERFERENCE);
        return mapping.findForward("viewTransientInterference");
    }

    /**
     * Populates the session with the data needed for the images page.
     *
     * @param mapping the struts action mapping
     * @param form the web form
     * @param request HTTPRequest
     * @param response HTTPResponse
     * @return forward to the images page
     * @throws Exception propagated from setComments
     */
    public ActionForward populateImages(ActionMapping mapping,
                                        ActionForm form,
                                        HttpServletRequest request,
                                        HttpServletResponse response) throws Exception
    {
        // Get and clean method to prevent Cross-Site Scripting
        cleanMethodName(request, "populateImages");
        setCancerModel(request);
        setComments(request, Constants.Pages.IMAGES);
        return mapping.findForward("viewImages");
    }

    /**
     * Populates the request with the caArray URI fragments (read from the
     * external camod properties file) needed to link out to microarray data.
     *
     * @param mapping the struts action mapping
     * @param form the web form
     * @param request HTTPRequest
     * @param response HTTPResponse
     * @return forward to the microarrays page
     * @throws Exception propagated from setComments
     */
    public ActionForward populateMicroarrays(ActionMapping mapping,
                                             ActionForm form,
                                             HttpServletRequest request,
                                             HttpServletResponse response) throws Exception
    {
        // Get and clean method to prevent Cross-Site Scripting
        cleanMethodName(request, "populateMicroarrays");
        setCancerModel(request);
        //Get external properties file
        Properties camodProperties = new Properties();
        String camodPropertiesFileName = System.getProperty("gov.nih.nci.camod.camodProperties");
        FileInputStream in = null;
        try {
            in = new FileInputStream(camodPropertiesFileName);
            camodProperties.load(in);
        }
        catch (FileNotFoundException e) {
            log.error("Caught exception finding file for properties: ", e);
        } catch (IOException e) {
            log.error("Caught exception loading properties: ", e);
        } finally {
            // Fixed: the original never closed the stream (file handle leak)
            if (in != null) {
                try {
                    in.close();
                } catch (IOException ignored) {
                    // best-effort close
                }
            }
        }
        request.setAttribute("uri_start", camodProperties.getProperty("caarray.uri_start"));
        request.setAttribute("uri_end", camodProperties.getProperty("caarray.uri_end"));
        setComments(request, Constants.Pages.MICROARRAY);
        return mapping.findForward("viewMicroarrays");
    }

    /**
     * Populates the session with the data for the transplantation page.
     *
     * @param mapping the struts action mapping
     * @param form the web form
     * @param request HTTPRequest
     * @param response HTTPResponse
     * @return forward to the transplantation page
     * @throws Exception propagated from setComments
     */
    public ActionForward populateTransplantation(ActionMapping mapping,
                                                 ActionForm form,
                                                 HttpServletRequest request,
                                                 HttpServletResponse response) throws Exception
    {
        log.debug("<populateTransplantation> Enter:");
        // Get and clean method to prevent Cross-Site Scripting
        cleanMethodName(request, "populateTransplantation");
        setCancerModel(request);
        setComments(request, Constants.Pages.TRANSPLANTATION);
        log.debug("<populateTransplantation> Exit:");
        return mapping.findForward("viewTransplantation");
    }

    /**
     * Populates the session with a transplantation model's invivo results,
     * filtered by the requested NSC number.
     *
     * @param mapping the struts action mapping
     * @param form the web form
     * @param request HTTPRequest
     * @param response HTTPResponse
     * @return forward to the invivo details page, or back to model
     *         characteristics when no NSC number was supplied
     * @throws Exception propagated from manager lookups
     */
    public ActionForward populateTransplantationDetails(ActionMapping mapping,
                                                        ActionForm form,
                                                        HttpServletRequest request,
                                                        HttpServletResponse response) throws Exception
    {
        log.debug("<populateTransplantationDetails> Enter:");
        // Get and clean method to prevent Cross-Site Scripting
        cleanMethodName(request, "populateTransplantationDetails");
        String modelID = request.getParameter("tModelID");
        request.getSession().setAttribute(Constants.MODELID, modelID);
        String nsc = request.getParameter("nsc");
        // Fixed inverted guard: the original only bailed on a non-null empty
        // string and let a missing (null) NSC fall through to the query
        if (nsc == null || nsc.length() == 0)
        {
            return mapping.findForward("viewModelCharacteristics");
        }
        log.debug("<populateTransplantationDetails> modelID:" + modelID);
        log.debug("<populateTransplantationDetails> nsc:" + nsc);
        TransplantationManager mgr = (TransplantationManager) getBean("transplantationManager");
        Transplantation t = mgr.get(modelID);
        request.getSession().setAttribute(Constants.TRANSPLANTATIONMODEL, t);
        request.getSession().setAttribute(Constants.NSC_NUMBER, nsc);
        request.getSession().setAttribute(Constants.TRANSPLANTATIONRESULTLIST, t.getInvivoResultCollectionByNSC(nsc));
        return mapping.findForward("viewInvivoDetails");
    }
}
/*
 * NOTE(review): the content below this point is a concatenation artifact — a
 * stray file-path marker followed by a duplicated copy of this file's CVS
 * header, package declaration, and imports. It is not valid Java and should
 * be removed before compiling:
 * software/camod/src/gov/nih/nci/camod/webapp/action/ViewModelAction.java
 */
/**
* @author sguruswami
*
* $Id: ViewModelAction.java,v 1.69 2009-06-01 17:02:46 pandyas Exp $
*
* $Log: not supported by cvs2svn $
* Revision 1.68 2009/05/20 17:16:34 pandyas
* modified for gforge #17325 Upgrade caMOD to use caBIO 4.x and EVS 4.x to get data
*
* Revision 1.67 2009/03/25 16:24:58 pandyas
* modified for #17833 Make sure all references to Tranplantation are properly named
*
* Revision 1.66 2009/03/13 17:03:46 pandyas
* modified for #19205 Sort therapies in the order they are entered
*
* Revision 1.65 2008/08/14 17:07:03 pandyas
* remove debug line
*
* Revision 1.64 2008/08/14 17:01:42 pandyas
* modified debug line to use log
*
* Revision 1.63 2008/08/01 14:15:10 pandyas
* Modifed to prevent SQL inject - added HTTP Header clean
* App scan performed on July 30, 2008
*
* Revision 1.62 2008/07/28 17:19:02 pandyas
* Modifed to prevent SQL inject - added HTTP Header
* App scan performed on July 24, 2008
*
* Revision 1.61 2008/07/21 18:08:31 pandyas
* Modified to prevent SQL injection
* Scan performed on July 21, 2008
*
* Revision 1.60 2008/07/17 19:05:26 pandyas
* Modified to clean header to prevent SQL injection/Cross-Site Scripting
* Scan performed on July 16, 2008 by IRT
*
* Revision 1.59 2008/06/30 18:18:28 pandyas
* Removed code originally added for security scan when it caused null pointer errors
*
* Revision 1.58 2008/06/30 15:29:05 pandyas
* Modified to prevent Cross-Site Scripting
* Cleaned parameter name before proceeding
* Fixed code added in previous version
*
* Revision 1.57 2008/05/27 14:36:40 pandyas
* Modified to prevent SQL injection
* Cleaned HTTP Header before proceeding
* Re: Apps Scan run 05/23/2008
*
* Revision 1.56 2008/02/05 17:10:09 pandyas
* Removed debug statement for build to dev
*
* Revision 1.55 2008/02/05 17:09:34 pandyas
* Removed debug statement for build to dev
*
* Revision 1.54 2008/01/31 22:27:52 pandyas
* remove log printouts now that bug is resolved
*
* Revision 1.53 2008/01/31 22:23:22 pandyas
* remove log printouts now that bug is resolved
*
* Revision 1.52 2008/01/31 17:09:54 pandyas
* Modified to send new gene identifier (entrez gene id) to caBIO from new object location
*
* Revision 1.51 2008/01/28 18:45:18 pandyas
* Modified to debug caBIO data not returning to caMOD on dev
*
* Revision 1.50 2008/01/16 20:09:31 pandyas
* removed caBIO logging so the page renders when connection to caBIO fails
*
* Revision 1.49 2008/01/16 18:29:57 pandyas
* Renamed value to Transplant for #8290
*
* Revision 1.48 2008/01/10 15:55:01 pandyas
* modify output for final dev deployment
*
* Revision 1.47 2008/01/02 17:57:44 pandyas
* modified for #816 Connection to caELMIR - retrieve data for therapy search page
*
* Revision 1.46 2007/12/27 22:32:33 pandyas
* Modified for feature #8816 Connection to caELMIR - retrieve data for therapy search page
* Also added code to display Therapy link when only caELMIR data is available for a study
*
* Revision 1.45 2007/12/27 21:44:00 pandyas
* re-commit - changes did not show up in project
*
* Revision 1.44 2007/12/18 13:31:32 pandyas
* Added populate method for study data from caELMIRE for integration of Therapy study data
*
* Revision 1.43 2007/12/17 18:03:22 pandyas
* Removed * in searchFilter used for getting e-mail from LDAP
* Apps Support ticket was submitted (31169 - incorrect e-mail associated with my caMOD account) stating:
*
* Cheryl Marks submitted a ticket to NCICB Application Support in which she requested that the e-mail address associated with her account in the "User Settings" screen in caMOD be corrected. She has attempted to correct it herself, but because the program queries the LDAP Server for the e-mail address, her corrections were not retained.
*
* Revision 1.42 2007/12/04 13:49:19 pandyas
* Modified code for #8816 Connection to caELMIR - retrieve data for therapy search page
*
* Revision 1.41 2007/11/25 23:34:23 pandyas
* Initial version for feature #8816 Connection to caELMIR - retrieve data for therapy search page
*
* Revision 1.40 2007/10/31 18:39:30 pandyas
* Fixed #8188 Rename UnctrlVocab items to text entries
* Fixed #8290 Rename graft object into transplant object
*
* Revision 1.39 2007/09/14 18:53:37 pandyas
* Fixed Bug #8954: link to invivo detail page does not work
*
* Revision 1.38 2007/09/12 19:36:40 pandyas
* modified debug statements for build to stage tier
*
* Revision 1.37 2007/08/07 19:49:46 pandyas
* Removed reference to Transplant as per VCDE comments and after modification to object definition for CDE
*
* Revision 1.36 2007/08/07 18:26:20 pandyas
* Renamed to GRAFT as per VCDE comments
*
* Revision 1.35 2007/07/31 12:02:55 pandyas
* VCDE silver level and caMOD 2.3 changes
*
* Revision 1.34 2007/06/19 20:42:59 pandyas
* Users not logged in can not access the session property to check the model species. Therefore, we must show the attribute for all models.
*
* Revision 1.33 2007/06/19 18:39:21 pandyas
* Constant for species common name needs to be set for viewModelCharacteristics so it shows up for Zebrafish models
*
* Revision 1.32 2006/08/17 18:10:44 pandyas
* Defect# 410: Externalize properties files - Code changes to get properties
*
* Revision 1.31 2006/05/24 18:37:27 georgeda
* Workaround for bug in caBIO
*
* Revision 1.30 2006/05/09 18:57:54 georgeda
* Changes for searching on transient interfaces
*
* Revision 1.29 2006/05/08 13:43:15 georgeda
* Reformat and clean up warnings
*
* Revision 1.28 2006/04/19 19:31:58 georgeda
* Fixed display issue w/ GeneDelivery
*
* Revision 1.27 2006/04/19 18:50:01 georgeda
* Fixed issue w/ engineered genes displaying
*
* Revision 1.26 2006/04/17 19:09:41 pandyas
* caMod 2.1 OM changes
*
* Revision 1.25 2005/11/21 18:38:31 georgeda
* Defect #35. Trim whitespace from items that are freeform text
*
* Revision 1.24 2005/11/15 22:13:46 georgeda
* Cleanup of drug screening
*
* Revision 1.23 2005/11/14 14:21:44 georgeda
* Added sorting and spontaneous mutation
*
* Revision 1.22 2005/11/11 18:39:30 georgeda
* Removed unneeded call
*
* Revision 1.21 2005/11/10 22:07:36 georgeda
* Fixed part of bug #21
*
* Revision 1.20 2005/11/10 18:12:23 georgeda
* Use constant
*
* Revision 1.19 2005/11/07 13:57:39 georgeda
* Minor tweaks
*
* Revision 1.18 2005/11/03 15:47:11 georgeda
* Fixed slow invivo results
*
* Revision 1.17 2005/10/27 18:13:48 guruswas
* Show all publications in the publications display page.
*
* Revision 1.16 2005/10/20 21:35:37 georgeda
* Fixed xenograft display bug
*
* Revision 1.15 2005/10/19 18:56:00 guruswas
* implemented invivo details page
*
* Revision 1.14 2005/10/11 18:15:25 georgeda
* More comment changes
*
* Revision 1.13 2005/10/10 14:12:24 georgeda
* Changes for comment curation
*
* Revision 1.12 2005/10/07 21:15:03 georgeda
* Added caarray variables
*
* Revision 1.11 2005/10/06 13:37:01 georgeda
* Removed informational message
*
 * Revision 1.10 2005/09/30 18:42:24 guruswas
 * initial implementation of drug screening search and display page
*
* Revision 1.9 2005/09/22 21:34:51 guruswas
* First stab at carcinogenic intervention pages
*
* Revision 1.8 2005/09/22 15:23:41 georgeda
* Cleaned up warnings
*
* Revision 1.7 2005/09/21 21:02:24 guruswas
* Display the organ, disease names from NCI Thesaurus
*
* Revision 1.6 2005/09/21 20:47:16 georgeda
* Cleaned up
*
* Revision 1.5 2005/09/16 19:30:00 guruswas
* Display invivo data (from DTP) in the therapuetic approaches page
*
* Revision 1.4 2005/09/16 15:52:56 georgeda
* Changes due to manager re-write
*
*
*/
package gov.nih.nci.camod.webapp.action;
import edu.wustl.common.util.CaElmirInterfaceManager;
import gov.nih.nci.cabio.domain.Gene;
import gov.nih.nci.camod.Constants;
import gov.nih.nci.camod.domain.Agent;
import gov.nih.nci.camod.domain.AnimalModel;
import gov.nih.nci.camod.domain.CaelmirStudyData;
import gov.nih.nci.camod.domain.CarcinogenExposure;
import gov.nih.nci.camod.domain.Comments;
import gov.nih.nci.camod.domain.EngineeredGene;
import gov.nih.nci.camod.domain.GeneIdentifier;
import gov.nih.nci.camod.domain.GenomicSegment;
import gov.nih.nci.camod.domain.Transplantation;
import gov.nih.nci.camod.domain.InducedMutation;
import gov.nih.nci.camod.domain.Person;
import gov.nih.nci.camod.domain.SpontaneousMutation;
import gov.nih.nci.camod.domain.TargetedModification;
import gov.nih.nci.camod.domain.Therapy;
import gov.nih.nci.camod.domain.Transgene;
import gov.nih.nci.camod.service.AgentManager;
import gov.nih.nci.camod.service.AnimalModelManager;
import gov.nih.nci.camod.service.CommentsManager;
import gov.nih.nci.camod.service.PersonManager;
import gov.nih.nci.camod.service.TransplantationManager;
import gov.nih.nci.camod.service.impl.QueryManagerSingleton;
import gov.nih.nci.camod.util.SafeHTMLUtil;
import gov.nih.nci.common.domain.DatabaseCrossReference;
import gov.nih.nci.system.applicationservice.CaBioApplicationService;
import gov.nih.nci.system.client.ApplicationServiceProvider;
import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.PrintWriter;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLConnection;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.Vector;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import net.sf.json.JSONArray;
import net.sf.json.JSONObject;
import org.apache.struts.action.ActionForm;
import org.apache.struts.action.ActionForward;
import org.apache.struts.action.ActionMapping;
public class ViewModelAction extends BaseAction
{
/**
 * Looks up the animal model identified by the request's model-id parameter
 * and stores it in the session for the view pages.
 *
 * @param request
 *            the httpRequest carrying {@link Constants.Parameters#MODELID}
 */
private void setCancerModel(HttpServletRequest request)
{
    String modelID = request.getParameter(Constants.Parameters.MODELID);
    log.debug("<setCancerModel> modelID: " + modelID);
    AnimalModelManager animalModelManager = (AnimalModelManager) getBean("animalModelManager");
    AnimalModel am = null;
    try
    {
        am = animalModelManager.get(modelID);
    }
    catch (Exception e)
    {
        // Log the cause instead of printStackTrace() so failures are traceable
        log.error("Unable to get cancer model in setCancerModel", e);
    }
    request.getSession().setAttribute(Constants.ANIMALMODEL, am);
    // Guard against a failed lookup: am is null when get() threw, and
    // am.getId() would otherwise raise a NullPointerException here.
    if (am != null)
    {
        // Set model id to display on subViewModelMenu on left menu bar
        request.getSession().setAttribute(Constants.MODELID, am.getId().toString());
    }
}
/**
 * Loads the comments to display for one section of the model pages and
 * stores them in the request under COMMENTSLIST.
 *
 * If the request carries a specific comments id, only that single comment
 * is shown; otherwise all comments for the section that are either approved
 * or owned by the current user are loaded.
 *
 * @param request
 *            the httpRequest
 * @param inSection
 *            the page-section key the comments belong to
 * @throws Exception
 *             if the comment lookup fails
 */
private void setComments(HttpServletRequest request,
        String inSection) throws Exception
{
    String theCommentsId = request.getParameter(Constants.Parameters.COMMENTSID);
    CommentsManager theCommentsManager = (CommentsManager) getBean("commentsManager");
    log.debug("Comments id: " + theCommentsId);
    List<Comments> theCommentsList = new ArrayList<Comments>();
    // A specific comment id was passed: show just that comment (if found)
    if (theCommentsId != null && theCommentsId.length() > 0)
    {
        Comments theComments = theCommentsManager.get(theCommentsId);
        if (theComments != null)
        {
            log.debug("Found a comment: " + theComments.getRemark());
            theCommentsList.add(theComments);
        }
    }
    // Get all comments that are either approved or owned by this user
    else
    {
        PersonManager thePersonManager = (PersonManager) getBean("personManager");
        Person theCurrentUser = thePersonManager.getByUsername((String) request.getSession().getAttribute(Constants.CURRENTUSER));
        AnimalModel theAnimalModel = (AnimalModel) request.getSession().getAttribute(Constants.ANIMALMODEL);
        theCommentsList = theCommentsManager.getAllBySection(inSection, theCurrentUser, theAnimalModel);
    }
    request.setAttribute(Constants.Parameters.COMMENTSLIST, theCommentsList);
}
/**
 * Populates the session for the model characteristics page: sanitizes
 * request headers/parameters, stores the selected model and the section
 * comments, then forwards to the view.
 *
 * @param mapping the struts action mapping
 * @param form the web form
 * @param request HTTPRequest
 * @param response HTTPResponse
 * @return the forward to the model characteristics view
 * @throws Exception never; all failures are caught and logged
 */
public ActionForward populateModelCharacteristics(ActionMapping mapping,
        ActionForm form,
        HttpServletRequest request,
        HttpServletResponse response) throws Exception
{
    request.getSession(true);
    try {
        // get and clean header to prevent SQL injection
        String sID = null;
        if (request.getHeader("X-Forwarded-For") != null){
            sID = request.getHeader("X-Forwarded-For");
            log.debug("cleaned X-Forwarded-For: " + sID);
            sID = SafeHTMLUtil.clean(sID);
        }
        // get and clean header to prevent SQL injection
        if (request.getHeader("Referer") != null){
            sID = request.getHeader("Referer");
            log.debug("cleaned Referer: " + sID);
            sID = SafeHTMLUtil.clean(sID);
        }
        // Clean all headers for security scan (careful about what chars you allow)
        String headername = "";
        for(Enumeration e = request.getHeaderNames(); e.hasMoreElements();){
            headername = (String)e.nextElement();
            log.debug("populateModelCharacteristics headername: " + headername);
            String cleanHeaders = SafeHTMLUtil.clean(headername);
            // Log the cleaned value (the original logged the raw header again)
            log.debug("populateModelCharacteristics cleaned headername: " + cleanHeaders);
        }
        // Get and clean method to prevent Cross-Site Scripting. The null
        // check avoids a NullPointerException when the "unprotected_method"
        // request parameter is absent.
        String methodName = request.getParameter("unprotected_method");
        log.info("methodName: " + methodName);
        if (methodName != null && !methodName.equals("populateModelCharacteristics")){
            methodName = SafeHTMLUtil.clean(methodName);
            log.debug("cleaned methodName: " + methodName);
        }
        setCancerModel(request);
        setComments(request, Constants.Pages.MODEL_CHARACTERISTICS);
        // Call method so therapy link displays for models with caELMIR-only data
        //caELMIR server went down and we experienced performance issues trying to connect
        //populateCaelmirTherapyDetails(mapping, form, request, response);
    }
    catch (Exception e)
    {
        log.error("Error in populateModelCharacteristics", e);
    }
    return mapping.findForward("viewModelCharacteristics");
}
/**
 * Populates the session for the genetic description page. Splits the
 * model's engineered genes into transgene, genomic segment, targeted
 * modification and induced mutation collections, and for each targeted
 * modification with an Entrez gene id looks up the matching caBIO Gene.
 *
 * @param mapping the struts action mapping
 * @param form the web form
 * @param request HTTPRequest
 * @param response HTTPResponse
 * @return the forward to the genetic description view
 * @throws Exception if the model lookup fails
 */
public ActionForward populateEngineeredGene(ActionMapping mapping,
        ActionForm form,
        HttpServletRequest request,
        HttpServletResponse response) throws Exception
{
    log.debug("<populateEngineeredGene> modelID" + request.getParameter("aModelID"));
    // Get and clean method to prevent Cross-Site Scripting. The null check
    // avoids a NullPointerException when the parameter is absent.
    String methodName = request.getParameter("unprotected_method");
    log.debug("methodName: " + methodName);
    if (methodName != null && !methodName.equals("populateEngineeredGene")){
        methodName = SafeHTMLUtil.clean(methodName);
        log.debug("methodName: " + methodName);
    }
    String modelID = request.getParameter("aModelID");
    AnimalModelManager animalModelManager = (AnimalModelManager) getBean("animalModelManager");
    AnimalModel am = animalModelManager.get(modelID);
    final Set egc = am.getEngineeredGeneCollection();
    final int egcCnt = (egc != null) ? egc.size() : 0;
    final List<EngineeredGene> tgc = new ArrayList<EngineeredGene>();
    int tgCnt = 0;// Transgene
    final List<EngineeredGene> gsc = new ArrayList<EngineeredGene>();
    int gsCnt = 0;// GenomicSegment
    final List<EngineeredGene> tmc = new ArrayList<EngineeredGene>();
    int tmCnt = 0;// TargetedModification
    final Map<Long, Gene> tmGeneMap = new HashMap<Long, Gene>();
    final List<EngineeredGene> imc = new ArrayList<EngineeredGene>();
    final List<SpontaneousMutation> smc = new ArrayList<SpontaneousMutation>(am.getSpontaneousMutationCollection());
    int imCnt = 0;// InducedMutation
    // The egcCnt guard above shows egc may be null, but the original still
    // called egc.iterator() unconditionally -> NullPointerException. Only
    // iterate when the collection exists.
    if (egc != null)
    {
        Iterator it = egc.iterator();
        while (it.hasNext())
        {
            EngineeredGene eg = (EngineeredGene) it.next();
            if (eg instanceof Transgene)
            {
                tgc.add(eg);
                tgCnt++;
            }
            else if (eg instanceof GenomicSegment)
            {
                gsc.add(eg);
                gsCnt++;
            }
            else if (eg instanceof TargetedModification)
            {
                tmc.add(eg);
                tmCnt++;
                // now go to caBIO and query the gene object....
                TargetedModification tm = (TargetedModification) eg;
                GeneIdentifier geneIdentifier = tm.getGeneIdentifier();
                if (geneIdentifier != null)
                {
                    log.info("Connecting to caBIO to look up gene " + geneIdentifier);
                    // the geneId is available
                    try
                    {
                        CaBioApplicationService appService = (CaBioApplicationService)ApplicationServiceProvider.getApplicationService();
                        log.info("appService: " + appService.toString());
                        // caBIO cross-references Entrez ids via LOCUS_LINK_ID
                        DatabaseCrossReference dcr = new DatabaseCrossReference();
                        dcr.setCrossReferenceId(geneIdentifier.getEntrezGeneID());
                        dcr.setType("gov.nih.nci.cabio.domain.Gene");
                        dcr.setDataSourceName("LOCUS_LINK_ID");
                        List<DatabaseCrossReference> cfcoll = new ArrayList<DatabaseCrossReference>();
                        cfcoll.add(dcr);
                        log.info("cfcoll.size(): " + cfcoll.size());
                        Gene myGene = new Gene();
                        myGene.setDatabaseCrossReferenceCollection(cfcoll);
                        List resultList = appService.search(Gene.class, myGene);
                        log.info("resultList.size(): " + resultList.size());
                        final int geneCount = (resultList != null) ? resultList.size() : 0;
                        log.info("Got " + geneCount + " Gene Objects");
                        if (geneCount > 0)
                        {
                            // Keep only the first match for display
                            myGene = (Gene) resultList.get(0);
                            log.info("Gene:" + geneIdentifier + " ==>" + myGene);
                            tmGeneMap.put(tm.getId(), myGene);
                        }
                    }
                    catch (Exception e)
                    {
                        // Best-effort enrichment: the page still renders
                        // without the caBIO gene details.
                        log.error("ViewModelAction Unable to get information from caBIO", e);
                    }
                }
            }
            else if (eg instanceof InducedMutation)
            {
                imc.add(eg);
                imCnt++;
            }
        }
    }
    log.info("<populateEngineeredGene> " + "egcCnt=" + egcCnt + "tgc=" + tgCnt + "gsc=" + gsCnt + "tmc=" + tmCnt + "imc=" + imCnt);
    request.getSession().setAttribute(Constants.ANIMALMODEL, am);
    request.getSession().setAttribute(Constants.TRANSGENE_COLL, tgc);
    request.getSession().setAttribute(Constants.GENOMIC_SEG_COLL, gsc);
    request.getSession().setAttribute(Constants.TARGETED_MOD_COLL, tmc);
    request.getSession().setAttribute(Constants.TARGETED_MOD_GENE_MAP, tmGeneMap);
    request.getSession().setAttribute(Constants.INDUCED_MUT_COLL, imc);
    request.getSession().setAttribute(Constants.SPONTANEOUS_MUT_COLL, smc);
    log.debug("<populateEngineeredGene> set attributes done.");
    setComments(request, Constants.Pages.GENETIC_DESCRIPTION);
    return mapping.findForward("viewGeneticDescription");
}
/**
 * Populates the session for the carcinogenic interventions page. Groups the
 * model's carcinogen exposures by environmental-factor type (falling back to
 * the alternate entry, then "Not specified") and appends gene deliveries.
 *
 * @param mapping the struts action mapping
 * @param form the web form
 * @param request HTTPRequest
 * @param response HTTPResponse
 * @return the forward to the carcinogenic interventions view
 * @throws Exception if the model lookup fails
 */
public ActionForward populateCarcinogenicInterventions(ActionMapping mapping,
        ActionForm form,
        HttpServletRequest request,
        HttpServletResponse response) throws Exception
{
    // Get and clean method to prevent Cross-Site Scripting. The null check
    // avoids a NullPointerException when the parameter is absent.
    String methodName = request.getParameter("unprotected_method");
    log.debug("methodName: " + methodName);
    if (methodName != null && !methodName.equals("populateCarcinogenicInterventions")){
        methodName = SafeHTMLUtil.clean(methodName);
        log.debug("methodName: " + methodName);
    }
    setCancerModel(request);
    String modelID = request.getParameter(Constants.Parameters.MODELID);
    AnimalModelManager animalModelManager = (AnimalModelManager) getBean("animalModelManager");
    AnimalModel am = animalModelManager.get(modelID);
    final Set ceColl = am.getCarcinogenExposureCollection();
    final Map<String, List<Object>> interventionTypeMap = new HashMap<String, List<Object>>();
    // Guard the iteration: the persistence layer may return null collections
    // (the sibling populateEngineeredGene method null-checks the same way).
    if (ceColl != null)
    {
        Iterator it = ceColl.iterator();
        while (it.hasNext())
        {
            CarcinogenExposure ce = (CarcinogenExposure) it.next();
            if (ce != null)
            {
                log.debug("Checking agent:" + ce.getEnvironmentalFactor().getNscNumber());
                String theType = ce.getEnvironmentalFactor().getType();
                if (theType == null || theType.length() == 0)
                {
                    // Fall back to the free-form alternate entry, then a default
                    theType = ce.getEnvironmentalFactor().getTypeAlternEntry();
                    if (theType == null || theType.length() == 0)
                    {
                        theType = "Not specified";
                    }
                }
                // Lazily create one bucket per type and group the exposure into it
                List<Object> theTypeColl = interventionTypeMap.get(theType);
                if (theTypeColl == null)
                {
                    theTypeColl = new ArrayList<Object>();
                    interventionTypeMap.put(theType, theTypeColl);
                }
                theTypeColl.add(ce);
            }
        }
    }
    if (am.getGeneDeliveryCollection().size() > 0)
    {
        List<Object> theGeneDeliveryCollection = new ArrayList<Object>(am.getGeneDeliveryCollection());
        interventionTypeMap.put("GeneDelivery", theGeneDeliveryCollection);
    }
    request.getSession().setAttribute(Constants.CARCINOGENIC_INTERVENTIONS_COLL, interventionTypeMap);
    setComments(request, Constants.Pages.CARCINOGENIC_INTERVENTION);
    return mapping.findForward("viewCarcinogenicInterventions");
}
/**
 * Populates the session for the publications page with all publications
 * attached to the selected model.
 *
 * @param mapping the struts action mapping
 * @param form the web form
 * @param request HTTPRequest
 * @param response HTTPResponse
 * @return the forward to the publications view
 * @throws Exception if the comment lookup fails
 */
public ActionForward populatePublications(ActionMapping mapping,
        ActionForm form,
        HttpServletRequest request,
        HttpServletResponse response) throws Exception
{
    // Get and clean method to prevent Cross-Site Scripting. The null check
    // avoids a NullPointerException when the parameter is absent.
    String methodName = request.getParameter("unprotected_method");
    log.debug("methodName: " + methodName);
    if (methodName != null && !methodName.equals("populatePublications")){
        methodName = SafeHTMLUtil.clean(methodName);
        log.debug("methodName: " + methodName);
    }
    setCancerModel(request);
    String modelID = request.getParameter("aModelID");
    List pubs = null;
    try
    {
        // Also covers NumberFormatException from a malformed model id
        pubs = QueryManagerSingleton.instance().getAllPublications(Long.valueOf(modelID).longValue());
        log.debug("pubs.size(): " + pubs.size());
    }
    catch (Exception e)
    {
        // Log the cause instead of printStackTrace() so failures are traceable
        log.error("Unable to get publications", e);
    }
    request.getSession().setAttribute(Constants.PUBLICATIONS, pubs);
    setComments(request, Constants.Pages.PUBLICATIONS);
    return mapping.findForward("viewPublications");
}
/**
 * Populates the session for the histopathology page: stores the selected
 * model and the histopathology-section comments.
 *
 * @param mapping the struts action mapping
 * @param form the web form
 * @param request HTTPRequest
 * @param response HTTPResponse
 * @return the forward to the histopathology view
 * @throws Exception if the comment lookup fails
 */
public ActionForward populateHistopathology(ActionMapping mapping,
        ActionForm form,
        HttpServletRequest request,
        HttpServletResponse response) throws Exception
{
    // Get and clean method to prevent Cross-Site Scripting. The null check
    // avoids a NullPointerException when the parameter is absent.
    String methodName = request.getParameter("unprotected_method");
    log.debug("methodName: " + methodName);
    if (methodName != null && !methodName.equals("populateHistopathology")){
        methodName = SafeHTMLUtil.clean(methodName);
        log.debug("methodName: " + methodName);
    }
    setCancerModel(request);
    setComments(request, Constants.Pages.HISTOPATHOLOGY);
    return mapping.findForward("viewHistopathology");
}
/**
 * Populates the session for the therapeutic approaches page: the model's
 * therapies (sorted in entry order) plus, per agent, its clinical protocols
 * (keyed by NSC number), yeast screening results and invivo results.
 *
 * @param mapping the struts action mapping
 * @param form the web form
 * @param request HTTPRequest
 * @param response HTTPResponse
 * @return the forward to the therapeutic approaches view
 * @throws Exception if a lookup fails
 */
public ActionForward populateTherapeuticApproaches(ActionMapping mapping,
        ActionForm form,
        HttpServletRequest request,
        HttpServletResponse response) throws Exception
{
    log.info("<ViewModelAction> populateTherapeuticApproaches");
    // Get and clean method to prevent Cross-Site Scripting. The null check
    // avoids a NullPointerException when the parameter is absent.
    String methodName = request.getParameter("unprotected_method");
    log.debug("methodName: " + methodName);
    if (methodName != null && !methodName.equals("populateTherapeuticApproaches")){
        methodName = SafeHTMLUtil.clean(methodName);
        log.debug("methodName: " + methodName);
    }
    setCancerModel(request);
    //
    // query caBIO and load clinical protocols information
    // store clinicalProtocol info in a hashmap keyed by NSC#
    //
    final HashMap<Long, Collection> clinProtocols = new HashMap<Long, Collection>();
    final HashMap<Long, Collection> yeastResults = new HashMap<Long, Collection>();
    final HashMap<Long, Collection> invivoResults = new HashMap<Long, Collection>();
    final List<Therapy> therapeuticApprochesColl = new ArrayList<Therapy>();
    String modelID = request.getParameter(Constants.Parameters.MODELID);
    AnimalModelManager animalModelManager = (AnimalModelManager) getBean("animalModelManager");
    AnimalModel am = animalModelManager.get(modelID);
    final Set therapyColl = am.getTherapyCollection();
    final int cc = (therapyColl != null) ? therapyColl.size() : 0;
    log.info("Looking up clinical protocols for " + cc + " agents...");
    // Loop-invariant bean lookup hoisted out of the loop
    AgentManager myAgentManager = (AgentManager) getBean("agentManager");
    if (therapyColl != null)
    {
        Iterator it = therapyColl.iterator();
        while (it.hasNext())
        {
            Therapy t = (Therapy) it.next();
            if (t == null)
            {
                // The original null-checked t for the add but still called
                // t.getAgent() below -> NullPointerException. Skip instead.
                continue;
            }
            therapeuticApprochesColl.add(t);
            Agent a = t.getAgent();
            if (a != null)
            {
                Long nscNumber = a.getNscNumber();
                // Misplaced braces in the original guarded only the log call,
                // so a null NSC number was still stored as a map key. Guard
                // the whole protocol lookup.
                if (nscNumber != null)
                {
                    log.info("nscNumber: " + nscNumber);
                    Collection protocols = myAgentManager.getClinicalProtocols(a);
                    clinProtocols.put(nscNumber, protocols);
                    log.info("clinProtocols.size(): " + clinProtocols.size());
                }
                // get the yeast data (keyed by agent id, independent of NSC)
                List yeastStages = myAgentManager.getYeastResults(a, true);
                if (yeastStages.size() > 0)
                {
                    yeastResults.put(a.getId(), yeastStages);
                }
                // now get invivo/Transplantation data
                List transplantationResults = QueryManagerSingleton.instance().getInvivoResults(a, true);
                invivoResults.put(a.getId(), transplantationResults);
            }
        }
    }
    // Sort therapy in order entered as requested by user. Sorting once after
    // the loop replaces the original's re-sort on every iteration.
    Collections.sort(therapeuticApprochesColl);
    log.info("therapeuticApprochesColl: " + therapeuticApprochesColl.toString());
    request.getSession().setAttribute(Constants.THERAPEUTIC_APPROACHES_COLL, therapeuticApprochesColl);
    request.getSession().setAttribute(Constants.CLINICAL_PROTOCOLS, clinProtocols);
    request.getSession().setAttribute(Constants.YEAST_DATA, yeastResults);
    request.getSession().setAttribute(Constants.INVIVO_DATA, invivoResults);
    setComments(request, Constants.Pages.THERAPEUTIC_APPROACHES);
    //caELMIR server went down and we experienced performance issues trying to connect
    //populateCaelmirTherapyDetails(mapping, form, request, response);
    return mapping.findForward("viewTherapeuticApproaches");
}
/**
 * Fetches caELMIR study data for the selected model over HTTP (JSON), and
 * stores the resulting CaelmirStudyData list on the model and in the session
 * so the therapy link displays even for models with caELMIR-only data.
 *
 * NOTE(review): the caELMIR host is hard-coded below; confirm whether it
 * should come from configuration.
 *
 * @param mapping
 *            the struts action mapping
 * @param form
 *            the web form
 * @param request
 *            HTTPRequest
 * @param response
 *            HTTPResponse
 * @return the forward to the therapeutic approaches view
 * @throws Exception
 *             if the model lookup fails
 */
public ActionForward populateCaelmirTherapyDetails(ActionMapping mapping,
        ActionForm form, HttpServletRequest request,
        HttpServletResponse response) throws Exception {
    log.debug("<ViewModelAction> populateCaelmirTherapyDetails Enter");
    // Get and clean method to prevent Cross-Site Scripting
    String methodName = request.getParameter("unprotected_method");
    log.debug("methodName: " + methodName);
    if (!methodName.equals("populateCaelmirTherapyDetails")){
        methodName = SafeHTMLUtil.clean(methodName);
        log.debug("methodName: " + methodName);
    }
    setCancerModel(request);
    JSONArray jsonArray = new JSONArray();
    JSONObject jobj = new JSONObject();
    Vector h = new Vector(); // NOTE(review): unused; candidate for removal
    ArrayList caelmirStudyData = new ArrayList();
    String modelID = request.getParameter(Constants.Parameters.MODELID);
    AnimalModelManager theAnimalModelManager = (AnimalModelManager) getBean("animalModelManager");
    AnimalModel theAnimalModel = theAnimalModelManager.get(modelID);
    try {
        log.debug("<ViewModelAction> populateCaelmirTherapyDetails Enter try");
        // Link to the interface provided by caElmir
        URL url = new URL("http://chichen-itza.compmed.ucdavis.edu:8080/"
                + CaElmirInterfaceManager.getStudyInfoUrl());
        // set your proxy server and port
        //System.setProperty("proxyHost", "ptproxy.persistent.co.in");
        //System.setProperty("proxyPort", "8080");
        URLConnection urlConnection = url.openConnection();
        //log.debug("populateCaelmirTherapyDetails open connection");
        // needs to be set to True for writing to the output stream. This
        // allows to pass data to the url.
        urlConnection.setDoOutput(true);
        JSONObject jsonObj = new JSONObject();
        // setting the model id.
        jsonObj.put(CaElmirInterfaceManager.getModelIdParameter(), modelID);
        // POST the model id as a JSON body
        PrintWriter out = new PrintWriter(urlConnection.getOutputStream());
        out.write(jsonObj.toString());
        out.flush();
        out.close();
        //log.debug("populateCaelmirTherapyDetails created JSONObject");
        // start reading the response
        BufferedReader bufferedReader = new BufferedReader(
                new InputStreamReader(urlConnection.getInputStream()));
        if (bufferedReader != null) {
            // The whole reply arrives as a single JSON-array line
            String resultStr = (String) bufferedReader.readLine();
            jsonArray = new JSONArray(resultStr);
            String status = null;
            // Element 0 is a status envelope; study records start at index 1
            status = ((JSONObject) jsonArray.get(0)).get(
                    CaElmirInterfaceManager.getStatusMessageKey())
                    .toString();
            //log.debug("populateCaelmirTherapyDetails status: " + status);
            // Important: first check for the status
            if (!CaElmirInterfaceManager.getSuccessKey().equals(status)) {
                // prints the status
                log.info(status);
            }
            CaelmirStudyData studyData = new CaelmirStudyData();
            // start reading study data from index 1
            for (int i = 1; i < jsonArray.length(); i++) {
                jobj = (JSONObject) jsonArray.get(i);
                studyData = new CaelmirStudyData();
                studyData.setDescription(jobj.getString(CaElmirInterfaceManager.getStudyDesrciptionKey()));
                studyData.setEmail(jobj.getString(CaElmirInterfaceManager.getEmailKey()));
                studyData.setHypothesis(jobj.getString(CaElmirInterfaceManager.getStudyHypothesisKey()));
                studyData.setInstitution(jobj.getString(CaElmirInterfaceManager.getInstitutionKey()));
                studyData.setInvestigatorName(jobj.getString(CaElmirInterfaceManager.getPrimaryInvestigatorKey()));
                studyData.setStudyName(jobj.getString(CaElmirInterfaceManager.getStudyName()));
                studyData.setUrl(jobj.getString(CaElmirInterfaceManager.getStudyUrlKey()));
                caelmirStudyData.add(studyData);
            }
        }
    } catch (MalformedURLException me) {
        log.debug("MalformedURLException: " + me);
    } catch (IOException ioe) {
        // Best-effort: page still renders without caELMIR data
        log.debug("IOException: " + ioe);
    }
    // Set collection so therapy link will display if caELMIR data is available
    // Needed for models with caELMIR data but no caMOD data
    theAnimalModel.setCaelmirStudyDataCollection(caelmirStudyData);
    request.getSession().setAttribute(Constants.CAELMIR_STUDY_DATA,
            caelmirStudyData);
    return mapping.findForward("viewTherapeuticApproaches");
}
/**
 * Populates the session for the cell lines page: stores the selected model
 * and the cell-lines-section comments.
 *
 * @param mapping the struts action mapping
 * @param form the web form
 * @param request HTTPRequest
 * @param response HTTPResponse
 * @return the forward to the cell lines view
 * @throws Exception if the comment lookup fails
 */
public ActionForward populateCellLines(ActionMapping mapping,
        ActionForm form,
        HttpServletRequest request,
        HttpServletResponse response) throws Exception
{
    // Get and clean method to prevent Cross-Site Scripting. The null check
    // avoids a NullPointerException when the parameter is absent.
    String methodName = request.getParameter("unprotected_method");
    log.debug("methodName: " + methodName);
    if (methodName != null && !methodName.equals("populateCellLines")){
        methodName = SafeHTMLUtil.clean(methodName);
        log.debug("methodName: " + methodName);
    }
    setCancerModel(request);
    setComments(request, Constants.Pages.CELL_LINES);
    return mapping.findForward("viewCellLines");
}
/**
 * Populates the session for the transient interference page: stores the
 * selected model and the section comments.
 *
 * @param mapping the struts action mapping
 * @param form the web form
 * @param request HTTPRequest
 * @param response HTTPResponse
 * @return the forward to the transient interference view
 * @throws Exception if the comment lookup fails
 */
public ActionForward populateTransientInterference(ActionMapping mapping,
        ActionForm form,
        HttpServletRequest request,
        HttpServletResponse response) throws Exception
{
    // Get and clean method to prevent Cross-Site Scripting. The null check
    // avoids a NullPointerException when the parameter is absent.
    String methodName = request.getParameter("unprotected_method");
    log.debug("methodName: " + methodName);
    if (methodName != null && !methodName.equals("populateTransientInterference")){
        methodName = SafeHTMLUtil.clean(methodName);
        log.debug("methodName: " + methodName);
    }
    setCancerModel(request);
    setComments(request, Constants.Pages.TRANSIENT_INTERFERENCE);
    return mapping.findForward("viewTransientInterference");
}
/**
 * Populates the session for the images page: stores the selected model and
 * the images-section comments.
 *
 * @param mapping the struts action mapping
 * @param form the web form
 * @param request HTTPRequest
 * @param response HTTPResponse
 * @return the forward to the images view
 * @throws Exception if the comment lookup fails
 */
public ActionForward populateImages(ActionMapping mapping,
        ActionForm form,
        HttpServletRequest request,
        HttpServletResponse response) throws Exception
{
    // Get and clean method to prevent Cross-Site Scripting. The null check
    // avoids a NullPointerException when the parameter is absent.
    String methodName = request.getParameter("unprotected_method");
    log.debug("methodName: " + methodName);
    if (methodName != null && !methodName.equals("populateImages")){
        methodName = SafeHTMLUtil.clean(methodName);
        log.debug("methodName: " + methodName);
    }
    setCancerModel(request);
    setComments(request, Constants.Pages.IMAGES);
    return mapping.findForward("viewImages");
}
/**
 * Populates the session for the microarrays page: stores the selected model,
 * loads the external camod properties file for the caArray link pieces, and
 * stores the microarray-section comments.
 *
 * @param mapping the struts action mapping
 * @param form the web form
 * @param request HTTPRequest
 * @param response HTTPResponse
 * @return the forward to the microarrays view
 * @throws Exception if the comment lookup fails
 */
public ActionForward populateMicroarrays(ActionMapping mapping,
        ActionForm form,
        HttpServletRequest request,
        HttpServletResponse response) throws Exception
{
    // Get and clean method to prevent Cross-Site Scripting. The null check
    // avoids a NullPointerException when the parameter is absent.
    String methodName = request.getParameter("unprotected_method");
    log.debug("methodName: " + methodName);
    if (methodName != null && !methodName.equals("populateMicroarrays")){
        methodName = SafeHTMLUtil.clean(methodName);
        log.debug("methodName: " + methodName);
    }
    setCancerModel(request);
    //Get external properties file
    Properties camodProperties = new Properties();
    String camodPropertiesFileName = System.getProperty("gov.nih.nci.camod.camodProperties");
    FileInputStream in = null;
    try {
        in = new FileInputStream(camodPropertiesFileName);
        camodProperties.load(in);
    }
    catch (FileNotFoundException e) {
        log.error("Caught exception finding file for properties: ", e);
    } catch (IOException e) {
        log.error("Caught exception finding file for properties: ", e);
    } finally {
        // The original leaked the FileInputStream; always close it
        if (in != null) {
            try {
                in.close();
            } catch (IOException e) {
                log.error("Unable to close properties file", e);
            }
        }
    }
    request.setAttribute("uri_start", camodProperties.getProperty("caarray.uri_start"));
    request.setAttribute("uri_end", camodProperties.getProperty("caarray.uri_end"));
    setComments(request, Constants.Pages.MICROARRAY);
    return mapping.findForward("viewMicroarrays");
}
/**
 * Populates the session for the transplantation page: stores the selected
 * model and the transplantation-section comments.
 *
 * @param mapping the struts action mapping
 * @param form the web form
 * @param request HTTPRequest
 * @param response HTTPResponse
 * @return the forward to the transplantation view
 * @throws Exception if the comment lookup fails
 */
public ActionForward populateTransplantation(ActionMapping mapping,
        ActionForm form,
        HttpServletRequest request,
        HttpServletResponse response) throws Exception
{
    log.debug("<populateTransplantation> Enter:");
    // Get and clean method to prevent Cross-Site Scripting. The null check
    // avoids a NullPointerException when the parameter is absent.
    String methodName = request.getParameter("unprotected_method");
    log.debug("methodName: " + methodName);
    if (methodName != null && !methodName.equals("populateTransplantation")){
        methodName = SafeHTMLUtil.clean(methodName);
        log.debug("methodName: " + methodName);
    }
    setCancerModel(request);
    setComments(request, Constants.Pages.TRANSPLANTATION);
    log.debug("<populateTransplantation> Exit:");
    return mapping.findForward("viewTransplantation");
}
/**
 * Populates the session for the invivo details page: loads the
 * transplantation model and the invivo results matching the given NSC number.
 *
 * @param mapping
 *            the struts action mapping
 * @param form
 *            the web form
 * @param request
 *            HTTPRequest
 * @param response
 *            HTTPResponse
 * @return the forward to the invivo details view, or back to the model
 *         characteristics view when the nsc parameter is empty
 * @throws Exception
 *             if the transplantation lookup fails
 */
public ActionForward populateTransplantationDetails(ActionMapping mapping,
        ActionForm form,
        HttpServletRequest request,
        HttpServletResponse response) throws Exception
{
    log.debug("<populateTransplantationDetails> Enter:");
    // Get and clean method to prevent Cross-Site Scripting
    String methodName = request.getParameter("unprotected_method");
    log.debug("methodName: " + methodName);
    if (!methodName.equals("populateTransplantationDetails")){
        methodName = SafeHTMLUtil.clean(methodName);
        log.debug("methodName: " + methodName);
    }
    String modelID = request.getParameter("tModelID");
    request.getSession().setAttribute(Constants.MODELID, modelID);
    String nsc = request.getParameter("nsc");
    // NOTE(review): this bails out only on an empty string; a missing (null)
    // nsc parameter falls through and is passed to
    // getInvivoResultCollectionByNSC below. Confirm whether null should be
    // rejected here as well.
    if (nsc != null && nsc.length() == 0)
        return mapping.findForward("viewModelCharacteristics");
    log.debug("<populateTransplantationDetails> modelID:" + modelID);
    log.debug("<populateTransplantationDetails> nsc:" + nsc);
    TransplantationManager mgr = (TransplantationManager) getBean("transplantationManager");
    Transplantation t = mgr.get(modelID);
    request.getSession().setAttribute(Constants.TRANSPLANTATIONMODEL, t);
    request.getSession().setAttribute(Constants.NSC_NUMBER, nsc);
    request.getSession().setAttribute(Constants.TRANSPLANTATIONRESULTLIST, t.getInvivoResultCollectionByNSC(nsc));
    return mapping.findForward("viewInvivoDetails");
}
}
|
Modified for JIRA# Bug CAMOD-975 Therapeutic approaches search page not shown - user gets blank page
SVN-Revision: 5678
|
software/camod/src/gov/nih/nci/camod/webapp/action/ViewModelAction.java
|
Modified for JIRA# Bug CAMOD-975 Therapeutic approaches search page not shown - user gets blank page
|
|
Java
|
mit
|
e4ec9a4a06d8afaac8d5d2ab54786615fbc9a61e
| 0
|
gzsnail/acm-study
|
import java.io.*;
import java.util.*;
/**
 * Reads a count followed by that many bracket strings and, for each one,
 * prints "Yes" when both the '('/')' and '['/']' counters balance to zero,
 * otherwise "No". A '(' directly followed by ']' (or '[' by ')') bumps the
 * corresponding counter twice, exactly as in the original implementation.
 */
public class p2 {
    public static void main(String[] args) throws Exception {
        Scanner in = new Scanner(System.in);
        int cases = in.nextInt();
        int processed = 0;
        // do-while (not for) preserves the original's at-least-once behavior
        do {
            String line = in.next();
            int round = 0;  // net '(' minus ')'
            int square = 0; // net '[' minus ']'
            for (int i = 0; i < line.length(); i++) {
                char ch = line.charAt(i);
                switch (ch) {
                    case '(':
                        round++;
                        // mismatched pair "(]" counts the '(' twice
                        if (i + 1 < line.length() && line.charAt(i + 1) == ']') {
                            round++;
                        }
                        break;
                    case ')':
                        round--;
                        break;
                    case '[':
                        square++;
                        // mismatched pair "[)" counts the '[' twice
                        if (i + 1 < line.length() && line.charAt(i + 1) == ')') {
                            square++;
                        }
                        break;
                    case ']':
                        square--;
                        break;
                    default:
                        break;
                }
            }
            System.out.println((round == 0 && square == 0) ? "Yes" : "No");
            processed++;
        } while (processed < cases);
    }
}
|
src/p2.java
|
import java.io.*;
import java.util.*;
/**
 * Reads a count t followed by t bracket strings and, for each one, prints
 * "Yes" when both the '('/')' and '['/']' counters balance to zero,
 * otherwise "No".
 *
 * Fixes over the previous version: Scanner has no nextString() method
 * (next() reads one whitespace-delimited token), and the do-while braces
 * were scrambled so the class did not compile.
 */
public class p2 {
    public static void main(String[] args) throws Exception {
        Scanner scin = new Scanner(System.in);
        int a = 0, b = 0, c = 0;
        int t = scin.nextInt();
        do {
            String str = scin.next();
            for (int i = 0; i < str.length(); i++) {
                if (str.charAt(i) == '(') a = a + 1;
                if (str.charAt(i) == ')') a = a - 1;
                if (str.charAt(i) == '[') b = b + 1;
                if (str.charAt(i) == ']') b = b - 1;
            }
            if (a == 0 && b == 0) {
                System.out.println("Yes");
            } else {
                System.out.println("No");
            }
            // reset the per-string counters before the next test case
            c++;
            a = 0;
            b = 0;
        } while (c < t);
    }
}
|
Update p2.java
|
src/p2.java
|
Update p2.java
|
|
Java
|
mit
|
34dbd43b01a6f9998bbd87333d5c51a276d8323c
| 0
|
fvoichick/ColoredPlayerNames
|
package com.finnv3.coloredplayernames;
import java.io.IOException;
import java.util.ArrayList;
import java.util.EnumMap;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.UUID;
import java.util.logging.Level;
import org.bukkit.ChatColor;
import org.bukkit.configuration.ConfigurationSection;
import org.bukkit.entity.Player;
import org.bukkit.event.EventHandler;
import org.bukkit.event.Listener;
import org.bukkit.event.player.AsyncPlayerChatEvent;
import org.bukkit.event.player.PlayerJoinEvent;
import org.bukkit.event.player.PlayerQuitEvent;
import org.bukkit.plugin.java.JavaPlugin;
import org.bukkit.scoreboard.Scoreboard;
import org.bukkit.scoreboard.Team;
import org.mcstats.MetricsLite;
import net.gravitydevelopment.updater.Updater;
/**
* The main (and only) ColoredPlayerNames class
*
* @author Finn Voichick
*/
public final class ColoredPlayerNames extends JavaPlugin implements Listener {
private Scoreboard scoreboard;
private Map<ChatColor, Double> weights;
private Map<UUID, ChatColor> playerColors;
private Random random;
@Override
public void onEnable() {
    // Write the bundled config.yml to the data folder on first run
    saveDefaultConfig();
    if (getConfig().getBoolean("auto-update")) {
        // NOTE(review): 'id' is not declared anywhere in this class or
        // method — it presumably should be the plugin's Curse project id
        // for the Updater; confirm where it is defined, otherwise this
        // line does not compile.
        new Updater(this, id, getFile(), Updater.UpdateType.DEFAULT, false);
    }
    try {
        // Anonymous usage statistics (mcstats.org); failure is non-fatal
        MetricsLite metrics = new MetricsLite(this);
        metrics.start();
    } catch (IOException e) {
        getLogger().log(Level.WARNING, "Failed to submit stats to mcstats.org", e);
    }
    // One plugin-private scoreboard holds a per-player team for name-tag colors
    scoreboard = getServer().getScoreboardManager().getNewScoreboard();
    // Load the color -> weight table from the "colors" config section
    weights = new HashMap<ChatColor, Double>();
    ConfigurationSection colorSection = getConfig().getConfigurationSection("colors");
    for (String colorName : colorSection.getKeys(false)) {
        ConfigurationSection singleColor = colorSection.getConfigurationSection(colorName);
        weights.put(ChatColor.getByChar(singleColor.getString("code")), singleColor.getDouble("weight"));
    }
    playerColors = new HashMap<UUID, ChatColor>(16);
    random = new Random();
    getServer().getPluginManager().registerEvents(this, this);
    // Color anyone already online (e.g. after a /reload)
    for (Player player : getServer().getOnlinePlayers()) {
        colorPlayer(player);
    }
}
@Override
public void onDisable() {
for (Player player : getServer().getOnlinePlayers()) {
uncolorPlayer(player);
}
}
@EventHandler
public void onPlayerJoin(PlayerJoinEvent event) {
Player player = event.getPlayer();
colorPlayer(player);
event.setJoinMessage(player.getDisplayName() + ChatColor.YELLOW + " joined the game.");
}
private ChatColor pickColor(Player player) {
List<ChatColor> availableColors = new ArrayList<ChatColor>(16);
Map<ChatColor, Integer> colorsInUse = new EnumMap<ChatColor, Integer>(ChatColor.class);
for (ChatColor color : weights.keySet()) {
colorsInUse.put(color, 0);
}
for (ChatColor color : playerColors.values()) {
colorsInUse.put(color, colorsInUse.get(color) + 1);
}
int lowestNumber = Integer.MAX_VALUE;
for (ChatColor color : weights.keySet()) {
int occurences = colorsInUse.get(color);
if (occurences <= lowestNumber) {
if (occurences < lowestNumber) {
lowestNumber = occurences;
availableColors.clear();
}
availableColors.add(color);
}
}
if (availableColors.isEmpty()) {
return ChatColor.RESET;
}
double weightTotal = 0.0;
for (ChatColor color : availableColors) {
weightTotal += weights.get(color);
}
double randomNumber = random.nextDouble();
double probability = 0.0;
for (ChatColor color : availableColors) {
probability += weights.get(color) / weightTotal;
if (randomNumber < probability) {
return color;
}
}
return availableColors.get(random.nextInt(availableColors.size()));
}
private void colorPlayer(Player player) {
ChatColor color = getPermColor(player);
if (color == null) {
color = pickColor(player);
}
colorPlayer(player, color);
}
private void colorPlayer(Player player, ChatColor color) {
playerColors.put(player.getUniqueId(), color);
player.setDisplayName(color + player.getName() + ChatColor.RESET);
Team team = scoreboard.registerNewTeam(player.getName());
team.setDisplayName(player.getName());
team.setPrefix(color.toString());
team.setSuffix(ChatColor.RESET.toString());
team.addEntry(player.getName());
player.setScoreboard(scoreboard);
}
private void uncolorPlayer(Player player) {
scoreboard.getTeam(player.getName()).unregister();
player.setDisplayName(player.getName());
}
private ChatColor getPermColor(Player player) {
for (ChatColor color : weights.keySet()) {
if (player.hasPermission("coloredplayernames." + color.name())) {
return color;
}
}
return null;
}
@EventHandler
public void onAsyncPlayerChat(AsyncPlayerChatEvent event) {
Player player = event.getPlayer();
event.setFormat(ChatColor.GRAY + "<" + player.getDisplayName() + ChatColor.GRAY + "> " + ChatColor.RESET
+ event.getMessage());
}
@EventHandler
public void onPlayerQuit(PlayerQuitEvent event) {
event.setQuitMessage(event.getPlayer().getDisplayName() + ChatColor.YELLOW + " left the game.");
uncolorPlayer(event.getPlayer());
}
private static final int id = 80947;
}
|
src/com/finnv3/coloredplayernames/ColoredPlayerNames.java
|
package com.finnv3.coloredplayernames;
import java.io.IOException;
import java.util.ArrayList;
import java.util.EnumMap;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.UUID;
import java.util.logging.Level;
import org.bukkit.ChatColor;
import org.bukkit.configuration.ConfigurationSection;
import org.bukkit.entity.Player;
import org.bukkit.event.EventHandler;
import org.bukkit.event.Listener;
import org.bukkit.event.player.AsyncPlayerChatEvent;
import org.bukkit.event.player.PlayerJoinEvent;
import org.bukkit.event.player.PlayerQuitEvent;
import org.bukkit.plugin.java.JavaPlugin;
import org.bukkit.scoreboard.Scoreboard;
import org.bukkit.scoreboard.Team;
import org.mcstats.MetricsLite;
import net.gravitydevelopment.updater.Updater;
/**
* The main (and only) ColoredPlayerNames class
*
* @author Finn Voichick
*/
/**
 * Plugin entry point: assigns each online player a weighted, least-used chat
 * color, reflected in chat, join/quit messages and scoreboard name tags.
 */
public final class ColoredPlayerNames extends JavaPlugin implements Listener {

    /** Scoreboard whose per-player teams color the name tags. */
    private Scoreboard scoreboard;
    /** Configured selection weight per chat color. */
    private Map<ChatColor, Double> weights;
    /** Color currently assigned to each online player, keyed by UUID. */
    private Map<UUID, ChatColor> playerColors;
    private Random random;

    @Override
    public void onEnable() {
        saveDefaultConfig();
        if (getConfig().getBoolean("auto-update")) {
            new Updater(this, id, getFile(), Updater.UpdateType.DEFAULT, false);
        }
        try {
            MetricsLite metrics = new MetricsLite(this);
            metrics.start();
        } catch (IOException e) {
            getLogger().log(Level.WARNING, "Failed to submit stats to mcstats.org", e);
        }
        scoreboard = getServer().getScoreboardManager().getNewScoreboard();
        weights = new HashMap<ChatColor, Double>();
        ConfigurationSection colorSection = getConfig().getConfigurationSection("colors");
        // Robustness: a config missing the "colors" section previously NPE'd here.
        if (colorSection != null) {
            for (String colorName : colorSection.getKeys(false)) {
                ConfigurationSection singleColor = colorSection.getConfigurationSection(colorName);
                if (singleColor == null) {
                    continue;
                }
                ChatColor color = ChatColor.getByChar(singleColor.getString("code"));
                if (color == null) {
                    getLogger().log(Level.WARNING, "Unknown color code for: {0}", colorName);
                    continue;
                }
                weights.put(color, singleColor.getDouble("weight"));
            }
        }
        playerColors = new HashMap<UUID, ChatColor>(16);
        random = new Random();
        getServer().getPluginManager().registerEvents(this, this);
        for (Player player : getServer().getOnlinePlayers()) {
            colorPlayer(player);
        }
    }

    @Override
    public void onDisable() {
        for (Player player : getServer().getOnlinePlayers()) {
            uncolorPlayer(player);
        }
    }

    @EventHandler
    public void onPlayerJoin(PlayerJoinEvent event) {
        Player player = event.getPlayer();
        colorPlayer(player);
        event.setJoinMessage(player.getDisplayName() + ChatColor.YELLOW + " joined the game.");
    }

    /**
     * Picks a color for the given player: among the least-used colors,
     * chooses randomly in proportion to the configured weights.
     *
     * <p>Bug fix: the previous implementation ended in
     * {@code throw new AssertionError()}, which was actually reachable when
     * floating-point rounding left the cumulative probability just below the
     * random number, and crashed outright when no colors were configured.
     *
     * @return a configured color, or {@link ChatColor#RESET} if none exist
     */
    private ChatColor pickColor(Player player) {
        List<ChatColor> availableColors = new ArrayList<ChatColor>(16);
        Map<ChatColor, Integer> colorsInUse = new EnumMap<ChatColor, Integer>(ChatColor.class);
        for (ChatColor color : weights.keySet()) {
            colorsInUse.put(color, 0);
        }
        for (ChatColor color : playerColors.values()) {
            // Ignore stored colors outside the configured set (e.g. RESET).
            Integer count = colorsInUse.get(color);
            if (count != null) {
                colorsInUse.put(color, count + 1);
            }
        }
        int lowestNumber = Integer.MAX_VALUE;
        for (ChatColor color : weights.keySet()) {
            int occurences = colorsInUse.get(color);
            if (occurences <= lowestNumber) {
                if (occurences < lowestNumber) {
                    lowestNumber = occurences;
                    availableColors.clear();
                }
                availableColors.add(color);
            }
        }
        if (availableColors.isEmpty()) {
            return ChatColor.RESET;
        }
        double weightTotal = 0.0;
        for (ChatColor color : availableColors) {
            weightTotal += weights.get(color);
        }
        double randomNumber = random.nextDouble();
        double probability = 0.0;
        for (ChatColor color : availableColors) {
            probability += weights.get(color) / weightTotal;
            if (randomNumber < probability) {
                return color;
            }
        }
        // Rounding fallback: uniform pick instead of the old AssertionError.
        return availableColors.get(random.nextInt(availableColors.size()));
    }

    /** Assigns a permission-forced color if present, otherwise a picked one. */
    private void colorPlayer(Player player) {
        ChatColor color = getPermColor(player);
        if (color == null) {
            color = pickColor(player);
        }
        colorPlayer(player, color);
    }

    /** Applies the given color to the display name and scoreboard team. */
    private void colorPlayer(Player player, ChatColor color) {
        playerColors.put(player.getUniqueId(), color);
        player.setDisplayName(color + player.getName() + ChatColor.RESET);
        // Reuse an existing team; registerNewTeam throws on duplicate names.
        Team team = scoreboard.getTeam(player.getName());
        if (team == null) {
            team = scoreboard.registerNewTeam(player.getName());
        }
        team.setDisplayName(player.getName());
        team.setPrefix(color.toString());
        team.setSuffix(ChatColor.RESET.toString());
        team.addEntry(player.getName());
        player.setScoreboard(scoreboard);
    }

    /** Reverts the player's name color and releases the color for reuse. */
    private void uncolorPlayer(Player player) {
        // Bug fix: remove the assignment so the color is no longer counted as
        // in use and the map does not leak an entry per quitting player.
        playerColors.remove(player.getUniqueId());
        // Bug fix: getTeam may return null; avoid the NPE on unregister.
        Team team = scoreboard.getTeam(player.getName());
        if (team != null) {
            team.unregister();
        }
        player.setDisplayName(player.getName());
    }

    /** @return the color forced via a permission node, or null if none. */
    private ChatColor getPermColor(Player player) {
        for (ChatColor color : weights.keySet()) {
            if (player.hasPermission("coloredplayernames." + color.name())) {
                return color;
            }
        }
        return null;
    }

    @EventHandler
    public void onAsyncPlayerChat(AsyncPlayerChatEvent event) {
        Player player = event.getPlayer();
        event.setFormat(ChatColor.GRAY + "<" + player.getDisplayName() + ChatColor.GRAY + "> " + ChatColor.RESET
                + event.getMessage());
    }

    @EventHandler
    public void onPlayerQuit(PlayerQuitEvent event) {
        event.setQuitMessage(event.getPlayer().getDisplayName() + ChatColor.YELLOW + " left the game.");
        uncolorPlayer(event.getPlayer());
    }

    /** Curse project id used by the auto-updater. */
    private static final int id = 80947;
}
|
Caught strange cases to prevent runtime exceptions
|
src/com/finnv3/coloredplayernames/ColoredPlayerNames.java
|
Caught strange cases to prevent runtime exceptions
|
|
Java
|
mit
|
c05a9fc381b337289f59ecaf2da2d4cd0f9d7931
| 0
|
ronaldosvieira/tcc
|
package mind.need;
import entity.Animal;
import mind.goal.*;
import mind.sense.Perception;
/**
 * Hunger need: decays over time, drives a find/approach/attack/eat goal
 * chain, and scores perceptions that match the animal's diet as food.
 */
public class Hunger extends Need {

    // Perception category this animal can eat: "plant", "animal" or "thing".
    private String diet;

    public Hunger(Animal animal, double decayPerMinute, double value) {
        super(animal, decayPerMinute, value);
        diet = resolveDiet();
    }

    public Hunger(Animal animal, double decayRate) {
        super(animal, decayRate);
        // Bug fix: this constructor previously left `diet` null, causing a
        // NullPointerException the first time evaluate() was called.
        diet = resolveDiet();
    }

    /** Maps the animal's semantic diet attribute to a perception category. */
    private String resolveDiet() {
        String d = getAnimal().getSemantic().get("diet", String.class);
        // equals on the constant also tolerates a missing (null) attribute,
        // which previously NPE'd.
        if ("herbivorous".equals(d)) return "plant";
        if ("carnivorous".equals(d)) return "animal";
        return "thing"; // omnivorous, unknown or absent diet
    }

    @Override
    public String getName() {return "hunger";}

    /** Applies time-based decay; once fully hungry, the animal attacks. */
    @Override
    public void decay(float delta) {
        super.decay(delta);
        if (getValue() >= 1.0) getAnimal().attack();
    }

    /** Builds the find → approach → attack → eat goal sequence. */
    @Override
    public Goal getGoal() {
        return new GoalChain(new FindFood(getAnimal()))
                .then(new MoveTo(getAnimal()))
                .then(new Attack(getAnimal()))
                .then(new Eat(getAnimal()))
                .get();
    }

    /**
     * @return 1 when the perceived entity matches this animal's diet and is
     *         smaller than the animal itself, otherwise 0
     */
    @Override
    public double evaluate(Perception perception) {
        return perception.isA(diet)
                && perception.get("size", Double.class)
                < getAnimal().getSemantic().get("size", Double.class)?
                1 : 0;
    }
}
|
src/mind/need/Hunger.java
|
package mind.need;
import entity.Animal;
import mind.goal.*;
import mind.sense.Perception;
/**
 * Hunger need: decays over time, drives a find/approach/attack/eat goal
 * chain, and scores smaller animals as potential food.
 */
public class Hunger extends Need {

    public Hunger(Animal animal, double decayPerMinute, double value) {
        super(animal, decayPerMinute, value);
    }

    public Hunger(Animal animal, double decayRate) {
        super(animal, decayRate);
    }

    /** @return the identifier of this need */
    @Override
    public String getName() {
        return "hunger";
    }

    /** Applies time-based decay; once fully hungry, the animal attacks. */
    @Override
    public void decay(float delta) {
        super.decay(delta);
        boolean starving = getValue() >= 1.0;
        if (starving) {
            getAnimal().attack();
        }
    }

    /** Builds the find → approach → attack → eat goal sequence. */
    @Override
    public Goal getGoal() {
        GoalChain plan = new GoalChain(new FindFood(getAnimal()));
        return plan.then(new MoveTo(getAnimal()))
                .then(new Attack(getAnimal()))
                .then(new Eat(getAnimal()))
                .get();
    }

    /**
     * @return 1 when the perceived entity is an animal smaller than this
     *         one, otherwise 0
     */
    @Override
    public double evaluate(Perception perception) {
        if (!perception.isA("animal")) {
            return 0;
        }
        double preySize = perception.get("size", Double.class);
        double ownSize = getAnimal().getSemantic().get("size", Double.class);
        return preySize < ownSize ? 1 : 0;
    }
}
|
Diet-based evaluation on hunger
|
src/mind/need/Hunger.java
|
Diet-based evaluation on hunger
|
|
Java
|
cc0-1.0
|
1b08d617b53428b31a5ce9a61c3c500896dcc6db
| 0
|
ConnectSDK/Connect-SDK-Android-API-Sampler
|
//
// Connect SDK Sample App by LG Electronics
//
// To the extent possible under law, the person who associated CC0 with
// this sample app has waived all copyright and related or neighboring rights
// to the sample app.
//
// You should have received a copy of the CC0 legalcode along with this
// work. If not, see http://creativecommons.org/publicdomain/zero/1.0/.
//
package com.connectsdk.sampler.fragments;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.os.AsyncTask;
import android.os.Bundle;
import android.os.Handler;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.CheckBox;
import android.widget.EditText;
import android.widget.ImageView;
import android.widget.SeekBar;
import android.widget.SeekBar.OnSeekBarChangeListener;
import android.widget.TextView;
import com.connectsdk.core.MediaInfo;
import com.connectsdk.core.SubtitleInfo;
import com.connectsdk.device.ConnectableDevice;
import com.connectsdk.sampler.R;
import com.connectsdk.sampler.util.TestResponseObject;
import com.connectsdk.service.capability.MediaControl;
import com.connectsdk.service.capability.MediaControl.DurationListener;
import com.connectsdk.service.capability.MediaControl.PlayStateListener;
import com.connectsdk.service.capability.MediaControl.PlayStateStatus;
import com.connectsdk.service.capability.MediaControl.PositionListener;
import com.connectsdk.service.capability.MediaPlayer;
import com.connectsdk.service.capability.MediaPlayer.MediaInfoListener;
import com.connectsdk.service.capability.MediaPlayer.MediaLaunchObject;
import com.connectsdk.service.capability.PlaylistControl;
import com.connectsdk.service.capability.VolumeControl;
import com.connectsdk.service.capability.VolumeControl.VolumeListener;
import com.connectsdk.service.capability.listeners.ResponseListener;
import com.connectsdk.service.command.ServiceCommandError;
import com.connectsdk.service.sessions.LaunchSession;
import java.io.InputStream;
import java.util.Locale;
import java.util.Timer;
import java.util.TimerTask;
import java.util.concurrent.TimeUnit;
public class MediaPlayerFragment extends BaseFragment {
// Sample media hosted on the Connect SDK demo server.
public static final String URL_SUBTITLES_WEBVTT =
"http://ec2-54-201-108-205.us-west-2.compute.amazonaws.com/samples/media/sintel_en.vtt";
public static final String URL_SUBTITLE_SRT =
"http://ec2-54-201-108-205.us-west-2.compute.amazonaws.com/samples/media/sintel_en.srt";
public static final String URL_VIDEO_MP4 =
"http://ec2-54-201-108-205.us-west-2.compute.amazonaws.com/samples/media/video.mp4";
public static final String URL_IMAGE_ICON =
"http://ec2-54-201-108-205.us-west-2.compute.amazonaws.com/samples/media/videoIcon.jpg";
// UI controls, bound in onCreateView.
public Button photoButton;
public Button videoButton;
public Button audioButton;
public Button playButton;
public Button pauseButton;
public Button stopButton;
public Button rewindButton;
public Button fastForwardButton;
public Button closeButton;
public Button mediaInfoButton;
public Button playlistButton;
public Button previousButton;
public Button nextButton;
public Button jumpButton;
public CheckBox loopingButton;
public CheckBox subtitlesButton;
// Session for the media currently launched on the device (null when idle).
public LaunchSession launchSession;
public TextView positionTextView;
public TextView durationTextView;
public TextView mediaInfoTextView;
// Playback position slider; mIsUserSeeking suppresses updates while dragging.
public SeekBar mSeekBar;
public boolean mIsUserSeeking;
public SeekBar mVolumeBar;
public EditText positionTrackView;
public ImageView mediaInfoImageView;
// True while a seek request is in flight.
public boolean mSeeking;
public Runnable mRefreshRunnable;
// Poll interval for position/duration updates.
public final int REFRESH_INTERVAL_MS = (int) TimeUnit.SECONDS.toMillis(1);
public Handler mHandler;
// Last known media duration in ms; -1 / <=0 means "not yet fetched".
public long totalTimeDuration;
public boolean mIsGettingPlayPosition;
// Playback state flags: image display vs. audio/video playback.
boolean isPlayingImage = false;
boolean isPlaying = false;
// Controls returned by the last successful launch (null until then).
private MediaControl mMediaControl = null;
private PlaylistControl mPlaylistControl = null;
// Timer driving the periodic position/duration poll.
private Timer refreshTimer;
public TestResponseObject testResponse;
/** Required public no-arg constructor for fragment re-instantiation. */
public MediaPlayerFragment() {};
/** Creates the fragment and resets all seek/position tracking state. */
public MediaPlayerFragment(Context context)
{
super(context);
mIsUserSeeking = false;
mSeeking = false;
mIsGettingPlayPosition = false;
testResponse = new TestResponseObject();
}
/**
 * Inflates the fragment layout and binds every control; the `buttons`
 * array (declared in BaseFragment) is used to bulk enable/disable them.
 */
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
// Keep this instance (and its playback state) across configuration changes.
setRetainInstance(true);
View rootView = inflater.inflate(
R.layout.fragment_media_player, container, false);
photoButton = (Button) rootView.findViewById(R.id.photoButton);
videoButton = (Button) rootView.findViewById(R.id.videoButton);
audioButton = (Button) rootView.findViewById(R.id.audioButton);
playButton = (Button) rootView.findViewById(R.id.playButton);
pauseButton = (Button) rootView.findViewById(R.id.pauseButton);
stopButton = (Button) rootView.findViewById(R.id.stopButton);
rewindButton = (Button) rootView.findViewById(R.id.rewindButton);
fastForwardButton = (Button) rootView.findViewById(R.id.fastForwardButton);
closeButton = (Button) rootView.findViewById(R.id.closeButton);
mediaInfoButton = (Button) rootView.findViewById(R.id.mediaInfo_button);
playlistButton = (Button) rootView.findViewById(R.id.playlistButton);
previousButton = (Button) rootView.findViewById(R.id.previousButton);
nextButton = (Button) rootView.findViewById(R.id.nextButton);
jumpButton = (Button) rootView.findViewById(R.id.jumpButton);
loopingButton = (CheckBox) rootView.findViewById(R.id.loopingButton);
subtitlesButton = (CheckBox) rootView.findViewById(R.id.subtitlesButton);
positionTextView = (TextView) rootView.findViewById(R.id.stream_position);
durationTextView = (TextView) rootView.findViewById(R.id.stream_duration);
mediaInfoTextView = (TextView) rootView.findViewById(R.id.mediaInfo_textView);
mSeekBar = (SeekBar) rootView.findViewById(R.id.stream_seek_bar);
mVolumeBar = (SeekBar) rootView.findViewById(R.id.volume_seek_bar);
positionTrackView = (EditText) rootView.findViewById(R.id.positionText);
mediaInfoImageView = (ImageView) rootView.findViewById(R.id.mediaInfo_imageView);
// Collected so the base class can toggle all controls at once.
buttons = new Button[] {
photoButton,
videoButton,
audioButton,
playButton,
pauseButton,
stopButton,
rewindButton,
fastForwardButton,
closeButton,
mediaInfoButton,
playlistButton,
previousButton,
nextButton,
jumpButton,
loopingButton,
subtitlesButton,
};
mHandler = new Handler();
return rootView;
}
/**
 * Updates the target device; a null device means disconnection, in which
 * case polling stops and the stale session controls are discarded.
 */
@Override
public void setTv(ConnectableDevice tv) {
    super.setTv(tv);
    if (tv != null) {
        return; // still connected; nothing to tear down
    }
    stopUpdating();
    mMediaControl = null;
    mPlaylistControl = null;
}
/** Resumes the position/duration poll if playback was in progress. */
@Override
public void onResume() {
super.onResume();
if (isPlaying) {
startUpdating();
}
}
/** Stops the poll while the fragment is in the background. */
@Override
public void onPause() {
stopUpdating();
super.onPause();
testResponse = new TestResponseObject(true, TestResponseObject.SuccessCode, TestResponseObject.Paused_Media);
}
/**
 * Enables each control according to the connected device's capabilities
 * and wires up its click handler; called after a device connects.
 */
@Override
public void enableButtons()
{
if (getTv().hasCapability(MediaPlayer.Display_Image)) {
photoButton.setEnabled(true);
photoButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
showImage();
}
});
}
else {
disableButton(photoButton);
}
// Reset cached duration so it is re-fetched for the next launch.
totalTimeDuration = -1;
loopingButton.setEnabled(getTv().hasCapability(MediaPlayer.Loop));
subtitlesButton.setEnabled(true);
if (getTv().hasCapability(MediaPlayer.Play_Video)) {
videoButton.setEnabled(true);
videoButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
playVideo();
}
});
}
else {
disableButton(videoButton);
}
if (getTv().hasCapability(MediaPlayer.Play_Audio)) {
audioButton.setEnabled(true);
audioButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
playAudio();
}
});
} else {
disableButton(audioButton);
}
if (getTv().hasCapability(MediaPlayer.Play_Playlist)) {
playlistButton.setEnabled(true);
playlistButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
playM3U();
testResponse = new TestResponseObject(true, TestResponseObject.SuccessCode, TestResponseObject.Played_Playlist);
}
});
} else {
disableButton(playlistButton);
}
mVolumeBar.setEnabled(getTv().hasCapability(VolumeControl.Volume_Set));
mVolumeBar.setOnSeekBarChangeListener(volumeListener);
if (getTv().hasCapability(VolumeControl.Volume_Get)) {
getVolumeControl().getVolume(getVolumeListener);
}
if (getTv().hasCapability(VolumeControl.Volume_Subscribe)) {
getVolumeControl().subscribeVolume(getVolumeListener);
}
if (getTv().hasCapability(MediaPlayer.MediaInfo_Get)) {
mediaInfoButton.setEnabled(true);
mediaInfoButton.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
getMediaPlayer().getMediaInfo(mediaInfoListener);
}
});
}
else
mediaInfoButton.setEnabled(false);
if (getTv().hasCapability(MediaPlayer.MediaInfo_Subscribe)) {
getMediaPlayer().subscribeMediaInfo(mediaInfoListener);
}
// NOTE(review): `!isPlaying || !isPlayingImage` disables media whenever at
// least one flag is false; `&&` (both idle) may have been intended — confirm.
if (!isPlaying || !isPlayingImage)
disableMedia();
if (isPlaying) enableMedia();
else if (isPlayingImage) {
// An image is showing: only the close button applies, and no polling.
closeButton.setEnabled(true);
closeButton.setOnClickListener(closeListener);
stopUpdating();
}
}
/** Launches the sample MP3 on the device, optionally looping. */
private void playAudio() {
String mediaURL = "http://ec2-54-201-108-205.us-west-2.compute.amazonaws.com/samples/media/audio.mp3";
String iconURL = "http://ec2-54-201-108-205.us-west-2.compute.amazonaws.com/samples/media/audioIcon.jpg";
String title = "The Song that Doesn't End";
String description = "Lamb Chop's Play Along";
String mimeType = "audio/mp3";
boolean shouldLoop = loopingButton.isChecked();
MediaInfo mediaInfo = new MediaInfo.Builder(mediaURL, mimeType)
.setTitle(title)
.setDescription(description)
.setIcon(iconURL)
.build();
getMediaPlayer().playMedia(mediaInfo, shouldLoop, new MediaPlayer.LaunchListener() {
@Override
public void onError(ServiceCommandError error) {
Log.d("LG", "Error playing audio", error);
stopMediaSession();
}
@Override
public void onSuccess(MediaLaunchObject object) {
Log.d("LG", "Started playing audio");
// Keep the launch handles so the transport controls work.
launchSession = object.launchSession;
testResponse = new TestResponseObject(true, TestResponseObject.SuccessCode, TestResponseObject.Play_Audio);
mMediaControl = object.mediaControl;
mPlaylistControl = object.playlistControl;
stopUpdating();
enableMedia();
isPlaying = true;
}
});
}
/** Launches the sample M3U playlist on the device, optionally looping. */
private void playM3U() {
String mediaURL = "http://ec2-54-201-108-205.us-west-2.compute.amazonaws.com/samples/media/example-m3u-playlist.m3u";
String iconURL = "http://ec2-54-201-108-205.us-west-2.compute.amazonaws.com/samples/media/audioIcon.jpg";
String title = "Playlist";
String description = "Playlist description";
String mimeType = "application/x-mpegurl";
boolean shouldLoop = loopingButton.isChecked();
MediaInfo mediaInfo = new MediaInfo.Builder(mediaURL, mimeType)
.setTitle(title)
.setDescription(description)
.setIcon(iconURL)
.build();
getMediaPlayer().playMedia(mediaInfo, shouldLoop, new MediaPlayer.LaunchListener() {
@Override
public void onError(ServiceCommandError error) {
Log.d("LG", "Error playing audio", error);
stopMediaSession();
}
@Override
public void onSuccess(MediaLaunchObject object) {
Log.d("LG", "Started playing playlist");
// Keep the launch handles so transport and playlist controls work.
launchSession = object.launchSession;
mMediaControl = object.mediaControl;
mPlaylistControl = object.playlistControl;
stopUpdating();
enableMedia();
isPlaying = true;
}
});
}
/** Displays the sample photo; only the close button stays active. */
private void showImage() {
// Images have no transport controls, so disable them first.
disableMedia();
String imagePath = "http://ec2-54-201-108-205.us-west-2.compute.amazonaws.com/samples/media/photo.jpg";
String mimeType = "image/jpeg";
String title = "Sintel Character Design";
String description = "Blender Open Movie Project";
String icon = "http://ec2-54-201-108-205.us-west-2.compute.amazonaws.com/samples/media/photoIcon.jpg";
MediaInfo mediaInfo = new MediaInfo.Builder(imagePath, mimeType)
.setTitle(title)
.setDescription(description)
.setIcon(icon)
.build();
getMediaPlayer().displayImage(mediaInfo, new MediaPlayer.LaunchListener() {
@Override
public void onError(ServiceCommandError error) {
Log.e("Error", "Error displaying Image", error);
stopMediaSession();
}
@Override
public void onSuccess(MediaLaunchObject object) {
launchSession = object.launchSession;
closeButton.setEnabled(true);
testResponse = new TestResponseObject(true, TestResponseObject.SuccessCode,
TestResponseObject.Display_image);
closeButton.setOnClickListener(closeListener);
stopUpdating();
isPlayingImage = true;
}
});
}
/**
 * Plays the sample MP4 trailer, optionally looping and with English
 * subtitles (WebVTT when the device supports it, SRT otherwise).
 */
private void playVideo() {
    boolean shouldLoop = loopingButton.isChecked();
    SubtitleInfo.Builder subtitleBuilder = null;
    if (subtitlesButton.isChecked()) {
        subtitleBuilder = new SubtitleInfo.Builder(
                getTv().hasCapability(MediaPlayer.Subtitle_WebVTT) ? URL_SUBTITLES_WEBVTT :
                        URL_SUBTITLE_SRT);
        subtitleBuilder.setLabel("English").setLanguage("en");
    }
    MediaInfo mediaInfo = new MediaInfo.Builder(URL_VIDEO_MP4, "video/mp4")
            .setTitle("Sintel Trailer")
            .setDescription("Blender Open Movie Project")
            .setIcon(URL_IMAGE_ICON)
            .setSubtitleInfo(subtitleBuilder == null ? null : subtitleBuilder.build())
            .build();
    getMediaPlayer().playMedia(mediaInfo, shouldLoop, new MediaPlayer.LaunchListener() {
        @Override
        public void onError(ServiceCommandError error) {
            Log.e("Error", "Error playing video", error);
            stopMediaSession();
        }

        // Fix: @Override was missing here (it is present on every other
        // listener in this file); adding it lets the compiler catch any
        // signature drift in the interface.
        @Override
        public void onSuccess(MediaLaunchObject object) {
            // Keep the launch handles so the transport controls work.
            launchSession = object.launchSession;
            testResponse = new TestResponseObject(true, TestResponseObject.SuccessCode,
                    TestResponseObject.Play_Video);
            mMediaControl = object.mediaControl;
            mPlaylistControl = object.playlistControl;
            stopUpdating();
            enableMedia();
            isPlaying = true;
        }
    });
}
/** Abandons the current launch session and resets the playback UI. */
private void stopMediaSession() {
    // Intentionally does NOT call launchSession.close(): on WebOS that can
    // currently close an unrelated web app.
    if (launchSession == null) {
        return;
    }
    launchSession = null;
    stopUpdating();
    disableMedia();
    isPlaying = false;
    isPlayingImage = false;
}
/** Disables every control and clears displayed media info (device lost). */
@Override
public void disableButtons() {
mSeekBar.setEnabled(false);
mVolumeBar.setEnabled(false);
mVolumeBar.setOnSeekBarChangeListener(null);
positionTextView.setEnabled(false);
durationTextView.setEnabled(false);
mediaInfoTextView.setText("");
mediaInfoImageView.setImageBitmap(null);
positionTrackView.setEnabled(false);
loopingButton.setChecked(false);
subtitlesButton.setEnabled(false);
super.disableButtons();
}
/**
 * Issues a seek to the given position (ms) when the device supports it;
 * polling resumes once the request completes either way.
 */
protected void onSeekBarMoved(long position) {
    if (mMediaControl == null || !getTv().hasCapability(MediaControl.Seek)) {
        return;
    }
    mSeeking = true;
    ResponseListener<Object> seekCallback = new ResponseListener<Object>() {
        @Override
        public void onSuccess(Object response) {
            Log.d("LG", "Success on Seeking");
            mSeeking = false;
            startUpdating();
        }

        @Override
        public void onError(ServiceCommandError error) {
            Log.w("Connect SDK", "Unable to seek: " + error.getCode());
            mSeeking = false;
            startUpdating();
        }
    };
    mMediaControl.seek(position, seekCallback);
}
/**
 * Enables the transport/playlist controls per device capability and starts
 * play-state subscription or manual polling for position/duration.
 */
public void enableMedia() {
playButton.setEnabled(getTv().hasCapability(MediaControl.Play));
pauseButton.setEnabled(getTv().hasCapability(MediaControl.Pause));
stopButton.setEnabled(getTv().hasCapability(MediaControl.Stop));
rewindButton.setEnabled(getTv().hasCapability(MediaControl.Rewind));
fastForwardButton.setEnabled(getTv().hasCapability(MediaControl.FastForward));
mSeekBar.setEnabled(getTv().hasCapability(MediaControl.Seek));
closeButton.setEnabled(getTv().hasCapability(MediaPlayer.Close));
previousButton.setEnabled(getTv().hasCapability(PlaylistControl.Previous))
;
nextButton.setEnabled(getTv().hasCapability(PlaylistControl.Next));
jumpButton.setEnabled(getTv().hasCapability(PlaylistControl.JumpToTrack));
positionTrackView.setEnabled(getTv().hasCapability(PlaylistControl.JumpToTrack));
fastForwardButton.setOnClickListener(fastForwardListener);
mSeekBar.setOnSeekBarChangeListener(seekListener);
rewindButton.setOnClickListener(rewindListener);
stopButton.setOnClickListener(stopListener);
playButton.setOnClickListener(playListener);
pauseButton.setOnClickListener(pauseListener);
previousButton.setOnClickListener(previousListener);
nextButton.setOnClickListener(nextListener);
jumpButton.setOnClickListener(jumpListener);
closeButton.setOnClickListener(closeListener);
// Subscribe only on first launch (!isPlaying); otherwise poll manually.
if (getTv().hasCapability(MediaControl.PlayState_Subscribe) && !isPlaying) {
mMediaControl.subscribePlayState(playStateListener);
} else {
if (mMediaControl != null) {
mMediaControl.getDuration(durationListener);
}
startUpdating();
}
}
/** Disables the close button and all transport controls. */
public void disableMedia() {
    closeButton.setOnClickListener(null);
    closeButton.setEnabled(false);
    stopMedia();
}
/** Disables transport controls and resets position/duration display. */
public void stopMedia() {
playButton.setEnabled(false);
playButton.setOnClickListener(null);
pauseButton.setEnabled(false);
pauseButton.setOnClickListener(null);
stopButton.setEnabled(false);
stopButton.setOnClickListener(null);
rewindButton.setEnabled(false);
rewindButton.setOnClickListener(null);
fastForwardButton.setEnabled(false);
fastForwardButton.setOnClickListener(null);
previousButton.setEnabled(false);
previousButton.setOnClickListener(null);
nextButton.setEnabled(false);
nextButton.setOnClickListener(null);
jumpButton.setEnabled(false);
jumpButton.setOnClickListener(null);
positionTrackView.setEnabled(false);
mSeekBar.setEnabled(false);
mSeekBar.setOnSeekBarChangeListener(null);
mSeekBar.setProgress(0);
positionTextView.setText("--:--:--");
durationTextView.setText("--:--:--");
// Force the duration to be re-fetched for the next launch.
totalTimeDuration = -1;
}
// Transport button handlers; each tolerates a missing MediaControl /
// PlaylistControl by doing nothing but still records the test response.
/** Resumes playback. */
public View.OnClickListener playListener = new View.OnClickListener() {
@Override
public void onClick(View view) {
if (mMediaControl != null)
mMediaControl.play(null);
testResponse = new TestResponseObject(true, TestResponseObject.SuccessCode, TestResponseObject.Played_Media);
}
};
/** Pauses playback. */
public View.OnClickListener pauseListener = new View.OnClickListener() {
@Override
public void onClick(View view) {
if (mMediaControl != null)
mMediaControl.pause(null);
testResponse = new TestResponseObject(true, TestResponseObject.SuccessCode, TestResponseObject.Paused_Media);
}
};
/** Skips to the previous playlist track. */
public View.OnClickListener previousListener = new View.OnClickListener() {
@Override
public void onClick(View view) {
if (mPlaylistControl != null)
mPlaylistControl.previous(null);
testResponse = new TestResponseObject(true, TestResponseObject.SuccessCode, TestResponseObject.Previous);
}
};
/** Skips to the next playlist track. */
public View.OnClickListener nextListener = new View.OnClickListener() {
@Override
public void onClick(View view) {
if (mPlaylistControl != null)
mPlaylistControl.next(null);
testResponse = new TestResponseObject(true, TestResponseObject.SuccessCode, TestResponseObject.Next);
}
};
/** Jumps to the track index typed into positionTrackView. */
public View.OnClickListener jumpListener = new View.OnClickListener() {
    @Override
    public void onClick(View view) {
        if (mPlaylistControl != null) {
            // Fix: a blank or non-numeric field previously crashed the app
            // with an uncaught NumberFormatException.
            int track;
            try {
                track = Integer.parseInt(positionTrackView.getText().toString());
            } catch (NumberFormatException e) {
                Log.w("Connect SDK", "Invalid track index: " + e.getMessage());
                return;
            }
            mPlaylistControl.jumpToTrack(track, null);
            testResponse = new TestResponseObject(true, TestResponseObject.SuccessCode, TestResponseObject.Jump);
        }
    }
};
/** Closes the current launch session and resets all playback state. */
public View.OnClickListener closeListener = new View.OnClickListener() {
@Override
public void onClick(View view) {
if (getMediaPlayer() != null) {
if (launchSession != null)
launchSession.close(null);
launchSession = null;
disableMedia();
stopUpdating();
isPlaying = isPlayingImage = false;
testResponse = new TestResponseObject(true, TestResponseObject.SuccessCode, TestResponseObject.Closed_Media);
}
}
};
/** Stops playback on the device and resets the transport UI on success. */
public View.OnClickListener stopListener = new View.OnClickListener() {
@Override
public void onClick(View view) {
if (mMediaControl != null)
mMediaControl.stop(new ResponseListener<Object>() {
@Override
public void onSuccess(Object response) {
stopMedia();
stopUpdating();
testResponse = new TestResponseObject(true, TestResponseObject.SuccessCode, TestResponseObject.Stopped_Media);
isPlaying = false;
// NOTE(review): setting isPlayingImage to true after stopping audio/video
// looks inverted (compare closeListener, which sets both false) — confirm.
isPlayingImage = true;
}
@Override
public void onError(ServiceCommandError error) {
}
});
}
};
/** Rewinds playback when a MediaControl is available. */
public View.OnClickListener rewindListener = new View.OnClickListener() {
@Override
public void onClick(View view) {
if (mMediaControl != null)
mMediaControl.rewind(null);
testResponse = new TestResponseObject(true, TestResponseObject.SuccessCode, TestResponseObject.Rewind_Media);
}
};
/** Fast-forwards playback when a MediaControl is available. */
public View.OnClickListener fastForwardListener = new View.OnClickListener() {
@Override
public void onClick(View view) {
if (mMediaControl != null)
mMediaControl.fastForward(null);
testResponse = new TestResponseObject(true, TestResponseObject.SuccessCode, TestResponseObject.FastForward_Media);
}
};
/** Seek bar handler: pauses polling while dragging, seeks on release. */
public OnSeekBarChangeListener seekListener = new SeekBar.OnSeekBarChangeListener() {
@Override
public void onStopTrackingTouch(SeekBar seekBar) {
mIsUserSeeking = false;
mSeekBar.setSecondaryProgress(0);
onSeekBarMoved(seekBar.getProgress());
}
@Override
public void onStartTrackingTouch(SeekBar seekBar) {
mIsUserSeeking = true;
// Show the drag position as secondary progress until the seek lands.
mSeekBar.setSecondaryProgress(seekBar.getProgress());
stopUpdating();
}
@Override
public void onProgressChanged(SeekBar arg0, int arg1, boolean arg2) {
}
};
/** Volume bar handler: forwards only user-initiated changes (0..1 range). */
public OnSeekBarChangeListener volumeListener = new OnSeekBarChangeListener() {
@Override public void onStopTrackingTouch(SeekBar arg0) { }
@Override public void onStartTrackingTouch(SeekBar arg0) { }
@Override
public void onProgressChanged(SeekBar seekBar, int position, boolean fromUser) {
if (fromUser)
getVolumeControl().setVolume((float) mVolumeBar.getProgress() / 100.0f, null);
}
};
/** Reflects the device volume (0..1) onto the 0..100 volume bar. */
public VolumeListener getVolumeListener = new VolumeListener() {
@Override
public void onError(ServiceCommandError error) {
Log.d("LG", "Error getting Volume: " + error);
}
@Override
public void onSuccess(Float object) {
mVolumeBar.setProgress((int) (object * 100.0f));
}
};
/** Starts/stops the position poll as the device's play state changes. */
public PlayStateListener playStateListener = new PlayStateListener() {
@Override
public void onError(ServiceCommandError error) {
Log.d("LG", "Playstate Listener error = " + error);
}
@Override
public void onSuccess(PlayStateStatus playState) {
Log.d("LG", "Playstate changed | playState = " + playState);
switch (playState) {
case Playing:
startUpdating();
if (mMediaControl != null && getTv().hasCapability(MediaControl.Duration)) {
mMediaControl.getDuration(durationListener);
}
break;
case Finished:
positionTextView.setText("--:--");
durationTextView.setText("--:--");
mSeekBar.setProgress(0);
// Intentional fallthrough: Finished also stops the position poll.
default:
stopUpdating();
break;
}
}
};
/**
 * (Re)starts the periodic poll that refreshes playback position, and the
 * duration while it is still unknown.
 */
private void startUpdating() {
    stopUpdating(); // cancel any poll already running
    refreshTimer = new Timer();
    TimerTask poll = new TimerTask() {
        @Override
        public void run() {
            Log.d("LG", "Updating information");
            boolean connected = mMediaControl != null && getTv() != null;
            if (connected && getTv().hasCapability(MediaControl.Position)) {
                mMediaControl.getPosition(positionListener);
            }
            // Fetch duration manually only when there is no play-state
            // subscription and it has not been obtained yet.
            if (connected
                    && getTv().hasCapability(MediaControl.Duration)
                    && !getTv().hasCapability(MediaControl.PlayState_Subscribe)
                    && totalTimeDuration <= 0) {
                mMediaControl.getDuration(durationListener);
            }
        }
    };
    refreshTimer.schedule(poll, 0, REFRESH_INTERVAL_MS);
}
private void stopUpdating() {
if (refreshTimer == null)
return;
refreshTimer.cancel();
refreshTimer = null;
}
    /**
     * Periodic playback-position callback: updates the elapsed-time label and
     * moves the seek bar thumb. Errors are ignored; the next poll will retry.
     */
    private PositionListener positionListener = new PositionListener() {
        @Override public void onError(ServiceCommandError error) { }
        @Override
        public void onSuccess(Long position) {
            positionTextView.setText(formatTime(position.intValue()));
            mSeekBar.setProgress(position.intValue());
        }
    };
    /**
     * Duration callback: caches the total duration (which lets the refresh task
     * stop polling for it), sizes the seek bar and updates the duration label.
     */
    private DurationListener durationListener = new DurationListener() {
        @Override public void onError(ServiceCommandError error) { }
        @Override
        public void onSuccess(Long duration) {
            totalTimeDuration = duration;
            mSeekBar.setMax(duration.intValue());
            durationTextView.setText(formatTime(duration.intValue()));
        }
    };
private MediaInfoListener mediaInfoListener = new MediaInfoListener() {
@Override
public void onSuccess(MediaInfo mediaInfo) {
String text = mediaInfo.getTitle();
text += "\n";
text += mediaInfo.getDescription();
mediaInfoTextView.setText(text);
final String stringUrl = mediaInfo.getImages().get(0).getUrl();
if (stringUrl!=null) new DownloadImageTask(mediaInfoImageView).execute(stringUrl);
}
@Override
public void onError(ServiceCommandError error) {
}
};
private String formatTime(long millisec) {
int seconds = (int) (millisec / 1000);
int hours = seconds / (60 * 60);
seconds %= (60 * 60);
int minutes = seconds / 60;
seconds %= 60;
String time;
if (hours > 0) {
time = String.format(Locale.US, "%d:%02d:%02d", hours, minutes, seconds);
}
else {
time = String.format(Locale.US, "%d:%02d", minutes, seconds);
}
return time;
}
private class DownloadImageTask extends AsyncTask<String, Void, Bitmap> {
ImageView bmImage;
public DownloadImageTask(ImageView bmImage) {
this.bmImage = bmImage;
}
protected Bitmap doInBackground(String... urls) {
String urldisplay = urls[0];
Bitmap mIcon11 = null;
try {
Log.d("", urldisplay);
InputStream in = new java.net.URL(urldisplay).openStream();
mIcon11 = BitmapFactory.decodeStream(in);
} catch (Exception e) {
Log.e("Error", e.getMessage());
e.printStackTrace();
}
return mIcon11;
}
protected void onPostExecute(Bitmap result) {
bmImage.setImageBitmap(result);
}
}
}
|
src/com/connectsdk/sampler/fragments/MediaPlayerFragment.java
|
//
// Connect SDK Sample App by LG Electronics
//
// To the extent possible under law, the person who associated CC0 with
// this sample app has waived all copyright and related or neighboring rights
// to the sample app.
//
// You should have received a copy of the CC0 legalcode along with this
// work. If not, see http://creativecommons.org/publicdomain/zero/1.0/.
//
package com.connectsdk.sampler.fragments;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.os.AsyncTask;
import android.os.Bundle;
import android.os.Handler;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.CheckBox;
import android.widget.EditText;
import android.widget.ImageView;
import android.widget.SeekBar;
import android.widget.SeekBar.OnSeekBarChangeListener;
import android.widget.TextView;
import com.connectsdk.core.MediaInfo;
import com.connectsdk.core.SubtitleInfo;
import com.connectsdk.device.ConnectableDevice;
import com.connectsdk.sampler.R;
import com.connectsdk.sampler.util.TestResponseObject;
import com.connectsdk.service.capability.MediaControl;
import com.connectsdk.service.capability.MediaControl.DurationListener;
import com.connectsdk.service.capability.MediaControl.PlayStateListener;
import com.connectsdk.service.capability.MediaControl.PlayStateStatus;
import com.connectsdk.service.capability.MediaControl.PositionListener;
import com.connectsdk.service.capability.MediaPlayer;
import com.connectsdk.service.capability.MediaPlayer.MediaInfoListener;
import com.connectsdk.service.capability.MediaPlayer.MediaLaunchObject;
import com.connectsdk.service.capability.PlaylistControl;
import com.connectsdk.service.capability.VolumeControl;
import com.connectsdk.service.capability.VolumeControl.VolumeListener;
import com.connectsdk.service.capability.listeners.ResponseListener;
import com.connectsdk.service.command.ServiceCommandError;
import com.connectsdk.service.sessions.LaunchSession;
import java.io.InputStream;
import java.util.Locale;
import java.util.Timer;
import java.util.TimerTask;
import java.util.concurrent.TimeUnit;
public class MediaPlayerFragment extends BaseFragment {
public static final String URL_SUBTITLES_WEBVTT =
"http://ec2-54-201-108-205.us-west-2.compute.amazonaws.com/samples/media/sintel_en.vtt";
public static final String URL_SUBTITLE_SRT =
"http://ec2-54-201-108-205.us-west-2.compute.amazonaws.com/samples/media/sintel_en.srt";
public static final String URL_VIDEO_MP4 =
"http://ec2-54-201-108-205.us-west-2.compute.amazonaws.com/samples/media/video.mp4";
public static final String URL_IMAGE_ICON =
"http://ec2-54-201-108-205.us-west-2.compute.amazonaws.com/samples/media/videoIcon.jpg";
public Button photoButton;
public Button videoButton;
public Button audioButton;
public Button playButton;
public Button pauseButton;
public Button stopButton;
public Button rewindButton;
public Button fastForwardButton;
public Button closeButton;
public Button mediaInfoButton;
public Button playlistButton;
public Button previousButton;
public Button nextButton;
public Button jumpButton;
public CheckBox loopingButton;
public CheckBox subtitlesButton;
public LaunchSession launchSession;
public TextView positionTextView;
public TextView durationTextView;
public TextView mediaInfoTextView;
public SeekBar mSeekBar;
public boolean mIsUserSeeking;
public SeekBar mVolumeBar;
public EditText positionTrackView;
public ImageView mediaInfoImageView;
public boolean mSeeking;
public Runnable mRefreshRunnable;
public final int REFRESH_INTERVAL_MS = (int) TimeUnit.SECONDS.toMillis(1);
public Handler mHandler;
public long totalTimeDuration;
public boolean mIsGettingPlayPosition;
boolean isPlayingImage = false;
boolean isPlaying = false;
private MediaControl mMediaControl = null;
private PlaylistControl mPlaylistControl = null;
private Timer refreshTimer;
public TestResponseObject testResponse;
public MediaPlayerFragment() {};
public MediaPlayerFragment(Context context)
{
super(context);
mIsUserSeeking = false;
mSeeking = false;
mIsGettingPlayPosition = false;
testResponse = new TestResponseObject();
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
setRetainInstance(true);
View rootView = inflater.inflate(
R.layout.fragment_media_player, container, false);
photoButton = (Button) rootView.findViewById(R.id.photoButton);
videoButton = (Button) rootView.findViewById(R.id.videoButton);
audioButton = (Button) rootView.findViewById(R.id.audioButton);
playButton = (Button) rootView.findViewById(R.id.playButton);
pauseButton = (Button) rootView.findViewById(R.id.pauseButton);
stopButton = (Button) rootView.findViewById(R.id.stopButton);
rewindButton = (Button) rootView.findViewById(R.id.rewindButton);
fastForwardButton = (Button) rootView.findViewById(R.id.fastForwardButton);
closeButton = (Button) rootView.findViewById(R.id.closeButton);
mediaInfoButton = (Button) rootView.findViewById(R.id.mediaInfo_button);
playlistButton = (Button) rootView.findViewById(R.id.playlistButton);
previousButton = (Button) rootView.findViewById(R.id.previousButton);
nextButton = (Button) rootView.findViewById(R.id.nextButton);
jumpButton = (Button) rootView.findViewById(R.id.jumpButton);
loopingButton = (CheckBox) rootView.findViewById(R.id.loopingButton);
subtitlesButton = (CheckBox) rootView.findViewById(R.id.subtitlesButton);
positionTextView = (TextView) rootView.findViewById(R.id.stream_position);
durationTextView = (TextView) rootView.findViewById(R.id.stream_duration);
mediaInfoTextView = (TextView) rootView.findViewById(R.id.mediaInfo_textView);
mSeekBar = (SeekBar) rootView.findViewById(R.id.stream_seek_bar);
mVolumeBar = (SeekBar) rootView.findViewById(R.id.volume_seek_bar);
positionTrackView = (EditText) rootView.findViewById(R.id.positionText);
mediaInfoImageView = (ImageView) rootView.findViewById(R.id.mediaInfo_imageView);
buttons = new Button[] {
photoButton,
videoButton,
audioButton,
playButton,
pauseButton,
stopButton,
rewindButton,
fastForwardButton,
closeButton,
mediaInfoButton,
playlistButton,
previousButton,
nextButton,
jumpButton,
loopingButton,
subtitlesButton,
};
mHandler = new Handler();
return rootView;
}
@Override
public void setTv(ConnectableDevice tv) {
super.setTv(tv);
if (tv == null) {
stopUpdating();
mMediaControl = null;
mPlaylistControl = null;
}
}
@Override
public void onResume() {
super.onResume();
if (isPlaying) {
startUpdating();
}
}
@Override
public void onPause() {
stopUpdating();
super.onPause();
testResponse = new TestResponseObject(true, TestResponseObject.SuccessCode, TestResponseObject.Paused_Media);
}
@Override
public void enableButtons()
{
if (getTv().hasCapability(MediaPlayer.Display_Image)) {
photoButton.setEnabled(true);
photoButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
showImage();
}
});
}
else {
disableButton(photoButton);
}
totalTimeDuration = -1;
loopingButton.setEnabled(getTv().hasCapability(MediaPlayer.Loop));
subtitlesButton.setEnabled(true);
if (getTv().hasCapability(MediaPlayer.Play_Video)) {
videoButton.setEnabled(true);
videoButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
playVideo();
}
});
}
else {
disableButton(videoButton);
}
if (getTv().hasCapability(MediaPlayer.Play_Audio)) {
audioButton.setEnabled(true);
audioButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
playAudio();
}
});
} else {
disableButton(audioButton);
}
if (getTv().hasCapability(MediaPlayer.Play_Playlist)) {
playlistButton.setEnabled(true);
playlistButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
playM3U();
testResponse = new TestResponseObject(true, TestResponseObject.SuccessCode, TestResponseObject.Played_Playlist);
}
});
} else {
disableButton(playlistButton);
}
mVolumeBar.setEnabled(getTv().hasCapability(VolumeControl.Volume_Set));
mVolumeBar.setOnSeekBarChangeListener(volumeListener);
if (getTv().hasCapability(VolumeControl.Volume_Get)) {
getVolumeControl().getVolume(getVolumeListener);
}
if (getTv().hasCapability(VolumeControl.Volume_Subscribe)) {
getVolumeControl().subscribeVolume(getVolumeListener);
}
if (getTv().hasCapability(MediaPlayer.MediaInfo_Get)) {
mediaInfoButton.setEnabled(true);
mediaInfoButton.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
getMediaPlayer().getMediaInfo(mediaInfoListener);
}
});
}
else
mediaInfoButton.setEnabled(false);
if (getTv().hasCapability(MediaPlayer.MediaInfo_Subscribe)) {
getMediaPlayer().subscribeMediaInfo(mediaInfoListener);
}
if (!isPlaying || !isPlayingImage)
disableMedia();
if (isPlaying) enableMedia();
else if (isPlayingImage) {
closeButton.setEnabled(true);
closeButton.setOnClickListener(closeListener);
stopUpdating();
}
}
private void playAudio() {
String mediaURL = "http://ec2-54-201-108-205.us-west-2.compute.amazonaws.com/samples/media/audio.mp3";
String iconURL = "http://ec2-54-201-108-205.us-west-2.compute.amazonaws.com/samples/media/audioIcon.jpg";
String title = "The Song that Doesn't End";
String description = "Lamb Chop's Play Along";
String mimeType = "audio/mp3";
boolean shouldLoop = loopingButton.isChecked();
getMediaPlayer().playMedia(mediaURL, mimeType, title, description, iconURL, shouldLoop, new MediaPlayer.LaunchListener() {
@Override
public void onError(ServiceCommandError error) {
Log.d("LG", "Error playing audio", error);
stopMediaSession();
}
@Override
public void onSuccess(MediaLaunchObject object) {
Log.d("LG", "Started playing audio");
launchSession = object.launchSession;
testResponse = new TestResponseObject(true, TestResponseObject.SuccessCode, TestResponseObject.Play_Audio);
mMediaControl = object.mediaControl;
mPlaylistControl = object.playlistControl;
stopUpdating();
enableMedia();
isPlaying = true;
}
});
}
private void playM3U() {
String mediaURL = "http://ec2-54-201-108-205.us-west-2.compute.amazonaws.com/samples/media/example-m3u-playlist.m3u";
String iconURL = "http://ec2-54-201-108-205.us-west-2.compute.amazonaws.com/samples/media/audioIcon.jpg";
String title = "Playlist";
String description = "Playlist description";
String mimeType = "application/x-mpegurl";
boolean shouldLoop = loopingButton.isChecked();
getMediaPlayer().playMedia(mediaURL, mimeType, title, description, iconURL, shouldLoop, new MediaPlayer.LaunchListener() {
@Override
public void onError(ServiceCommandError error) {
Log.d("LG", "Error playing audio", error);
stopMediaSession();
}
@Override
public void onSuccess(MediaLaunchObject object) {
Log.d("LG", "Started playing playlist");
launchSession = object.launchSession;
mMediaControl = object.mediaControl;
mPlaylistControl = object.playlistControl;
stopUpdating();
enableMedia();
isPlaying = true;
}
});
}
private void showImage() {
disableMedia();
String imagePath = "http://ec2-54-201-108-205.us-west-2.compute.amazonaws.com/samples/media/photo.jpg";
String mimeType = "image/jpeg";
String title = "Sintel Character Design";
String description = "Blender Open Movie Project";
String icon = "http://ec2-54-201-108-205.us-west-2.compute.amazonaws.com/samples/media/photoIcon.jpg";
getMediaPlayer().displayImage(imagePath, mimeType, title, description, icon, new
MediaPlayer.LaunchListener() {
@Override
public void onError(ServiceCommandError error) {
Log.e("Error", "Error displaying Image", error);
stopMediaSession();
}
@Override
public void onSuccess(MediaLaunchObject object) {
launchSession = object.launchSession;
closeButton.setEnabled(true);
testResponse = new TestResponseObject(true, TestResponseObject.SuccessCode,
TestResponseObject.Display_image);
closeButton.setOnClickListener(closeListener);
stopUpdating();
isPlayingImage = true;
}
});
}
private void playVideo() {
boolean shouldLoop = loopingButton.isChecked();
SubtitleInfo.Builder subtitleBuilder = null;
if (subtitlesButton.isChecked()) {
subtitleBuilder = new SubtitleInfo.Builder(
getTv().hasCapability(MediaPlayer.Subtitle_WebVTT) ? URL_SUBTITLES_WEBVTT :
URL_SUBTITLE_SRT);
subtitleBuilder.setLabel("English").setLanguage("en");
}
MediaInfo mediaInfo = new MediaInfo.Builder(URL_VIDEO_MP4, "video/mp4")
.setTitle("Sintel Trailer")
.setDescription("Blender Open Movie Project")
.setIcon(URL_IMAGE_ICON)
.setSubtitleInfo(subtitleBuilder == null ? null : subtitleBuilder.build())
.build();
getMediaPlayer().playMedia(mediaInfo, shouldLoop, new MediaPlayer.LaunchListener() {
@Override
public void onError(ServiceCommandError error) {
Log.e("Error", "Error playing video", error);
stopMediaSession();
}
public void onSuccess(MediaLaunchObject object) {
launchSession = object.launchSession;
testResponse = new TestResponseObject(true, TestResponseObject.SuccessCode,
TestResponseObject.Play_Video);
mMediaControl = object.mediaControl;
mPlaylistControl = object.playlistControl;
stopUpdating();
enableMedia();
isPlaying = true;
}
});
}
private void stopMediaSession() {
// don't call launchSession.close() here, currently it can close
// a different web app in WebOS
if (launchSession != null) {
launchSession = null;
stopUpdating();
disableMedia();
isPlaying = isPlayingImage = false;
}
}
@Override
public void disableButtons() {
mSeekBar.setEnabled(false);
mVolumeBar.setEnabled(false);
mVolumeBar.setOnSeekBarChangeListener(null);
positionTextView.setEnabled(false);
durationTextView.setEnabled(false);
mediaInfoTextView.setText("");
mediaInfoImageView.setImageBitmap(null);
positionTrackView.setEnabled(false);
loopingButton.setChecked(false);
subtitlesButton.setEnabled(false);
super.disableButtons();
}
protected void onSeekBarMoved(long position) {
if (mMediaControl != null && getTv().hasCapability(MediaControl.Seek)) {
mSeeking = true;
mMediaControl.seek(position, new ResponseListener<Object>() {
@Override
public void onSuccess(Object response) {
Log.d("LG", "Success on Seeking");
mSeeking = false;
startUpdating();
}
@Override
public void onError(ServiceCommandError error) {
Log.w("Connect SDK", "Unable to seek: " + error.getCode());
mSeeking = false;
startUpdating();
}
});
}
}
public void enableMedia() {
playButton.setEnabled(getTv().hasCapability(MediaControl.Play));
pauseButton.setEnabled(getTv().hasCapability(MediaControl.Pause));
stopButton.setEnabled(getTv().hasCapability(MediaControl.Stop));
rewindButton.setEnabled(getTv().hasCapability(MediaControl.Rewind));
fastForwardButton.setEnabled(getTv().hasCapability(MediaControl.FastForward));
mSeekBar.setEnabled(getTv().hasCapability(MediaControl.Seek));
closeButton.setEnabled(getTv().hasCapability(MediaPlayer.Close));
previousButton.setEnabled(getTv().hasCapability(PlaylistControl.Previous));
nextButton.setEnabled(getTv().hasCapability(PlaylistControl.Next));
jumpButton.setEnabled(getTv().hasCapability(PlaylistControl.JumpToTrack));
positionTrackView.setEnabled(getTv().hasCapability(PlaylistControl.JumpToTrack));
fastForwardButton.setOnClickListener(fastForwardListener);
mSeekBar.setOnSeekBarChangeListener(seekListener);
rewindButton.setOnClickListener(rewindListener);
stopButton.setOnClickListener(stopListener);
playButton.setOnClickListener(playListener);
pauseButton.setOnClickListener(pauseListener);
previousButton.setOnClickListener(previousListener);
nextButton.setOnClickListener(nextListener);
jumpButton.setOnClickListener(jumpListener);
closeButton.setOnClickListener(closeListener);
if (getTv().hasCapability(MediaControl.PlayState_Subscribe) && !isPlaying) {
mMediaControl.subscribePlayState(playStateListener);
} else {
if (mMediaControl != null) {
mMediaControl.getDuration(durationListener);
}
startUpdating();
}
}
public void disableMedia() {
closeButton.setEnabled(false);
closeButton.setOnClickListener(null);
stopMedia();
}
public void stopMedia() {
playButton.setEnabled(false);
playButton.setOnClickListener(null);
pauseButton.setEnabled(false);
pauseButton.setOnClickListener(null);
stopButton.setEnabled(false);
stopButton.setOnClickListener(null);
rewindButton.setEnabled(false);
rewindButton.setOnClickListener(null);
fastForwardButton.setEnabled(false);
fastForwardButton.setOnClickListener(null);
previousButton.setEnabled(false);
previousButton.setOnClickListener(null);
nextButton.setEnabled(false);
nextButton.setOnClickListener(null);
jumpButton.setEnabled(false);
jumpButton.setOnClickListener(null);
positionTrackView.setEnabled(false);
mSeekBar.setEnabled(false);
mSeekBar.setOnSeekBarChangeListener(null);
mSeekBar.setProgress(0);
positionTextView.setText("--:--:--");
durationTextView.setText("--:--:--");
totalTimeDuration = -1;
}
public View.OnClickListener playListener = new View.OnClickListener() {
@Override
public void onClick(View view) {
if (mMediaControl != null)
mMediaControl.play(null);
testResponse = new TestResponseObject(true, TestResponseObject.SuccessCode, TestResponseObject.Played_Media);
}
};
public View.OnClickListener pauseListener = new View.OnClickListener() {
@Override
public void onClick(View view) {
if (mMediaControl != null)
mMediaControl.pause(null);
testResponse = new TestResponseObject(true, TestResponseObject.SuccessCode, TestResponseObject.Paused_Media);
}
};
public View.OnClickListener previousListener = new View.OnClickListener() {
@Override
public void onClick(View view) {
if (mPlaylistControl != null)
mPlaylistControl.previous(null);
testResponse = new TestResponseObject(true, TestResponseObject.SuccessCode, TestResponseObject.Previous);
}
};
public View.OnClickListener nextListener = new View.OnClickListener() {
@Override
public void onClick(View view) {
if (mPlaylistControl != null)
mPlaylistControl.next(null);
testResponse = new TestResponseObject(true, TestResponseObject.SuccessCode, TestResponseObject.Next);
}
};
public View.OnClickListener jumpListener = new View.OnClickListener() {
@Override
public void onClick(View view) {
if (mPlaylistControl != null) {
mPlaylistControl.jumpToTrack(Integer.parseInt(positionTrackView.getText().toString()), null);
testResponse = new TestResponseObject(true, TestResponseObject.SuccessCode, TestResponseObject.Jump);
}
}
};
public View.OnClickListener closeListener = new View.OnClickListener() {
@Override
public void onClick(View view) {
if (getMediaPlayer() != null) {
if (launchSession != null)
launchSession.close(null);
launchSession = null;
disableMedia();
stopUpdating();
isPlaying = isPlayingImage = false;
testResponse = new TestResponseObject(true, TestResponseObject.SuccessCode, TestResponseObject.Closed_Media);
}
}
};
public View.OnClickListener stopListener = new View.OnClickListener() {
@Override
public void onClick(View view) {
if (mMediaControl != null)
mMediaControl.stop(new ResponseListener<Object>() {
@Override
public void onSuccess(Object response) {
stopMedia();
stopUpdating();
testResponse = new TestResponseObject(true, TestResponseObject.SuccessCode, TestResponseObject.Stopped_Media);
isPlaying = false;
isPlayingImage = true;
}
@Override
public void onError(ServiceCommandError error) {
}
});
}
};
public View.OnClickListener rewindListener = new View.OnClickListener() {
@Override
public void onClick(View view) {
if (mMediaControl != null)
mMediaControl.rewind(null);
testResponse = new TestResponseObject(true, TestResponseObject.SuccessCode, TestResponseObject.Rewind_Media);
}
};
public View.OnClickListener fastForwardListener = new View.OnClickListener() {
@Override
public void onClick(View view) {
if (mMediaControl != null)
mMediaControl.fastForward(null);
testResponse = new TestResponseObject(true, TestResponseObject.SuccessCode, TestResponseObject.FastForward_Media);
}
};
public OnSeekBarChangeListener seekListener = new SeekBar.OnSeekBarChangeListener() {
@Override
public void onStopTrackingTouch(SeekBar seekBar) {
mIsUserSeeking = false;
mSeekBar.setSecondaryProgress(0);
onSeekBarMoved(seekBar.getProgress());
}
@Override
public void onStartTrackingTouch(SeekBar seekBar) {
mIsUserSeeking = true;
mSeekBar.setSecondaryProgress(seekBar.getProgress());
stopUpdating();
}
@Override
public void onProgressChanged(SeekBar arg0, int arg1, boolean arg2) {
}
};
public OnSeekBarChangeListener volumeListener = new OnSeekBarChangeListener() {
@Override public void onStopTrackingTouch(SeekBar arg0) { }
@Override public void onStartTrackingTouch(SeekBar arg0) { }
@Override
public void onProgressChanged(SeekBar seekBar, int position, boolean fromUser) {
if (fromUser)
getVolumeControl().setVolume((float) mVolumeBar.getProgress() / 100.0f, null);
}
};
public VolumeListener getVolumeListener = new VolumeListener() {
@Override
public void onError(ServiceCommandError error) {
Log.d("LG", "Error getting Volume: " + error);
}
@Override
public void onSuccess(Float object) {
mVolumeBar.setProgress((int) (object * 100.0f));
}
};
public PlayStateListener playStateListener = new PlayStateListener() {
@Override
public void onError(ServiceCommandError error) {
Log.d("LG", "Playstate Listener error = " + error);
}
@Override
public void onSuccess(PlayStateStatus playState) {
Log.d("LG", "Playstate changed | playState = " + playState);
switch (playState) {
case Playing:
startUpdating();
if (mMediaControl != null && getTv().hasCapability(MediaControl.Duration)) {
mMediaControl.getDuration(durationListener);
}
break;
case Finished:
positionTextView.setText("--:--");
durationTextView.setText("--:--");
mSeekBar.setProgress(0);
default:
stopUpdating();
break;
}
}
};
private void startUpdating() {
if (refreshTimer != null) {
refreshTimer.cancel();
refreshTimer = null;
}
refreshTimer = new Timer();
refreshTimer.schedule(new TimerTask() {
@Override
public void run() {
Log.d("LG", "Updating information");
if (mMediaControl != null && getTv() != null && getTv().hasCapability(MediaControl.Position)) {
mMediaControl.getPosition(positionListener);
}
if (mMediaControl != null
&& getTv() != null
&& getTv().hasCapability(MediaControl.Duration)
&& !getTv().hasCapability(MediaControl.PlayState_Subscribe)
&& totalTimeDuration <= 0) {
mMediaControl.getDuration(durationListener);
}
}
}, 0, REFRESH_INTERVAL_MS);
}
private void stopUpdating() {
if (refreshTimer == null)
return;
refreshTimer.cancel();
refreshTimer = null;
}
private PositionListener positionListener = new PositionListener() {
@Override public void onError(ServiceCommandError error) { }
@Override
public void onSuccess(Long position) {
positionTextView.setText(formatTime(position.intValue()));
mSeekBar.setProgress(position.intValue());
}
};
private DurationListener durationListener = new DurationListener() {
@Override public void onError(ServiceCommandError error) { }
@Override
public void onSuccess(Long duration) {
totalTimeDuration = duration;
mSeekBar.setMax(duration.intValue());
durationTextView.setText(formatTime(duration.intValue()));
}
};
private MediaInfoListener mediaInfoListener = new MediaInfoListener() {
@Override
public void onSuccess(MediaInfo mediaInfo) {
String text = mediaInfo.getTitle();
text += "\n";
text += mediaInfo.getDescription();
mediaInfoTextView.setText(text);
final String stringUrl = mediaInfo.getImages().get(0).getUrl();
if (stringUrl!=null) new DownloadImageTask(mediaInfoImageView).execute(stringUrl);
}
@Override
public void onError(ServiceCommandError error) {
}
};
private String formatTime(long millisec) {
int seconds = (int) (millisec / 1000);
int hours = seconds / (60 * 60);
seconds %= (60 * 60);
int minutes = seconds / 60;
seconds %= 60;
String time;
if (hours > 0) {
time = String.format(Locale.US, "%d:%02d:%02d", hours, minutes, seconds);
}
else {
time = String.format(Locale.US, "%d:%02d", minutes, seconds);
}
return time;
}
private class DownloadImageTask extends AsyncTask<String, Void, Bitmap> {
ImageView bmImage;
public DownloadImageTask(ImageView bmImage) {
this.bmImage = bmImage;
}
protected Bitmap doInBackground(String... urls) {
String urldisplay = urls[0];
Bitmap mIcon11 = null;
try {
Log.d("", urldisplay);
InputStream in = new java.net.URL(urldisplay).openStream();
mIcon11 = BitmapFactory.decodeStream(in);
} catch (Exception e) {
Log.e("Error", e.getMessage());
e.printStackTrace();
}
return mIcon11;
}
protected void onPostExecute(Bitmap result) {
bmImage.setImageBitmap(result);
}
}
}
|
Use MediaInfo.Builder in the sampler app
|
src/com/connectsdk/sampler/fragments/MediaPlayerFragment.java
|
Use MediaInfo.Builder in the sampler app
|
|
Java
|
agpl-3.0
|
0e99a684d6b325f15ad2d1182768a3bff240a1ae
| 0
|
Wikimedia-TW/han3_ji7_tsoo1_kian3_WM,sih4sing5hong5/han3_ji7_tsoo1_kian3
|
package cc.core;
/**
 * Normalization form restricted to two-component combination operators:
 * rebalances chains of identical combination operators so the resulting
 * tree leans to the right.
 *
 * @author Ihc
 */
public class 組字式部件正規化
{
    /**
     * Normalizes the component structure in place (recursively).
     *
     * @param 部件
     *            the character-component object to normalize
     */
    public void 正規化(ChineseCharacter 部件)
    {
        if (ChineseCharacterTzuCombinationType.isCombinationType(部件
            .getCodePoint()))
        {
            ChineseCharacterTzu 字部件 = (ChineseCharacterTzu) 部件;
            if (字部件.getType().有結合律無())
            {
                if (字部件.getChildren().length == 2)
                {
                    // Rotate right while the left child uses the same combination
                    // operator as this node (leftChild.getCodePoint() == node.getCodePoint()).
                    while (字部件.getChildren()[0].getCodePoint() == 字部件
                        .getCodePoint())
                    {
                        ChineseCharacter 倒爿部件 = 字部件.getChildren()[0];
                        ChineseCharacterTzu 倒爿字部件 = (ChineseCharacterTzu) 倒爿部件;
                        ChineseCharacter 倒倒爿部件 = 倒爿字部件.getChildren()[0], 倒正爿部件 = 倒爿字部件
                            .getChildren()[1], 正爿部件 = 字部件.getChildren()[1];
                        字部件.getChildren()[0] = 倒倒爿部件;
                        字部件.getChildren()[1] = 倒爿部件;
                        倒爿字部件.getChildren()[0] = 倒正爿部件;
                        倒爿字部件.getChildren()[1] = 正爿部件;
                    }
                }
                else
                {
                    // Message: "more than two component combination operators!!"
                    System.out.println("有三个以上的部件組合符號!!");// TODO log
                }
            }
            for (ChineseCharacter 子部件 : 字部件.getChildren())
                正規化(子部件);
        }
        return;
    }
}
|
src/cc/core/組字式部件正規化.java
|
package cc.core;
/**
* 僅限雙組合符號的正規化型態。
*
* @author Ihc
*/
public class 組字式部件正規化
{
/**
* 正規化部件結構。
*
* @param 部件
* 欲正規化的物件
*/
public void 正規化(ChineseCharacter 部件)
{
if (ChineseCharacterTzuCombinationType.isCombinationType(部件
.getCodePoint()))
{
ChineseCharacterTzu 字部件 = (ChineseCharacterTzu) 部件;
if (字部件.getType().有結合律無())
{
if (字部件.getChildren().length == 2)
{
// 倒爿部件.getCodePoint() == 字部件.getCodePoint())
while (字部件.getChildren()[0].getCodePoint() == 字部件
.getCodePoint())
{
ChineseCharacter 倒爿部件 = 字部件.getChildren()[0];
ChineseCharacterTzu 倒爿字部件 = (ChineseCharacterTzu) 倒爿部件;
ChineseCharacter 倒倒爿部件 = 倒爿字部件.getChildren()[0], 倒正爿部件 = 倒爿字部件
.getChildren()[1], 正爿部件 = 字部件.getChildren()[1];
字部件.getChildren()[0] = 倒倒爿部件;
字部件.getChildren()[1] = 倒爿部件;
倒爿字部件.getChildren()[0] = 倒正爿部件;
倒爿字部件.getChildren()[1] = 正爿部件;
}
}
else
{
System.out.println("有三个以上的部件組合符號!!");
}
}
for (ChineseCharacter 子部件 : 字部件.getChildren())
正規化(子部件);
}
return;
}
}
|
共以後愛改的所在加一个記號 丞宏
|
src/cc/core/組字式部件正規化.java
|
共以後愛改的所在加一个記號 丞宏
|
|
Java
|
lgpl-2.1
|
9792b4b70a0aea37653ad19b5b8c978cd36fa0af
| 0
|
xwiki/xwiki-commons,xwiki/xwiki-commons
|
/*
* See the NOTICE file distributed with this work for additional
* information regarding copyright ownership.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package org.xwiki.extension.repository.xwiki.internal.resources;
import javax.ws.rs.DefaultValue;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.QueryParam;
import org.xwiki.component.annotation.Component;
import org.xwiki.extension.repository.xwiki.Resources;
import org.xwiki.extension.repository.xwiki.model.jaxb.ExtensionsSearchResult;
import org.xwiki.query.Query;
import org.xwiki.query.QueryException;
/**
 * REST resource exposing extension search; matches the pattern against the
 * extension id, name, summary and description.
 *
 * @version $Id$
 * @since 3.2M3
 */
@Component("org.xwiki.extension.repository.xwiki.internal.resources.SearchRESTResource")
@Path(Resources.SEARCH)
public class SearchRESTResource extends AbstractExtensionRESTResource
{
    private static final String WHERE = "extension.id like :pattern or extension.name like :pattern"
        + " or extension.summary like :pattern or extension.description like :pattern";

    /**
     * Searches extensions whose id, name, summary or description contains the given pattern.
     *
     * @param pattern the substring to search for (empty matches everything)
     * @param offset the index of the first result to return
     * @param number the maximum number of results to return, or -1 for no limit
     * @param requireTotalHits when false, skip the (potentially expensive) count query and report -1 total hits
     * @return the page of matching extensions
     * @throws QueryException when the underlying query fails
     * @since 3.3M2
     */
    @GET
    public ExtensionsSearchResult search(@QueryParam(Resources.QPARAM_SEARCH_QUERY) @DefaultValue("") String pattern,
        @QueryParam(Resources.QPARAM_LIST_START) @DefaultValue("0") int offset,
        @QueryParam(Resources.QPARAM_LIST_NUMBER) @DefaultValue("-1") int number,
        @QueryParam(Resources.QPARAM_SEARCH_REQUIRETOTALHITS) @DefaultValue("true") boolean requireTotalHits)
        throws QueryException
    {
        ExtensionsSearchResult result = this.objectFactory.createExtensionsSearchResult();
        result.setOffset(offset);

        // Bind value shared by the count query and the data query (hoisted so
        // it is built once instead of twice).
        String likePattern = '%' + pattern + '%';

        if (requireTotalHits) {
            Query query = createExtensionsCountQuery(null, WHERE);
            query.bindValue("pattern", likePattern);
            result.setTotalHits((int) getExtensionsCountResult(query));
        } else {
            result.setTotalHits(-1);
        }

        // Only run the data query when results can fall inside the requested window.
        if (number != 0 && (result.getTotalHits() == -1 || offset < result.getTotalHits())) {
            Query query = createExtensionsQuery(null, WHERE, offset, number);
            query.bindValue("pattern", likePattern);
            getExtensions(result.getExtensions(), query);
        }

        return result;
    }
}
|
xwiki-commons-core/xwiki-commons-extension/xwiki-platform-extension-repositories/xwiki-platform-extension-repository-xwiki/xwiki-platform-extension-repository-xwiki-server-api/src/main/java/org/xwiki/extension/repository/xwiki/internal/resources/SearchRESTResource.java
|
/*
* See the NOTICE file distributed with this work for additional
* information regarding copyright ownership.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package org.xwiki.extension.repository.xwiki.internal.resources;
import javax.ws.rs.DefaultValue;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.QueryParam;
import org.xwiki.component.annotation.Component;
import org.xwiki.extension.repository.xwiki.Resources;
import org.xwiki.extension.repository.xwiki.model.jaxb.ExtensionsSearchResult;
import org.xwiki.query.Query;
import org.xwiki.query.QueryException;
/**
 * REST resource implementing the simple text search of the XWiki extension repository.
 *
 * @version $Id$
 * @since 3.2M3
 */
@Component("org.xwiki.extension.repository.xwiki.internal.resources.SearchRESTResource")
@Path(Resources.SEARCH)
public class SearchRESTResource extends AbstractExtensionRESTResource
{
    /**
     * Searches the extensions whose id, name or description contains the given pattern.
     *
     * @param pattern the raw search text; wrapped in '%' wildcards before being bound as a query parameter
     * @param offset the index of the first result to return
     * @param number the maximum number of results to return; -1 means unlimited, 0 skips the select entirely
     * @param requireTotalHits when false the count query is skipped and the total is reported as -1
     * @return the search result holding the total hit count and the requested page of matching extensions
     * @throws QueryException when building or executing one of the underlying queries fails
     * @since 3.3M2
     */
    @GET
    public ExtensionsSearchResult search(@QueryParam(Resources.QPARAM_SEARCH_QUERY) @DefaultValue("") String pattern,
        @QueryParam(Resources.QPARAM_LIST_START) @DefaultValue("0") int offset,
        @QueryParam(Resources.QPARAM_LIST_NUMBER) @DefaultValue("-1") int number,
        @QueryParam(Resources.QPARAM_SEARCH_REQUIRETOTALHITS) @DefaultValue("true") boolean requireTotalHits)
        throws QueryException
    {
        // HQL filter shared by the count query and the select query.
        String where =
            "extension.id like :pattern or extension.name like :pattern or extension.description like :pattern";
        ExtensionsSearchResult result = this.objectFactory.createExtensionsSearchResult();
        result.setOffset(offset);
        if (requireTotalHits) {
            Query query = createExtensionsCountQuery(null, where);
            query.bindValue("pattern", '%' + pattern + '%');
            result.setTotalHits((int) getExtensionsCountResult(query));
        } else {
            // Counting was not requested; report an unknown total.
            result.setTotalHits(-1);
        }
        // Fetch the page itself unless an empty page was requested or the offset is past the end.
        if (number != 0 && (result.getTotalHits() == -1 || offset < result.getTotalHits())) {
            Query query = createExtensionsQuery(null, where, offset, number);
            query.bindValue("pattern", '%' + pattern + '%');
            getExtensions(result.getExtensions(), query);
        }
        return result;
    }
}
|
XWIKI-7252: Add summary in the simple search on XR
|
xwiki-commons-core/xwiki-commons-extension/xwiki-platform-extension-repositories/xwiki-platform-extension-repository-xwiki/xwiki-platform-extension-repository-xwiki-server-api/src/main/java/org/xwiki/extension/repository/xwiki/internal/resources/SearchRESTResource.java
|
XWIKI-7252: Add summary in the simple search on XR
|
|
Java
|
apache-2.0
|
0aeebffcb7f97fecbea423b006f3c0bd5389a3d0
| 0
|
reportportal/service-authorization,reportportal/service-authorization
|
package com.epam.reportportal.auth.config;
import com.epam.reportportal.auth.OAuthSuccessHandler;
import com.epam.reportportal.auth.ReportPortalClient;
import com.epam.reportportal.auth.ReportPortalUser;
import com.epam.reportportal.auth.basic.BasicPasswordAuthenticationProvider;
import com.epam.reportportal.auth.basic.DatabaseUserDetailsService;
import com.epam.reportportal.auth.integration.github.GitHubOAuth2UserService;
import com.epam.reportportal.auth.integration.github.GitHubUserReplicator;
import com.epam.reportportal.auth.integration.ldap.ActiveDirectoryAuthProvider;
import com.epam.reportportal.auth.integration.ldap.LdapAuthProvider;
import com.epam.reportportal.auth.integration.ldap.LdapUserReplicator;
import com.epam.reportportal.auth.store.MutableClientRegistrationRepository;
import com.epam.ta.reportportal.dao.IntegrationRepository;
import com.epam.ta.reportportal.dao.OAuthRegistrationRepository;
import com.epam.ta.reportportal.dao.OAuthRegistrationRestrictionRepository;
import com.epam.ta.reportportal.entity.project.ProjectRole;
import com.epam.ta.reportportal.entity.user.UserRole;
import com.google.common.base.Charsets;
import com.google.common.hash.HashFunction;
import com.google.common.hash.Hashing;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;
import org.springframework.core.annotation.Order;
import org.springframework.security.authentication.AuthenticationManager;
import org.springframework.security.authentication.AuthenticationProvider;
import org.springframework.security.authentication.UsernamePasswordAuthenticationToken;
import org.springframework.security.config.annotation.authentication.builders.AuthenticationManagerBuilder;
import org.springframework.security.config.annotation.web.builders.HttpSecurity;
import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter;
import org.springframework.security.config.http.SessionCreationPolicy;
import org.springframework.security.core.Authentication;
import org.springframework.security.core.GrantedAuthority;
import org.springframework.security.core.userdetails.UserDetailsService;
import org.springframework.security.crypto.password.PasswordEncoder;
import org.springframework.security.oauth2.client.userinfo.DelegatingOAuth2UserService;
import org.springframework.security.oauth2.client.userinfo.OAuth2UserRequest;
import org.springframework.security.oauth2.client.userinfo.OAuth2UserService;
import org.springframework.security.oauth2.config.annotation.configurers.ClientDetailsServiceConfigurer;
import org.springframework.security.oauth2.config.annotation.web.configuration.AuthorizationServerConfigurerAdapter;
import org.springframework.security.oauth2.config.annotation.web.configuration.EnableAuthorizationServer;
import org.springframework.security.oauth2.config.annotation.web.configuration.EnableResourceServer;
import org.springframework.security.oauth2.config.annotation.web.configuration.ResourceServerConfigurerAdapter;
import org.springframework.security.oauth2.config.annotation.web.configurers.AuthorizationServerEndpointsConfigurer;
import org.springframework.security.oauth2.config.annotation.web.configurers.AuthorizationServerSecurityConfigurer;
import org.springframework.security.oauth2.core.user.OAuth2User;
import org.springframework.security.oauth2.provider.token.DefaultAccessTokenConverter;
import org.springframework.security.oauth2.provider.token.DefaultTokenServices;
import org.springframework.security.oauth2.provider.token.DefaultUserAuthenticationConverter;
import org.springframework.security.oauth2.provider.token.TokenStore;
import org.springframework.security.oauth2.provider.token.store.JwtAccessTokenConverter;
import org.springframework.security.oauth2.provider.token.store.JwtTokenStore;
import java.util.*;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import static com.google.common.base.Strings.isNullOrEmpty;
@Configuration
public class SecurityConfiguration {

    /**
     * Web-layer security. Everything is protected except the SSO login flow and the
     * monitoring/documentation endpoints. The API is stateless: no sessions, no CSRF,
     * no form login. Authentication providers: basic password, Active Directory, LDAP;
     * OAuth2 login currently delegates to GitHub only.
     */
    @Configuration
    @Order(4)
    public static class GlobalWebSecurityConfig extends WebSecurityConfigurerAdapter {
        public static final String SSO_LOGIN_PATH = "/sso/login";

        @Autowired
        private OAuthSuccessHandler successHandler;

        @Autowired
        private IntegrationRepository authConfigRepository;

        @Autowired
        private LdapUserReplicator ldapUserReplicator;

        @Override
        protected final void configure(HttpSecurity http) throws Exception {
            //@formatter:off
            http
                .antMatcher("/**")
                .authorizeRequests()
                .antMatchers(SSO_LOGIN_PATH + "/**", "/epam/**", "/info", "/health", "/api-docs/**")
                .permitAll()
                .anyRequest()
                .authenticated()
                .and()
                .csrf().disable()
                .formLogin().disable()
                .sessionManagement()
                .sessionCreationPolicy(SessionCreationPolicy.STATELESS)
                .and()
                .httpBasic()
                .and()
                .oauth2Login()
                .clientRegistrationRepository(clientRegistrationRepository())
                .authorizationEndpoint()
                .baseUri(SSO_LOGIN_PATH)
                .and()
                .userInfoEndpoint()
                .userService(oauth2UserService())
                .and()
                .successHandler(successHandler);
            //@formatter:on
        }

        @Autowired
        private OAuthRegistrationRepository oAuthRegistrationRepository;

        /** OAuth client registrations are stored in the database so they can be edited at runtime. */
        @Bean
        public MutableClientRegistrationRepository clientRegistrationRepository() {
            return new MutableClientRegistrationRepository(oAuthRegistrationRepository);
        }

        @Autowired
        private GitHubUserReplicator gitHubUserReplicator;

        @Autowired
        private OAuthRegistrationRestrictionRepository oAuthRegistrationRestrictionRepository;

        // Only GitHub is wired in; additional providers can be appended to the list.
        private OAuth2UserService<OAuth2UserRequest, OAuth2User> oauth2UserService() {
            List<OAuth2UserService<OAuth2UserRequest, OAuth2User>> services = new LinkedList<>();
            services.add(new GitHubOAuth2UserService(gitHubUserReplicator, oAuthRegistrationRestrictionRepository));
            return new DelegatingOAuth2UserService<>(services);
        }

        @Override
        protected void configure(AuthenticationManagerBuilder auth) throws Exception {
            auth.authenticationProvider(basicPasswordAuthProvider())
                .authenticationProvider(activeDirectoryAuthProvider())
                .authenticationProvider(ldapAuthProvider());
        }

        @Bean
        public AuthenticationProvider activeDirectoryAuthProvider() {
            return new ActiveDirectoryAuthProvider(authConfigRepository, ldapUserReplicator);
        }

        @Bean
        public AuthenticationProvider ldapAuthProvider() {
            return new LdapAuthProvider(authConfigRepository, ldapUserReplicator);
        }

        @Bean
        protected UserDetailsService userDetailsService() {
            return new DatabaseUserDetailsService();
        }

        @Bean
        public AuthenticationProvider basicPasswordAuthProvider() {
            BasicPasswordAuthenticationProvider provider = new BasicPasswordAuthenticationProvider();
            provider.setUserDetailsService(userDetailsService());
            provider.setPasswordEncoder(passwordEncoder());
            return provider;
        }

        // NOTE(review): MD5 is a weak password hash; presumably kept for compatibility with
        // existing stored hashes — consider a migration to bcrypt/argon2.
        public PasswordEncoder passwordEncoder() {
            return new MD5PasswordEncoder();
        }

        @Override
        @Primary
        @Bean
        public AuthenticationManager authenticationManager() throws Exception {
            return super.authenticationManager();
        }
    }

    /**
     * OAuth2 authorization server. Remaps the standard /oauth/* endpoints under /sso/oauth/*
     * and issues JWT tokens for the three in-memory clients (ui, api, internal).
     */
    @Configuration
    @Order(3)
    @EnableAuthorizationServer
    public static class AuthorizationServerConfiguration extends AuthorizationServerConfigurerAdapter {
        private final AuthenticationManager authenticationManager;

        @Autowired
        public AuthorizationServerConfiguration(AuthenticationManager authenticationManager) {
            this.authenticationManager = authenticationManager;
        }

        @Override
        public void configure(AuthorizationServerEndpointsConfigurer endpoints) {
            //@formatter:off
            endpoints
                .pathMapping("/oauth/token", "/sso/oauth/token")
                .pathMapping("/oauth/token_key", "/sso/oauth/token_key")
                .pathMapping("/oauth/check_token", "/sso/oauth/check_token")
                .pathMapping("/oauth/authorize", "/sso/oauth/authorize")
                .pathMapping("/oauth/confirm_access", "/sso/oauth/confirm_access")
                .tokenStore(tokenStore())
//                .exceptionTranslator(new OAuthErrorHandler(new ReportPortalExceptionResolver(new DefaultErrorResolver(ExceptionMappings.DEFAULT_MAPPING))))
                .accessTokenConverter(accessTokenConverter())
                .authenticationManager(authenticationManager);
            //@formatter:on
        }

        @Override
        public void configure(ClientDetailsServiceConfigurer clients) throws Exception {
            //@formatter:off
            clients.inMemory()
                .withClient(ReportPortalClient.ui.name())
                .secret("{bcrypt}$2a$10$ka8W./nA2Uiqsd2uOzazdu2lMbipaMB6RJNInB1Y0NMKQzj7plsie")
                .authorizedGrantTypes("refresh_token", "password")
                .scopes("ui")
                .accessTokenValiditySeconds((int) TimeUnit.DAYS.toSeconds(1))
                .and()
                .withClient(ReportPortalClient.api.name())
                .secret("apiman")
                .authorizedGrantTypes("password")
                .scopes("api")
                // -1 means the api-client tokens never expire.
                .accessTokenValiditySeconds(-1)
                .and()
                .withClient(ReportPortalClient.internal.name())
                .secret("internal_man")
                .authorizedGrantTypes("client_credentials").authorities("ROLE_INTERNAL")
                .scopes("internal");
            //@formatter:on
        }

        @Override
        public void configure(AuthorizationServerSecurityConfigurer security) {
            security.tokenKeyAccess("hasAuthority('ROLE_INTERNAL')").checkTokenAccess("hasAuthority('ROLE_INTERNAL')");
        }

        @Bean
        public TokenStore tokenStore() {
            return new JwtTokenStore(accessTokenConverter());
        }

        // NOTE(review): the JWT signing key "123" is hard-coded and trivially guessable —
        // it should come from external configuration/secret storage.
        @Bean
        public JwtAccessTokenConverter accessTokenConverter() {
            JwtAccessTokenConverter converter = new JwtAccessTokenConverter();
            converter.setSigningKey("123");
            DefaultAccessTokenConverter converter1 = new DefaultAccessTokenConverter();
            converter1.setUserTokenConverter(new ReportPortalAuthenticationConverter());
            converter.setAccessTokenConverter(converter1);
            return converter;
        }

        @Bean
        @Primary
        public DefaultTokenServices tokenServices() {
            DefaultTokenServices defaultTokenServices = new DefaultTokenServices();
            defaultTokenServices.setTokenStore(tokenStore());
            defaultTokenServices.setSupportRefreshToken(true);
            defaultTokenServices.setAuthenticationManager(authenticationManager);
            return defaultTokenServices;
        }
    }

    /**
     * Resource-server rules for the token-protected endpoints: /settings requires the
     * ADMINISTRATOR role, /sso/internal requires the INTERNAL role.
     */
    @Configuration
    @EnableResourceServer
    public static class ResourceServerAuthConfiguration extends ResourceServerConfigurerAdapter {
        @Override
        public void configure(HttpSecurity http) throws Exception {
            http.requestMatchers()
                .antMatchers("/sso/me/**", "/sso/internal/**", "/settings/**")
                .and()
                .authorizeRequests()
                .antMatchers("/settings/**")
                .hasRole("ADMINISTRATOR")
                .antMatchers("/sso/internal/**")
                .hasRole("INTERNAL")
                .anyRequest()
                .authenticated()
                .and()
                .sessionManagement()
                .sessionCreationPolicy(SessionCreationPolicy.STATELESS);
        }
    }

    /**
     * Serializes ReportPortal-specific claims (userId, userRole, project roles) into the JWT
     * and restores a {@link ReportPortalUser} principal from them on the way back.
     */
    static class ReportPortalAuthenticationConverter extends DefaultUserAuthenticationConverter {
        @Override
        public Map<String, ?> convertUserAuthentication(Authentication authentication) {
            @SuppressWarnings("unchecked")
            Map<String, Object> claims = (Map<String, Object>) super.convertUserAuthentication(authentication);
            ReportPortalUser principal = (ReportPortalUser) authentication.getPrincipal();
            claims.put("userId", principal.getUserId());
            claims.put("userRole", principal.getUserRole());
            claims.put("projects", principal.getProjectDetails());
            return claims;
        }

        @Override
        public Authentication extractAuthentication(Map<String, ?> map) {
            Authentication auth = super.extractAuthentication(map);
            if (null != auth) {
                UsernamePasswordAuthenticationToken user = ((UsernamePasswordAuthenticationToken) auth);
                Collection<GrantedAuthority> authorities = user.getAuthorities();
                // Claims are optional: tokens issued before these claims existed still deserialize.
                Long userId = map.containsKey("userId") ? parseId(map.get("userId")) : null;
                UserRole userRole = map.containsKey("userRole") ? UserRole.valueOf(map.get("userRole").toString()) : null;
                Map<String, Map> projects = map.containsKey("projects") ? (Map) map.get("projects") : Collections.emptyMap();
                Map<String, ReportPortalUser.ProjectDetails> collect = projects.entrySet()
                    .stream()
                    .collect(Collectors.toMap(Map.Entry::getKey,
                        e -> new ReportPortalUser.ProjectDetails(parseId(e.getValue().get("projectId")),
                            ProjectRole.valueOf((String) e.getValue().get("projectRole"))
                        )
                    ));
                // The password is never round-tripped through the token; use a placeholder.
                return new UsernamePasswordAuthenticationToken(new ReportPortalUser(user.getName(),
                    "N/A",
                    authorities,
                    userId,
                    userRole,
                    collect
                ), user.getCredentials(), authorities);
            }
            return null;
        }

        // JSON deserialization may yield Integer for small ids; normalize to Long.
        private Long parseId(Object id) {
            if (id instanceof Integer) {
                return Long.valueOf((Integer) id);
            }
            return (Long) id;
        }
    }

    /**
     * Legacy MD5 password encoder.
     * NOTE(review): MD5 is cryptographically broken for password storage; also the
     * equality check is not constant-time. Kept, presumably, for compatibility with
     * pre-existing hashes — confirm before replacing.
     */
    public static class MD5PasswordEncoder implements PasswordEncoder {
        private HashFunction hasher = Hashing.md5();

        @Override
        public String encode(CharSequence rawPassword) {
            return hasher.newHasher().putString(rawPassword, Charsets.UTF_8).hash().toString();
        }

        @Override
        public boolean matches(CharSequence rawPassword, String encodedPassword) {
            if (isNullOrEmpty(encodedPassword)) {
                return false;
            }
            return encodedPassword.equals(hasher.newHasher().putString(rawPassword, Charsets.UTF_8).hash().toString());
        }
    }
}
|
src/main/java/com/epam/reportportal/auth/config/SecurityConfiguration.java
|
package com.epam.reportportal.auth.config;
import com.epam.reportportal.auth.OAuthSuccessHandler;
import com.epam.reportportal.auth.ReportPortalClient;
import com.epam.reportportal.auth.ReportPortalUser;
import com.epam.reportportal.auth.basic.BasicPasswordAuthenticationProvider;
import com.epam.reportportal.auth.basic.DatabaseUserDetailsService;
import com.epam.reportportal.auth.integration.github.GitHubOAuth2UserService;
import com.epam.reportportal.auth.integration.github.GitHubUserReplicator;
import com.epam.reportportal.auth.integration.ldap.ActiveDirectoryAuthProvider;
import com.epam.reportportal.auth.integration.ldap.LdapAuthProvider;
import com.epam.reportportal.auth.integration.ldap.LdapUserReplicator;
import com.epam.reportportal.auth.store.MutableClientRegistrationRepository;
import com.epam.ta.reportportal.dao.IntegrationRepository;
import com.epam.ta.reportportal.dao.OAuthRegistrationRepository;
import com.epam.ta.reportportal.dao.OAuthRegistrationRestrictionRepository;
import com.epam.ta.reportportal.entity.project.ProjectRole;
import com.epam.ta.reportportal.entity.user.UserRole;
import com.google.common.base.Charsets;
import com.google.common.hash.HashFunction;
import com.google.common.hash.Hashing;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;
import org.springframework.core.annotation.Order;
import org.springframework.security.authentication.AuthenticationManager;
import org.springframework.security.authentication.AuthenticationProvider;
import org.springframework.security.authentication.UsernamePasswordAuthenticationToken;
import org.springframework.security.config.annotation.authentication.builders.AuthenticationManagerBuilder;
import org.springframework.security.config.annotation.web.builders.HttpSecurity;
import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter;
import org.springframework.security.config.http.SessionCreationPolicy;
import org.springframework.security.core.Authentication;
import org.springframework.security.core.GrantedAuthority;
import org.springframework.security.core.userdetails.UserDetailsService;
import org.springframework.security.crypto.password.PasswordEncoder;
import org.springframework.security.oauth2.client.userinfo.DelegatingOAuth2UserService;
import org.springframework.security.oauth2.client.userinfo.OAuth2UserRequest;
import org.springframework.security.oauth2.client.userinfo.OAuth2UserService;
import org.springframework.security.oauth2.config.annotation.configurers.ClientDetailsServiceConfigurer;
import org.springframework.security.oauth2.config.annotation.web.configuration.AuthorizationServerConfigurerAdapter;
import org.springframework.security.oauth2.config.annotation.web.configuration.EnableAuthorizationServer;
import org.springframework.security.oauth2.config.annotation.web.configuration.EnableResourceServer;
import org.springframework.security.oauth2.config.annotation.web.configuration.ResourceServerConfigurerAdapter;
import org.springframework.security.oauth2.config.annotation.web.configurers.AuthorizationServerEndpointsConfigurer;
import org.springframework.security.oauth2.config.annotation.web.configurers.AuthorizationServerSecurityConfigurer;
import org.springframework.security.oauth2.core.user.OAuth2User;
import org.springframework.security.oauth2.provider.token.DefaultAccessTokenConverter;
import org.springframework.security.oauth2.provider.token.DefaultTokenServices;
import org.springframework.security.oauth2.provider.token.DefaultUserAuthenticationConverter;
import org.springframework.security.oauth2.provider.token.TokenStore;
import org.springframework.security.oauth2.provider.token.store.JwtAccessTokenConverter;
import org.springframework.security.oauth2.provider.token.store.JwtTokenStore;
import java.util.*;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import static com.google.common.base.Strings.isNullOrEmpty;
@Configuration
public class SecurityConfiguration {

    /**
     * Web-layer security. Everything is protected except the SSO login flow and the
     * monitoring/documentation endpoints. The API is stateless: no sessions, no CSRF,
     * no form login. Authentication providers: basic password, Active Directory, LDAP;
     * OAuth2 login currently delegates to GitHub only.
     */
    @Configuration
    @Order(4)
    public static class GlobalWebSecurityConfig extends WebSecurityConfigurerAdapter {
        public static final String SSO_LOGIN_PATH = "/sso/login";

        @Autowired
        private OAuthSuccessHandler successHandler;

        @Autowired
        private IntegrationRepository authConfigRepository;

        @Autowired
        private LdapUserReplicator ldapUserReplicator;

        @Override
        protected final void configure(HttpSecurity http) throws Exception {
            //@formatter:off
            http
                .antMatcher("/**")
                .authorizeRequests()
                .antMatchers(SSO_LOGIN_PATH + "/**", "/epam/**", "/info", "/health", "/api-docs/**")
                .permitAll()
                .anyRequest()
                .authenticated()
                .and()
                .csrf().disable()
                .formLogin().disable()
                .sessionManagement()
                .sessionCreationPolicy(SessionCreationPolicy.STATELESS)
                .and()
                .httpBasic()
                .and()
                .oauth2Login()
                .clientRegistrationRepository(clientRegistrationRepository())
                .authorizationEndpoint()
                .baseUri(SSO_LOGIN_PATH)
                .and()
                .userInfoEndpoint()
                .userService(oauth2UserService())
                .and()
                .successHandler(successHandler);
            //@formatter:on
        }

        @Autowired
        private OAuthRegistrationRepository oAuthRegistrationRepository;

        /** OAuth client registrations are stored in the database so they can be edited at runtime. */
        @Bean
        public MutableClientRegistrationRepository clientRegistrationRepository() {
            return new MutableClientRegistrationRepository(oAuthRegistrationRepository);
        }

        @Autowired
        private GitHubUserReplicator gitHubUserReplicator;

        @Autowired
        private OAuthRegistrationRestrictionRepository oAuthRegistrationRestrictionRepository;

        // Only GitHub is wired in; additional providers can be appended to the list.
        private OAuth2UserService<OAuth2UserRequest, OAuth2User> oauth2UserService() {
            List<OAuth2UserService<OAuth2UserRequest, OAuth2User>> services = new LinkedList<>();
            services.add(new GitHubOAuth2UserService(gitHubUserReplicator, oAuthRegistrationRestrictionRepository));
            return new DelegatingOAuth2UserService<>(services);
        }

        @Override
        protected void configure(AuthenticationManagerBuilder auth) throws Exception {
            auth.authenticationProvider(basicPasswordAuthProvider())
                .authenticationProvider(activeDirectoryAuthProvider())
                .authenticationProvider(ldapAuthProvider());
        }

        @Bean
        public AuthenticationProvider activeDirectoryAuthProvider() {
            return new ActiveDirectoryAuthProvider(authConfigRepository, ldapUserReplicator);
        }

        @Bean
        public AuthenticationProvider ldapAuthProvider() {
            return new LdapAuthProvider(authConfigRepository, ldapUserReplicator);
        }

        @Bean
        protected UserDetailsService userDetailsService() {
            return new DatabaseUserDetailsService();
        }

        @Bean
        public AuthenticationProvider basicPasswordAuthProvider() {
            BasicPasswordAuthenticationProvider provider = new BasicPasswordAuthenticationProvider();
            provider.setUserDetailsService(userDetailsService());
            provider.setPasswordEncoder(passwordEncoder());
            return provider;
        }

        // NOTE(review): MD5 is a weak password hash; presumably kept for compatibility with
        // existing stored hashes — consider a migration to bcrypt/argon2.
        public PasswordEncoder passwordEncoder() {
            return new MD5PasswordEncoder();
        }

        @Override
        @Primary
        @Bean
        public AuthenticationManager authenticationManager() throws Exception {
            return super.authenticationManager();
        }
    }

    /**
     * OAuth2 authorization server. Remaps the standard /oauth/* endpoints under /sso/oauth/*
     * and issues JWT tokens for the three in-memory clients (ui, api, internal).
     */
    @Configuration
    @Order(3)
    @EnableAuthorizationServer
    public static class AuthorizationServerConfiguration extends AuthorizationServerConfigurerAdapter {
        private final AuthenticationManager authenticationManager;

        @Autowired
        public AuthorizationServerConfiguration(AuthenticationManager authenticationManager) {
            this.authenticationManager = authenticationManager;
        }

        @Override
        public void configure(AuthorizationServerEndpointsConfigurer endpoints) {
            //@formatter:off
            endpoints
                .pathMapping("/oauth/token", "/sso/oauth/token")
                .pathMapping("/oauth/token_key", "/sso/oauth/token_key")
                .pathMapping("/oauth/check_token", "/sso/oauth/check_token")
                .pathMapping("/oauth/authorize", "/sso/oauth/authorize")
                .pathMapping("/oauth/confirm_access", "/sso/oauth/confirm_access")
                .tokenStore(tokenStore())
//                .exceptionTranslator(new OAuthErrorHandler(new ReportPortalExceptionResolver(new DefaultErrorResolver(ExceptionMappings.DEFAULT_MAPPING))))
                .accessTokenConverter(accessTokenConverter())
                .authenticationManager(authenticationManager);
            //@formatter:on
        }

        @Override
        public void configure(ClientDetailsServiceConfigurer clients) throws Exception {
            //@formatter:off
            clients.inMemory()
                .withClient(ReportPortalClient.ui.name())
                .secret("{bcrypt}$2a$10$ka8W./nA2Uiqsd2uOzazdu2lMbipaMB6RJNInB1Y0NMKQzj7plsie")
                .authorizedGrantTypes("refresh_token", "password")
                .scopes("ui")
                .accessTokenValiditySeconds((int) TimeUnit.DAYS.toSeconds(1))
                .and()
                .withClient(ReportPortalClient.api.name())
                .secret("apiman")
                .authorizedGrantTypes("password")
                .scopes("api")
                // -1 means the api-client tokens never expire.
                .accessTokenValiditySeconds(-1)
                .and()
                .withClient(ReportPortalClient.internal.name())
                .secret("internal_man")
                .authorizedGrantTypes("client_credentials").authorities("ROLE_INTERNAL")
                .scopes("internal");
            //@formatter:on
        }

        @Override
        public void configure(AuthorizationServerSecurityConfigurer security) {
            security.tokenKeyAccess("hasAuthority('ROLE_INTERNAL')").checkTokenAccess("hasAuthority('ROLE_INTERNAL')");
        }

        @Bean
        public TokenStore tokenStore() {
            return new JwtTokenStore(accessTokenConverter());
        }

        // NOTE(review): the JWT signing key "123" is hard-coded and trivially guessable —
        // it should come from external configuration/secret storage.
        @Bean
        public JwtAccessTokenConverter accessTokenConverter() {
            JwtAccessTokenConverter converter = new JwtAccessTokenConverter();
            converter.setSigningKey("123");
            DefaultAccessTokenConverter converter1 = new DefaultAccessTokenConverter();
            converter1.setUserTokenConverter(new ReportPortalAuthenticationConverter());
            converter.setAccessTokenConverter(converter1);
            return converter;
        }

        @Bean
        @Primary
        public DefaultTokenServices tokenServices() {
            DefaultTokenServices defaultTokenServices = new DefaultTokenServices();
            defaultTokenServices.setTokenStore(tokenStore());
            defaultTokenServices.setSupportRefreshToken(true);
            defaultTokenServices.setAuthenticationManager(authenticationManager);
            return defaultTokenServices;
        }
    }

    /**
     * Resource-server rules for the token-protected endpoints: /settings requires the
     * ADMINISTRATOR role, /sso/internal requires the INTERNAL role.
     */
    @Configuration
    @EnableResourceServer
    public static class ResourceServerAuthConfiguration extends ResourceServerConfigurerAdapter {
        @Override
        public void configure(HttpSecurity http) throws Exception {
            http.requestMatchers()
                .antMatchers("/sso/me/**", "/sso/internal/**", "/settings/**")
                .and()
                .authorizeRequests()
                .antMatchers("/settings/**")
                .hasRole("ADMINISTRATOR")
                .antMatchers("/sso/internal/**")
                .hasRole("INTERNAL")
                .anyRequest()
                .authenticated()
                .and()
                .sessionManagement()
                .sessionCreationPolicy(SessionCreationPolicy.STATELESS);
        }
    }

    /**
     * Serializes ReportPortal-specific claims (userId, userRole, project roles) into the JWT
     * and restores a {@link ReportPortalUser} principal from them on the way back.
     */
    static class ReportPortalAuthenticationConverter extends DefaultUserAuthenticationConverter {
        @Override
        public Map<String, ?> convertUserAuthentication(Authentication authentication) {
            @SuppressWarnings("unchecked")
            Map<String, Object> claims = (Map<String, Object>) super.convertUserAuthentication(authentication);
            ReportPortalUser principal = (ReportPortalUser) authentication.getPrincipal();
            claims.put("userId", principal.getUserId());
            claims.put("userRole", principal.getUserRole());
            claims.put("projects", principal.getProjectDetails());
            return claims;
        }

        @Override
        public Authentication extractAuthentication(Map<String, ?> map) {
            Authentication auth = super.extractAuthentication(map);
            if (null != auth) {
                UsernamePasswordAuthenticationToken user = ((UsernamePasswordAuthenticationToken) auth);
                Collection<GrantedAuthority> authorities = user.getAuthorities();
                // Claims are optional: tokens issued before these claims existed still deserialize.
                Long userId = map.containsKey("userId") ? parseId(map.get("userId")) : null;
                UserRole userRole = map.containsKey("userRole") ? UserRole.valueOf(map.get("userRole").toString()) : null;
                Map<String, Map> projects = map.containsKey("projects") ? (Map) map.get("projects") : Collections.emptyMap();
                Map<String, ReportPortalUser.ProjectDetails> collect = projects.entrySet()
                    .stream()
                    .collect(Collectors.toMap(Map.Entry::getKey,
                        e -> new ReportPortalUser.ProjectDetails(parseId(e.getValue().get("projectId")),
                            ProjectRole.valueOf((String) e.getValue().get("projectRole"))
                        )
                    ));
                // The password is never round-tripped through the token; use a placeholder.
                return new UsernamePasswordAuthenticationToken(new ReportPortalUser(user.getName(),
                    "N/A",
                    authorities,
                    userId,
                    userRole,
                    collect
                ), user.getCredentials(), authorities);
            }
            return null;
        }

        // JSON deserialization may yield Integer for small ids; normalize to Long.
        private Long parseId(Object id) {
            if (id instanceof Integer) {
                return Long.valueOf((Integer) id);
            }
            return (Long) id;
        }
    }

    /**
     * Legacy MD5 password encoder.
     * NOTE(review): MD5 is cryptographically broken for password storage; also the
     * equality check is not constant-time. Kept, presumably, for compatibility with
     * pre-existing hashes — confirm before replacing.
     */
    public static class MD5PasswordEncoder implements PasswordEncoder {
        private HashFunction hasher = Hashing.md5();

        @Override
        public String encode(CharSequence rawPassword) {
            return hasher.newHasher().putString(rawPassword, Charsets.UTF_8).hash().toString();
        }

        @Override
        public boolean matches(CharSequence rawPassword, String encodedPassword) {
            if (isNullOrEmpty(encodedPassword)) {
                return false;
            }
            return encodedPassword.equals(hasher.newHasher().putString(rawPassword, Charsets.UTF_8).hash().toString());
        }
    }
}
|
code style refactoring
|
src/main/java/com/epam/reportportal/auth/config/SecurityConfiguration.java
|
code style refactoring
|
|
Java
|
apache-2.0
|
18418da6462ec716d8d53e75945783649f2e020e
| 0
|
watson-developer-cloud/java-sdk,watson-developer-cloud/java-sdk,watson-developer-cloud/java-sdk,watson-developer-cloud/java-sdk
|
/*
* (C) Copyright IBM Corp. 2019, 2021.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.ibm.watson.assistant.v1;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import com.ibm.cloud.sdk.core.http.Response;
import com.ibm.cloud.sdk.core.http.ServiceCallback;
import com.ibm.cloud.sdk.core.security.BasicAuthenticator;
import com.ibm.cloud.sdk.core.service.exception.NotFoundException;
import com.ibm.cloud.sdk.core.service.exception.UnauthorizedException;
import com.ibm.watson.assistant.v1.model.*;
import com.ibm.watson.common.RetryRunner;
import io.reactivex.Single;
import io.reactivex.functions.Consumer;
import io.reactivex.schedulers.Schedulers;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.runner.RunWith;
/** Integration test for the {@link Assistant}. */
@RunWith(RetryRunner.class)
public class AssistantServiceIT extends AssistantServiceTest {
  // Intent created on demand by createExampleIntent() and shared by the example tests.
  private String exampleIntent;
  // Service client under test; initialized in setUp().
  private Assistant service;
  // Workspace targeted by all workspace-scoped calls; initialized in setUp().
  private String workspaceId;
  // ISO-8601 timestamp format. NOTE(review): SimpleDateFormat is not thread-safe;
  // acceptable only while these tests run single-threaded — confirm.
  private DateFormat isoDateFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSX");
  /**
   * Sets up the tests: caches the configured service client and workspace id resolved by the base
   * class.
   *
   * @throws Exception if the base-class setup fails
   */
  @Override
  @Before
  public void setUp() throws Exception {
    super.setUp();
    // The base class builds the authenticated client and resolves the workspace.
    this.service = getService();
    this.workspaceId = getWorkspaceId();
  }
/** Test README. */
@Test
public void testReadme() {
MessageInput input = new MessageInput();
input.setText("Hi");
MessageOptions options = new MessageOptions.Builder(workspaceId).input(input).build();
MessageResponse response = service.message(options).execute().getResult();
assertNotNull(response);
}
/** Test RuntimeResponseGenericRuntimeResponseTypeText. */
@Test
public void testRuntimeResponseGenericRuntimeResponseTypeText() {
MessageInput input = new MessageInput();
input.setText("Hi");
MessageOptions options = new MessageOptions.Builder(workspaceId).input(input).build();
MessageResponse response = service.message(options).execute().getResult();
System.out.println(response);
RuntimeResponseGenericRuntimeResponseTypeText
runtimeResponseGenericRuntimeResponseTypeText =
(RuntimeResponseGenericRuntimeResponseTypeText)
response.getOutput().getGeneric().get(0);
assertNotNull(runtimeResponseGenericRuntimeResponseTypeText);
}
/** Test RuntimeResponseGenericRuntimeResponseTypeChannelTransfer. */
@Test
public void testRuntimeResponseGenericRuntimeResponseTypeChannelTransfer() {
MessageInput input = new MessageInput();
input.setText("test sdk");
MessageOptions options = new MessageOptions.Builder(workspaceId).input(input).build();
MessageResponse response = service.message(options).execute().getResult();
System.out.println(response);
RuntimeResponseGenericRuntimeResponseTypeChannelTransfer
runtimeResponseGenericRuntimeResponseTypeChannelTransfer =
(RuntimeResponseGenericRuntimeResponseTypeChannelTransfer)
response.getOutput().getGeneric().get(0);
ChannelTransferInfo channelTransferInfo =
runtimeResponseGenericRuntimeResponseTypeChannelTransfer.transferInfo();
assertNotNull(channelTransferInfo);
}
/** Test RuntimeResponseGenericRuntimeResponseTypeChannelTransfer. */
@Test
public void testRuntimeResponseGenericRuntimeResponseTypeChannelTransferRequest() {
MessageInput input = new MessageInput();
input.setText("test sdk");
ChannelTransferTargetChat channelTransferTargetChat = new ChannelTransferTargetChat.Builder()
.url("google.com").build();
ChannelTransferTarget transferTarget = new ChannelTransferTarget.Builder()
.chat(channelTransferTargetChat).build();
ChannelTransferInfo channelTransferInfo = new ChannelTransferInfo.Builder()
.target(transferTarget).build();
RuntimeResponseGenericRuntimeResponseTypeChannelTransfer testTransfer =
new RuntimeResponseGenericRuntimeResponseTypeChannelTransfer.Builder()
.transferInfo(channelTransferInfo)
.responseType("channel_transfer")
.messageToUser("testing message").build();
ArrayList<LogMessage> list = new ArrayList<LogMessage>();
ArrayList<String> listString = new ArrayList<>();
OutputData outputData = new OutputData.Builder()
.addGeneric(testTransfer)
.logMessages(list)
.text(listString).build();
MessageOptions options = new MessageOptions.Builder(workspaceId)
.input(input)
.output(outputData)
.build();
MessageResponse response = service.message(options).execute().getResult();
System.out.println(response);
RuntimeResponseGenericRuntimeResponseTypeChannelTransfer
runtimeResponseGenericRuntimeResponseTypeChannelTransfer =
(RuntimeResponseGenericRuntimeResponseTypeChannelTransfer)
response.getOutput().getGeneric().get(0);
// ChannelTransferInfo channelTransferInfo =
// runtimeResponseGenericRuntimeResponseTypeChannelTransfer.transferInfo();
//
// assertNull(channelTransferInfo);
}
  /**
   * Exercises the three request styles offered by the SDK for {@code message()}: synchronous
   * {@code execute()}, callback-based {@code enqueue()}, and RxJava {@code reactiveRequest()}.
   *
   * @throws InterruptedException if the final wait is interrupted
   */
  @Test
  public void testExample() throws InterruptedException {
    MessageInput input = new MessageInput();
    input.setText("Hi");
    MessageOptions options = new MessageOptions.Builder(workspaceId).input(input).build();
    // sync: blocks until the response is available
    MessageResponse response = service.message(options).execute().getResult();
    System.out.println(response);
    // async: the callback runs on a background thread once the call completes
    service
        .message(options)
        .enqueue(
            new ServiceCallback<MessageResponse>() {
              @Override
              public void onResponse(Response<MessageResponse> response) {
                System.out.println(response.getResult());
              }

              @Override
              public void onFailure(Exception e) {}
            });
    // reactive: subscribe on a single-threaded RxJava scheduler
    Single<Response<MessageResponse>> observableRequest =
        service.message(options).reactiveRequest();
    observableRequest
        .subscribeOn(Schedulers.single())
        .subscribe(
            new Consumer<Response<MessageResponse>>() {
              @Override
              public void accept(Response<MessageResponse> response) throws Exception {
                System.out.println(response.getResult());
              }
            });
    // Give the async and reactive calls time to finish before the test method returns;
    // there is no join/latch here, so the 5s pause is the only synchronization.
    Thread.sleep(5000);
  }
/** Ping bad credentials throws exception. */
@Test(expected = UnauthorizedException.class)
public void pingBadCredentialsThrowsException() {
Assistant badService = new Assistant("2019-02-28", new BasicAuthenticator("foo", "bar"));
MessageOptions options = new MessageOptions.Builder(workspaceId).build();
badService.message(options).execute().getResult();
}
/** Test start a conversation without message. */
@Test()
public void testStartAConversationWithoutMessage() {
MessageOptions options = new MessageOptions.Builder(workspaceId).build();
service.message(options).execute().getResult();
}
  /**
   * Drives a four-turn conversation, carrying the context returned by each turn into the next so
   * the dialog state advances.
   *
   * @throws InterruptedException if the inter-turn pause is interrupted
   */
  @Test
  public void testSendMessages() throws InterruptedException {
    final String[] messages = new String[] {"turn ac on", "turn right", "no", "yes"};
    Context context = null;
    MessageInput input = new MessageInput();
    for (final String message : messages) {
      input.setText(message);
      MessageOptions request =
          new MessageOptions.Builder(workspaceId)
              .input(input)
              .alternateIntents(true)
              .context(context)
              .nodesVisitedDetails(true)
              .build();
      // On the final turn, additionally force-feed an "off_topic" intent with full confidence.
      if (message.equals("yes")) {
        RuntimeIntent offTopic =
            new RuntimeIntent.Builder().intent("off_topic").confidence(1.0).build();
        request = request.newBuilder().addIntent(offTopic).build();
      }
      MessageResponse response = service.message(request).execute().getResult();
      assertMessageFromService(response);
      assertNotNull(response.getOutput().getNodesVisitedDetails());
      // Copy every property of the returned context into a fresh Context for the next turn.
      context = new Context();
      for (String propName : response.getContext().getPropertyNames()) {
        context.put(propName, response.getContext().get(propName));
      }
      // Short pause between turns — presumably to avoid service rate limiting; confirm.
      Thread.sleep(500);
    }
  }
/**
* Assert {@link MessageResponse} from service.
*
* @param message the message from the {@link Assistant}
*/
private void assertMessageFromService(MessageResponse message) {
assertNotNull(message);
assertNotNull(message.getEntities());
assertNotNull(message.getIntents());
}
/** Test message with null. */
@Test(expected = IllegalArgumentException.class)
public void testMessageWithNull() {
service.message(null).execute().getResult();
}
/** Test to string. */
@Test
public void testToString() {
assertNotNull(service.toString());
}
/** Test createCounterexample. */
@Test
public void testCreateCounterexample() {
String counterExampleText =
"Make me a " + UUID.randomUUID().toString() + " sandwich"; // gotta be unique
CreateCounterexampleOptions createOptions =
new CreateCounterexampleOptions.Builder(workspaceId, counterExampleText).build();
Counterexample response = service.createCounterexample(createOptions).execute().getResult();
try {
assertNotNull(response);
assertNotNull(response.text());
assertEquals(response.text(), counterExampleText);
} catch (Exception ex) {
fail(ex.getMessage());
} finally {
// Clean up
DeleteCounterexampleOptions deleteOptions =
new DeleteCounterexampleOptions.Builder(workspaceId, counterExampleText).build();
service.deleteCounterexample(deleteOptions).execute();
}
}
/** Test deleteCounterexample. */
@Test
public void testDeleteCounterexample() {
String counterExampleText =
"Make me a " + UUID.randomUUID().toString() + " sandwich"; // gotta be unique
CreateCounterexampleOptions createOptions =
new CreateCounterexampleOptions.Builder(workspaceId, counterExampleText).build();
service.createCounterexample(createOptions).execute().getResult();
DeleteCounterexampleOptions deleteOptions =
new DeleteCounterexampleOptions.Builder(workspaceId, counterExampleText).build();
service.deleteCounterexample(deleteOptions).execute();
try {
GetCounterexampleOptions getOptions =
new GetCounterexampleOptions.Builder(workspaceId, counterExampleText).build();
service.getCounterexample(getOptions).execute();
fail("deleteCounterexample failed");
} catch (Exception ex) {
// Expected result
assertTrue(ex instanceof NotFoundException);
}
}
  /** Tests fetching a counterexample and validating its text and audit timestamps. */
  @Test
  public void testGetCounterexample() {
    Date start = new Date();
    String counterExampleText =
        "Make me a " + UUID.randomUUID().toString() + " sandwich"; // gotta be unique
    CreateCounterexampleOptions createOptions =
        new CreateCounterexampleOptions.Builder(workspaceId, counterExampleText).build();
    service.createCounterexample(createOptions).execute().getResult();
    try {
      // includeAudit(true) asks the service to return created/updated timestamps.
      GetCounterexampleOptions getOptions =
          new GetCounterexampleOptions.Builder(workspaceId, counterExampleText)
              .includeAudit(true)
              .build();
      Counterexample response = service.getCounterexample(getOptions).execute().getResult();
      assertNotNull(response);
      assertNotNull(response.text());
      assertEquals(response.text(), counterExampleText);
      assertNotNull(response.created());
      assertNotNull(response.updated());
      // fuzzyBefore/fuzzyAfter come from the test superclass; presumably tolerant timestamp
      // comparisons allowing for client/server clock skew -- not visible here, confirm.
      Date now = new Date();
      assertTrue(fuzzyBefore(response.created(), now));
      assertTrue(fuzzyAfter(response.created(), start));
      assertTrue(fuzzyBefore(response.updated(), now));
      assertTrue(fuzzyAfter(response.updated(), start));
    } catch (Exception ex) {
      fail(ex.getMessage());
    } finally {
      // Clean up
      DeleteCounterexampleOptions deleteOptions =
          new DeleteCounterexampleOptions.Builder(workspaceId, counterExampleText).build();
      service.deleteCounterexample(deleteOptions).execute();
    }
  }
  /** Tests listing counterexamples and verifying a newly created one appears with audit data. */
  @Test
  public void testListCounterexamples() {
    String counterExampleText =
        "Make me a " + UUID.randomUUID().toString() + " sandwich"; // gotta be unique
    try {
      ListCounterexamplesOptions listOptions =
          new ListCounterexamplesOptions.Builder(workspaceId).build();
      CounterexampleCollection ccResponse =
          service.listCounterexamples(listOptions).execute().getResult();
      assertNotNull(ccResponse);
      assertNotNull(ccResponse.getCounterexamples());
      assertNotNull(ccResponse.getPagination());
      assertNotNull(ccResponse.getPagination().getRefreshUrl());
      // nextUrl may be null
      Date start = new Date();
      // Now add a counterexample and make sure we get it back
      CreateCounterexampleOptions createOptions =
          new CreateCounterexampleOptions.Builder(workspaceId, counterExampleText).build();
      service.createCounterexample(createOptions).execute().getResult();
      long count = ccResponse.getCounterexamples().size();
      // Re-list with a page limit large enough to include the new entry, plus audit fields.
      CounterexampleCollection ccResponse2 =
          service
              .listCounterexamples(
                  listOptions.newBuilder().pageLimit(count + 1).includeAudit(true).build())
              .execute()
              .getResult();
      assertNotNull(ccResponse2);
      assertNotNull(ccResponse2.getCounterexamples());
      List<Counterexample> counterexamples = ccResponse2.getCounterexamples();
      assertTrue(counterexamples.size() > count);
      // Locate the counterexample created above by its unique text.
      Counterexample exResponse = null;
      for (Counterexample resp : counterexamples) {
        if (resp.text().equals(counterExampleText)) {
          exResponse = resp;
          break;
        }
      }
      assertNotNull(exResponse);
      // fuzzyBefore/fuzzyAfter come from the test superclass; presumably tolerant timestamp
      // comparisons -- not visible here, confirm.
      Date now = new Date();
      assertTrue(fuzzyBefore(exResponse.created(), now));
      assertTrue(fuzzyAfter(exResponse.created(), start));
      assertTrue(fuzzyBefore(exResponse.updated(), now));
      assertTrue(fuzzyAfter(exResponse.updated(), start));
    } catch (Exception ex) {
      fail(ex.getMessage());
    } finally {
      // Clean up; deletion may 404 if creation itself failed.
      try {
        DeleteCounterexampleOptions deleteOptions =
            new DeleteCounterexampleOptions.Builder(workspaceId, counterExampleText).build();
        service.deleteCounterexample(deleteOptions).execute();
      } catch (NotFoundException ex) {
        // Okay
      }
    }
  }
  /** Tests cursor-based paging through counterexamples sorted ascending by text. */
  @Test
  public void testListCounterexamplesWithPaging() {
    String counterExampleText1 = "alpha" + UUID.randomUUID().toString(); // gotta be unique
    String counterExampleText2 = "zeta" + UUID.randomUUID().toString(); // gotta be unique
    // Add two counterexamples
    CreateCounterexampleOptions createOptions =
        new CreateCounterexampleOptions.Builder(workspaceId, counterExampleText1).build();
    service.createCounterexample(createOptions).execute().getResult();
    service
        .createCounterexample(createOptions.newBuilder().text(counterExampleText2).build())
        .execute()
        .getResult();
    try {
      // pageLimit(1) forces one item per page so the loop below actually exercises paging.
      ListCounterexamplesOptions listOptions =
          new ListCounterexamplesOptions.Builder(workspaceId).pageLimit(1L).sort("text").build();
      CounterexampleCollection response =
          service.listCounterexamples(listOptions).execute().getResult();
      assertNotNull(response);
      assertNotNull(response.getPagination());
      assertNotNull(response.getPagination().getRefreshUrl());
      assertNotNull(response.getPagination().getNextUrl());
      assertNotNull(response.getPagination().getNextCursor());
      boolean found1 = false;
      boolean found2 = false;
      while (true) {
        assertNotNull(response.getCounterexamples());
        assertTrue(response.getCounterexamples().size() == 1);
        found1 |= response.getCounterexamples().get(0).text().equals(counterExampleText1);
        found2 |= response.getCounterexamples().get(0).text().equals(counterExampleText2);
        // "alpha..." sorts before "zeta...", so text2 must never be seen before text1.
        assertTrue(found1 || !found2); // verify sort
        if (response.getPagination().getNextCursor() == null) {
          break;
        }
        // Follow the cursor to the next single-item page.
        String cursor = response.getPagination().getNextCursor();
        response =
            service
                .listCounterexamples(listOptions.newBuilder().cursor(cursor).build())
                .execute()
                .getResult();
      }
      assertTrue(found1 && found2);
    } catch (Exception ex) {
      fail(ex.getMessage());
    } finally {
      // Clean up
      DeleteCounterexampleOptions deleteOptions =
          new DeleteCounterexampleOptions.Builder(workspaceId, counterExampleText1).build();
      service.deleteCounterexample(deleteOptions).execute();
      service
          .deleteCounterexample(deleteOptions.newBuilder().text(counterExampleText2).build())
          .execute();
    }
  }
/** Test updateCounterexample. */
@Test
public void testUpdateCounterexample() {
String counterExampleText =
"Make me a " + UUID.randomUUID().toString() + " sandwich"; // gotta be unique
String counterExampleText2 =
"Make me a " + UUID.randomUUID().toString() + " sandwich"; // gotta be unique
CreateCounterexampleOptions createOptions =
new CreateCounterexampleOptions.Builder(workspaceId, counterExampleText).build();
service.createCounterexample(createOptions).execute().getResult();
try {
UpdateCounterexampleOptions updateOptions =
new UpdateCounterexampleOptions.Builder(workspaceId, counterExampleText)
.newText(counterExampleText2)
.build();
Counterexample response = service.updateCounterexample(updateOptions).execute().getResult();
assertNotNull(response);
assertNotNull(response.text());
assertEquals(response.text(), counterExampleText2);
} catch (Exception ex) {
fail(ex.getMessage());
} finally {
// Clean up
DeleteCounterexampleOptions deleteOptions =
new DeleteCounterexampleOptions.Builder(workspaceId, counterExampleText2).build();
service.deleteCounterexample(deleteOptions).execute();
}
}
  /**
   * Creates the shared "Hello" intent used by the example tests; tolerates the intent already
   * existing from a previous run.
   */
  public void createExampleIntent() {
    exampleIntent = "Hello";
    try {
      CreateIntentOptions createOptions =
          new CreateIntentOptions.Builder(workspaceId, exampleIntent)
              .description("Example Intent")
              .build();
      service.createIntent(createOptions).execute().getResult();
    } catch (Exception ex) {
      // Exception is okay if is for Unique Violation
      // NOTE(review): matching on the error message text is brittle — confirm the service's
      // error format stays "Unique Violation...".
      assertTrue(ex.getLocalizedMessage().startsWith("Unique Violation"));
    }
  }
/** Test createExample. */
@Test
public void testCreateExample() {
createExampleIntent();
String exampleText = "Howdy " + UUID.randomUUID().toString(); // gotta be unique
CreateExampleOptions createOptions =
new CreateExampleOptions.Builder(workspaceId, exampleIntent, exampleText).build();
Example response = service.createExample(createOptions).execute().getResult();
try {
assertNotNull(response);
assertNotNull(response.text());
assertEquals(response.text(), exampleText);
} catch (Exception ex) {
fail(ex.getMessage());
} finally {
// Clean up
DeleteExampleOptions deleteOptions =
new DeleteExampleOptions.Builder(workspaceId, exampleIntent, exampleText).build();
service.deleteExample(deleteOptions).execute();
}
}
/** Test deleteExample. */
@Test
public void testDeleteExample() {
createExampleIntent();
String exampleText = "Howdy " + UUID.randomUUID().toString(); // gotta be unique
CreateExampleOptions createOptions =
new CreateExampleOptions.Builder(workspaceId, exampleIntent, exampleText).build();
service.createExample(createOptions).execute().getResult();
DeleteExampleOptions deleteOptions =
new DeleteExampleOptions.Builder(workspaceId, exampleIntent, exampleText).build();
service.deleteExample(deleteOptions).execute();
try {
GetExampleOptions getOptions =
new GetExampleOptions.Builder(workspaceId, exampleIntent, exampleText).build();
service.getExample(getOptions).execute().getResult();
fail("deleteCounterexample failed");
} catch (Exception ex) {
// Expected result
assertTrue(ex instanceof NotFoundException);
}
}
  /** Tests fetching an example and validating its text and audit timestamps. */
  @Test
  public void testGetExample() {
    createExampleIntent();
    Date start = new Date();
    String exampleText = "Howdy " + UUID.randomUUID().toString(); // gotta be unique
    CreateExampleOptions createOptions =
        new CreateExampleOptions.Builder(workspaceId, exampleIntent, exampleText).build();
    service.createExample(createOptions).execute().getResult();
    try {
      // includeAudit(true) asks the service to return created/updated timestamps.
      GetExampleOptions getOptions =
          new GetExampleOptions.Builder(workspaceId, exampleIntent, exampleText)
              .includeAudit(true)
              .build();
      Example response = service.getExample(getOptions).execute().getResult();
      assertNotNull(response);
      assertNotNull(response.text());
      assertEquals(response.text(), exampleText);
      assertNotNull(response.created());
      assertNotNull(response.updated());
      // fuzzyBefore/fuzzyAfter come from the test superclass; presumably tolerant timestamp
      // comparisons -- not visible here, confirm.
      Date now = new Date();
      assertTrue(fuzzyBefore(response.created(), now));
      assertTrue(fuzzyAfter(response.created(), start));
      assertTrue(fuzzyBefore(response.updated(), now));
      assertTrue(fuzzyAfter(response.updated(), start));
    } catch (Exception ex) {
      fail(ex.getMessage());
    } finally {
      // Clean up
      DeleteExampleOptions deleteOptions =
          new DeleteExampleOptions.Builder(workspaceId, exampleIntent, exampleText).build();
      service.deleteExample(deleteOptions).execute();
    }
  }
  /** Tests listing examples and verifying a newly created one appears with audit data. */
  @Test
  public void testListExamples() {
    createExampleIntent();
    String exampleText = "Howdy " + UUID.randomUUID().toString(); // gotta be unique
    try {
      ListExamplesOptions listOptions =
          new ListExamplesOptions.Builder(workspaceId, exampleIntent).includeAudit(true).build();
      ExampleCollection ecResponse = service.listExamples(listOptions).execute().getResult();
      assertNotNull(ecResponse);
      assertNotNull(ecResponse.getExamples());
      assertNotNull(ecResponse.getPagination());
      assertNotNull(ecResponse.getPagination().getRefreshUrl());
      // nextUrl may be null
      Date start = new Date();
      // Now add an example and make sure we get it back
      CreateExampleOptions createOptions =
          new CreateExampleOptions.Builder(workspaceId, exampleIntent, exampleText).build();
      service.createExample(createOptions).execute().getResult();
      long count = ecResponse.getExamples().size();
      // Re-list with a page limit large enough to include the new entry, plus audit fields.
      ExampleCollection ecResponse2 =
          service
              .listExamples(
                  listOptions.newBuilder().pageLimit(count + 1).includeAudit(true).build())
              .execute()
              .getResult();
      assertNotNull(ecResponse2);
      assertNotNull(ecResponse2.getExamples());
      List<Example> examples = ecResponse2.getExamples();
      assertTrue(examples.size() > count);
      // Locate the example created above by its unique text.
      Example exResponse = null;
      for (Example resp : examples) {
        if (resp.text().equals(exampleText)) {
          exResponse = resp;
          break;
        }
      }
      assertNotNull(exResponse);
      // fuzzyBefore/fuzzyAfter come from the test superclass; presumably tolerant timestamp
      // comparisons -- not visible here, confirm.
      Date now = new Date();
      assertTrue(fuzzyBefore(exResponse.created(), now));
      assertTrue(fuzzyAfter(exResponse.created(), start));
      assertTrue(fuzzyBefore(exResponse.updated(), now));
      assertTrue(fuzzyAfter(exResponse.updated(), start));
    } catch (Exception ex) {
      fail(ex.getMessage());
    } finally {
      // Clean up
      DeleteExampleOptions deleteOptions =
          new DeleteExampleOptions.Builder(workspaceId, exampleIntent, exampleText).build();
      service.deleteExample(deleteOptions).execute();
    }
  }
  /** Tests cursor-based paging through examples sorted descending by text ("-text"). */
  @Test
  public void testListExamplesWithPaging() {
    createExampleIntent();
    String exampleText1 = "Alpha " + UUID.randomUUID().toString(); // gotta be unique
    String exampleText2 = "Zeta " + UUID.randomUUID().toString(); // gotta be unique
    CreateExampleOptions createOptions =
        new CreateExampleOptions.Builder(workspaceId, exampleIntent, exampleText1).build();
    service.createExample(createOptions).execute().getResult();
    service
        .createExample(createOptions.newBuilder().text(exampleText2).build())
        .execute()
        .getResult();
    try {
      // pageLimit(1) forces one item per page so the loop below actually exercises paging.
      ListExamplesOptions listOptions =
          new ListExamplesOptions.Builder(workspaceId, exampleIntent)
              .pageLimit(1L)
              .sort("-text")
              .build();
      ExampleCollection response = service.listExamples(listOptions).execute().getResult();
      assertNotNull(response);
      assertNotNull(response.getExamples());
      assertNotNull(response.getPagination());
      assertNotNull(response.getPagination().getRefreshUrl());
      assertNotNull(response.getPagination().getNextUrl());
      assertNotNull(response.getPagination().getNextCursor());
      boolean found1 = false;
      boolean found2 = false;
      while (true) {
        assertNotNull(response.getExamples());
        assertTrue(response.getExamples().size() == 1);
        found1 |= response.getExamples().get(0).text().equals(exampleText1);
        found2 |= response.getExamples().get(0).text().equals(exampleText2);
        // Descending sort: "Zeta..." must appear before "Alpha...".
        assertTrue(found2 || !found1); // verify sort
        if (response.getPagination().getNextCursor() == null) {
          break;
        }
        // Follow the cursor to the next single-item page.
        String cursor = response.getPagination().getNextCursor();
        response =
            service
                .listExamples(listOptions.newBuilder().cursor(cursor).build())
                .execute()
                .getResult();
      }
      assertTrue(found1 && found2);
    } catch (Exception ex) {
      fail(ex.getMessage());
    } finally {
      // Clean up
      DeleteExampleOptions deleteOptions =
          new DeleteExampleOptions.Builder(workspaceId, exampleIntent, exampleText1).build();
      service.deleteExample(deleteOptions).execute();
      service.deleteExample(deleteOptions.newBuilder().text(exampleText2).build()).execute();
    }
  }
/** Test updateExample. */
@Test
public void testUpdateExample() {
createExampleIntent();
String exampleText = "Howdy " + UUID.randomUUID().toString(); // gotta be unique
String exampleText2 = "Howdy " + UUID.randomUUID().toString(); // gotta be unique
CreateExampleOptions createOptions =
new CreateExampleOptions.Builder(workspaceId, exampleIntent, exampleText).build();
service.createExample(createOptions).execute().getResult();
try {
UpdateExampleOptions updateOptions =
new UpdateExampleOptions.Builder(workspaceId, exampleIntent, exampleText)
.newText(exampleText2)
.build();
Example response = service.updateExample(updateOptions).execute().getResult();
assertNotNull(response);
assertNotNull(response.text());
assertEquals(response.text(), exampleText2);
} catch (Exception ex) {
fail(ex.getMessage());
} finally {
// Clean up
DeleteExampleOptions deleteOptions =
new DeleteExampleOptions.Builder(workspaceId, exampleIntent, exampleText2).build();
service.deleteExample(deleteOptions).execute();
}
}
  /** Tests creating an intent with one example and verifying both round-trip correctly. */
  @Test
  public void testCreateIntent() {
    String intentName = "Hello" + UUID.randomUUID().toString(); // gotta be unique
    String intentDescription = "Description of " + intentName;
    String intentExample = "Example of " + intentName;
    List<Example> intentExamples = new ArrayList<>();
    intentExamples.add(new Example.Builder().text(intentExample).build());
    Date start = new Date();
    CreateIntentOptions createOptions =
        new CreateIntentOptions.Builder(workspaceId, intentName)
            .description(intentDescription)
            .examples(intentExamples)
            .build();
    Intent response = service.createIntent(createOptions).execute().getResult();
    try {
      assertNotNull(response);
      assertNotNull(response.getIntent());
      assertEquals(response.getIntent(), intentName);
      assertNotNull(response.getDescription());
      assertEquals(response.getDescription(), intentDescription);
      Date now = new Date();
      // Fetch the examples back to confirm the one supplied at creation time was stored.
      ListExamplesOptions listOptions =
          new ListExamplesOptions.Builder(workspaceId, intentName).includeAudit(true).build();
      ExampleCollection ecResponse = service.listExamples(listOptions).execute().getResult();
      assertNotNull(ecResponse);
      assertNotNull(ecResponse.getExamples());
      List<Example> examples = ecResponse.getExamples();
      assertTrue(examples.size() == 1);
      assertEquals(examples.get(0).text(), intentExample);
      // fuzzyBefore/fuzzyAfter come from the test superclass; presumably tolerant timestamp
      // comparisons -- not visible here, confirm.
      assertTrue(fuzzyBefore(examples.get(0).created(), now));
      assertTrue(fuzzyAfter(examples.get(0).created(), start));
      assertTrue(fuzzyBefore(examples.get(0).updated(), now));
      assertTrue(fuzzyAfter(examples.get(0).updated(), start));
    } catch (Exception ex) {
      fail(ex.getMessage());
    } finally {
      // Clean up
      DeleteIntentOptions deleteOptions =
          new DeleteIntentOptions.Builder(workspaceId, intentName).build();
      service.deleteIntent(deleteOptions).execute();
    }
  }
/** Test deleteIntent. */
@Test
public void testDeleteIntent() {
String intentName = "Hello" + UUID.randomUUID().toString(); // gotta be unique
CreateIntentOptions createOptions =
new CreateIntentOptions.Builder(workspaceId, intentName).build();
service.createIntent(createOptions).execute().getResult();
DeleteIntentOptions deleteOptions =
new DeleteIntentOptions.Builder(workspaceId, intentName).build();
service.deleteIntent(deleteOptions).execute();
try {
GetIntentOptions getOptions = new GetIntentOptions.Builder(workspaceId, intentName).build();
service.getIntent(getOptions).execute();
fail("deleteIntent failed");
} catch (Exception ex) {
// Expected result
assertTrue(ex instanceof NotFoundException);
}
}
  /** Tests fetching an intent with export and audit data, validating all returned fields. */
  @Test
  public void testGetIntent() {
    String intentName = "Hello" + UUID.randomUUID().toString(); // gotta be unique
    String intentDescription = "Description of " + intentName;
    String intentExample = "Example of " + intentName;
    List<Example> intentExamples = new ArrayList<>();
    intentExamples.add(new Example.Builder().text(intentExample).build());
    Date start = new Date();
    CreateIntentOptions createOptions =
        new CreateIntentOptions.Builder()
            .workspaceId(workspaceId)
            .intent(intentName)
            .description(intentDescription)
            .examples(intentExamples)
            .build();
    service.createIntent(createOptions).execute().getResult();
    try {
      // export(true) includes the intent's examples; includeAudit(true) adds timestamps.
      GetIntentOptions getOptions =
          new GetIntentOptions.Builder()
              .workspaceId(workspaceId)
              .intent(intentName)
              .export(true)
              .includeAudit(true)
              .build();
      Intent response = service.getIntent(getOptions).execute().getResult();
      assertNotNull(response);
      assertNotNull(response.getIntent());
      assertEquals(response.getIntent(), intentName);
      assertNotNull(response.getDescription());
      assertEquals(response.getDescription(), intentDescription);
      assertNotNull(response.getExamples());
      assertNotNull(response.getCreated());
      assertNotNull(response.getUpdated());
      // fuzzyBefore/fuzzyAfter come from the test superclass; presumably tolerant timestamp
      // comparisons -- not visible here, confirm.
      Date now = new Date();
      assertTrue(fuzzyBefore(response.getCreated(), now));
      assertTrue(fuzzyAfter(response.getCreated(), start));
      assertTrue(fuzzyBefore(response.getUpdated(), now));
      assertTrue(fuzzyAfter(response.getUpdated(), start));
      // The exported examples must contain exactly the one supplied at creation.
      List<Example> examples = response.getExamples();
      assertTrue(examples.size() == 1);
      assertEquals(examples.get(0).text(), intentExample);
      assertTrue(fuzzyBefore(examples.get(0).created(), now));
      assertTrue(fuzzyAfter(examples.get(0).created(), start));
      assertTrue(fuzzyBefore(examples.get(0).updated(), now));
      assertTrue(fuzzyAfter(examples.get(0).updated(), start));
    } catch (Exception ex) {
      fail(ex.getMessage());
    } finally {
      // Clean up
      DeleteIntentOptions deleteOptions =
          new DeleteIntentOptions.Builder(workspaceId, intentName).build();
      service.deleteIntent(deleteOptions).execute();
    }
  }
  /** Tests listing intents and verifying a newly created one appears with its example. */
  @Test
  public void testListIntents() {
    String intentName = "Hello" + UUID.randomUUID().toString(); // gotta be unique
    try {
      ListIntentsOptions listOptions =
          new ListIntentsOptions.Builder(workspaceId).includeAudit(true).build();
      IntentCollection response = service.listIntents(listOptions).execute().getResult();
      assertNotNull(response);
      assertNotNull(response.getIntents());
      assertNotNull(response.getPagination());
      assertNotNull(response.getPagination().getRefreshUrl());
      // nextUrl may be null
      // Now add an intent and make sure we get it back
      String intentDescription = "Description of " + intentName;
      String intentExample = "Example of " + intentName;
      List<Example> intentExamples = new ArrayList<>();
      intentExamples.add(new Example.Builder().text(intentExample).build());
      Date start = new Date();
      CreateIntentOptions createOptions =
          new CreateIntentOptions.Builder(workspaceId, intentName)
              .description(intentDescription)
              .examples(intentExamples)
              .build();
      service.createIntent(createOptions).execute().getResult();
      long count = response.getIntents().size();
      // Re-list with export enabled and a page limit large enough to include the new intent.
      ListIntentsOptions listOptions2 =
          new ListIntentsOptions.Builder(workspaceId)
              .export(true)
              .pageLimit(count + 1)
              .includeAudit(true)
              .build();
      IntentCollection response2 = service.listIntents(listOptions2).execute().getResult();
      assertNotNull(response2);
      assertNotNull(response2.getIntents());
      List<Intent> intents = response2.getIntents();
      assertTrue(intents.size() > count);
      // Locate the intent created above by its unique name.
      Intent ieResponse = null;
      for (Intent resp : intents) {
        if (resp.getIntent().equals(intentName)) {
          ieResponse = resp;
          break;
        }
      }
      assertNotNull(ieResponse);
      assertNotNull(ieResponse.getDescription());
      assertEquals(ieResponse.getDescription(), intentDescription);
      assertNotNull(ieResponse.getExamples());
      assertTrue(ieResponse.getExamples().size() == 1);
      assertEquals(ieResponse.getExamples().get(0).text(), intentExample);
      // fuzzyBefore/fuzzyAfter come from the test superclass; presumably tolerant timestamp
      // comparisons -- not visible here, confirm.
      Date now = new Date();
      assertTrue(fuzzyBefore(ieResponse.getCreated(), now));
      assertTrue(fuzzyAfter(ieResponse.getCreated(), start));
      assertTrue(fuzzyBefore(ieResponse.getUpdated(), now));
      assertTrue(fuzzyAfter(ieResponse.getUpdated(), start));
    } catch (Exception ex) {
      fail(ex.getMessage());
    } finally {
      // Clean up
      DeleteIntentOptions deleteOptions =
          new DeleteIntentOptions.Builder(workspaceId, intentName).build();
      service.deleteIntent(deleteOptions).execute();
    }
  }
  /** Tests cursor-based paging through intents sorted by modification time. */
  @Test
  public void testListIntentsWithPaging() {
    String intentName1 = "First" + UUID.randomUUID().toString(); // gotta be unique
    String intentName2 = "Second" + UUID.randomUUID().toString(); // gotta be unique
    CreateIntentOptions createOptions =
        new CreateIntentOptions.Builder(workspaceId, intentName1).build();
    service.createIntent(createOptions).execute().getResult();
    service
        .createIntent(createOptions.newBuilder().intent(intentName2).build())
        .execute()
        .getResult();
    try {
      // pageLimit(1) forces one intent per page so the loop below actually exercises paging.
      ListIntentsOptions listOptions =
          new ListIntentsOptions.Builder()
              .workspaceId(workspaceId)
              .export(true)
              .pageLimit(1L)
              .sort("modified")
              .includeAudit(true)
              .build();
      IntentCollection response = service.listIntents(listOptions).execute().getResult();
      assertNotNull(response);
      assertNotNull(response.getIntents());
      assertNotNull(response.getPagination());
      assertNotNull(response.getPagination().getRefreshUrl());
      assertNotNull(response.getPagination().getNextUrl());
      assertNotNull(response.getPagination().getNextCursor());
      boolean found1 = false;
      boolean found2 = false;
      while (true) {
        assertNotNull(response.getIntents());
        assertTrue(response.getIntents().size() == 1);
        found1 |= response.getIntents().get(0).getIntent().equals(intentName1);
        found2 |= response.getIntents().get(0).getIntent().equals(intentName2);
        // intentName1 was created (hence modified) first, so it must appear before intentName2.
        assertTrue(found1 || !found2); // verify sort
        if (response.getPagination().getNextCursor() == null) {
          break;
        }
        // Follow the cursor to the next single-item page.
        String cursor = response.getPagination().getNextCursor();
        response =
            service
                .listIntents(listOptions.newBuilder().cursor(cursor).build())
                .execute()
                .getResult();
      }
      assertTrue(found1 && found2);
    } catch (Exception ex) {
      fail(ex.getMessage());
    } finally {
      // Clean up
      DeleteIntentOptions deleteOptions =
          new DeleteIntentOptions.Builder(workspaceId, intentName1).build();
      service.deleteIntent(deleteOptions).execute();
      service.deleteIntent(deleteOptions.newBuilder().intent(intentName2).build()).execute();
    }
  }
/**
 * Test updateIntent.
 *
 * <p>Creates an intent with one example, replaces its description and example list via
 * updateIntent, then verifies the new description took effect and that the replacement
 * example's created/updated timestamps fall inside the update window.
 */
@Test
public void testUpdateIntent() {
String intentName = "Hello" + UUID.randomUUID().toString(); // gotta be unique
String intentDescription = "Description of " + intentName;
String intentExample = "Example of " + intentName;
List<Example> intentExamples = new ArrayList<>();
intentExamples.add(new Example.Builder().text(intentExample).build());
CreateIntentOptions createOptions =
new CreateIntentOptions.Builder(workspaceId, intentName)
.description(intentDescription)
.examples(intentExamples)
.build();
service.createIntent(createOptions).execute().getResult();
try {
String intentDescription2 = "Updated description of " + intentName;
String intentExample2 = "Updated Example of " + intentName;
List<Example> intentExamples2 = new ArrayList<>();
intentExamples2.add(new Example.Builder().text(intentExample2).build());
// 'start' and 'now' bracket the update call for the fuzzy timestamp checks below.
Date start = new Date();
UpdateIntentOptions updateOptions =
new UpdateIntentOptions.Builder(workspaceId, intentName)
.newDescription(intentDescription2)
.newExamples(intentExamples2)
.build();
Intent response = service.updateIntent(updateOptions).execute().getResult();
assertNotNull(response);
assertNotNull(response.getIntent());
assertEquals(response.getIntent(), intentName);
assertNotNull(response.getDescription());
assertEquals(response.getDescription(), intentDescription2);
Date now = new Date();
ListExamplesOptions listOptions =
new ListExamplesOptions.Builder(workspaceId, intentName).includeAudit(true).build();
ExampleCollection ecResponse = service.listExamples(listOptions).execute().getResult();
assertNotNull(ecResponse);
assertNotNull(ecResponse.getExamples());
// The update is expected to replace the example list: exactly one example, the new text.
List<Example> examples = ecResponse.getExamples();
assertTrue(examples.size() == 1);
assertEquals(examples.get(0).text(), intentExample2);
assertTrue(fuzzyBefore(examples.get(0).created(), now));
assertTrue(fuzzyAfter(examples.get(0).created(), start));
assertTrue(fuzzyBefore(examples.get(0).updated(), now));
assertTrue(fuzzyAfter(examples.get(0).updated(), start));
} catch (Exception ex) {
fail(ex.getMessage());
} finally {
// Clean up
DeleteIntentOptions deleteOptions =
new DeleteIntentOptions.Builder(workspaceId, intentName).build();
service.deleteIntent(deleteOptions).execute();
}
}
/**
 * Test createWorkspace.
 *
 * <p>Builds a workspace populated with metadata, one intent (with one example), one entity
 * (with one value and synonym), one counterexample, system settings (disambiguation and
 * tooling), and one webhook. After creation it re-fetches the workspace with export=true and
 * verifies every piece round-tripped. The workspace is deleted in the finally block.
 */
@Test
public void testCreateWorkspace() {
String workspaceName = "API Test " + UUID.randomUUID().toString(); // gotta be unique
String workspaceDescription = "Description of " + workspaceName;
String workspaceLanguage = "en";
// metadata
Map<String, Object> workspaceMetadata = new HashMap<String, Object>();
String metadataValue = "value for " + workspaceName;
workspaceMetadata.put("key", metadataValue);
// intents
List<CreateIntent> workspaceIntents = new ArrayList<CreateIntent>();
String intentName = "Hello" + UUID.randomUUID().toString(); // gotta be unique
String intentDescription = "Description of " + intentName;
String intentExample = "Example of " + intentName;
List<Example> intentExamples = new ArrayList<>();
intentExamples.add(new Example.Builder().text(intentExample).build());
workspaceIntents.add(
new CreateIntent.Builder()
.intent(intentName)
.description(intentDescription)
.examples(intentExamples)
.build());
// entities
List<CreateEntity> workspaceEntities = new ArrayList<CreateEntity>();
String entityName = "Hello" + UUID.randomUUID().toString(); // gotta be unique
String entityDescription = "Description of " + entityName;
String entityValue = "Value of " + entityName;
String entityValueSynonym = "Synonym for Value of " + entityName;
List<CreateValue> entityValues = new ArrayList<CreateValue>();
entityValues.add(
new CreateValue.Builder().value(entityValue).addSynonym(entityValueSynonym).build());
workspaceEntities.add(
new CreateEntity.Builder()
.entity(entityName)
.description(entityDescription)
.values(entityValues)
.build());
// counterexamples
List<Counterexample> workspaceCounterExamples = new ArrayList<>();
String counterExampleText = "Counterexample for " + workspaceName;
workspaceCounterExamples.add(new Counterexample.Builder().text(counterExampleText).build());
// systemSettings
WorkspaceSystemSettingsDisambiguation disambiguation =
new WorkspaceSystemSettingsDisambiguation.Builder()
.enabled(true)
.noneOfTheAbovePrompt("none of the above")
.prompt("prompt")
.sensitivity(WorkspaceSystemSettingsDisambiguation.Sensitivity.HIGH)
.build();
WorkspaceSystemSettingsTooling tooling =
new WorkspaceSystemSettingsTooling.Builder().storeGenericResponses(true).build();
WorkspaceSystemSettings systemSettings =
new WorkspaceSystemSettings.Builder()
.disambiguation(disambiguation)
.tooling(tooling)
.build();
// webhooks
String webhookHeaderName = "Webhook-Header";
String webhookHeaderValue = "webhook_header_value";
String webhookName = "java-sdk-test-webhook";
String webhookUrl = "https://github.com/watson-developer-cloud/java-sdk";
WebhookHeader webhookHeader =
new WebhookHeader.Builder().name(webhookHeaderName).value(webhookHeaderValue).build();
Webhook webhook =
new Webhook.Builder().name(webhookName).url(webhookUrl).addHeaders(webhookHeader).build();
CreateWorkspaceOptions createOptions =
new CreateWorkspaceOptions.Builder()
.name(workspaceName)
.description(workspaceDescription)
.language(workspaceLanguage)
.metadata(workspaceMetadata)
.intents(workspaceIntents)
.entities(workspaceEntities)
.counterexamples(workspaceCounterExamples)
.systemSettings(systemSettings)
.addWebhooks(webhook)
.build();
String workspaceId = null;
try {
Workspace response = service.createWorkspace(createOptions).execute().getResult();
assertNotNull(response);
assertNotNull(response.getWorkspaceId());
// Keep the id so the finally block can clean up even if a later assertion fails.
workspaceId = response.getWorkspaceId();
assertNotNull(response.getName());
assertEquals(response.getName(), workspaceName);
assertNotNull(response.getDescription());
assertEquals(response.getDescription(), workspaceDescription);
assertNotNull(response.getLanguage());
assertEquals(response.getLanguage(), workspaceLanguage);
// metadata
assertNotNull(response.getMetadata());
assertNotNull(response.getMetadata().get("key"));
assertEquals(response.getMetadata().get("key"), metadataValue);
// Re-fetch with export=true so intents/entities/counterexamples are included.
GetWorkspaceOptions getOptions =
new GetWorkspaceOptions.Builder(workspaceId).export(true).build();
Workspace exResponse = service.getWorkspace(getOptions).execute().getResult();
assertNotNull(exResponse);
// intents
assertNotNull(exResponse.getIntents());
assertTrue(exResponse.getIntents().size() == 1);
assertNotNull(exResponse.getIntents().get(0).getIntent());
assertEquals(exResponse.getIntents().get(0).getIntent(), intentName);
assertNotNull(exResponse.getIntents().get(0).getDescription());
assertEquals(exResponse.getIntents().get(0).getDescription(), intentDescription);
assertNotNull(exResponse.getIntents().get(0).getExamples());
assertTrue(exResponse.getIntents().get(0).getExamples().size() == 1);
assertNotNull(exResponse.getIntents().get(0).getExamples().get(0));
assertNotNull(exResponse.getIntents().get(0).getExamples().get(0).text());
assertEquals(exResponse.getIntents().get(0).getExamples().get(0).text(), intentExample);
// entities
assertNotNull(exResponse.getEntities());
assertTrue(exResponse.getEntities().size() == 1);
assertNotNull(exResponse.getEntities().get(0).getEntity());
assertEquals(exResponse.getEntities().get(0).getEntity(), entityName);
assertNotNull(exResponse.getEntities().get(0).getDescription());
assertEquals(exResponse.getEntities().get(0).getDescription(), entityDescription);
assertNotNull(exResponse.getEntities().get(0).getValues());
assertTrue(exResponse.getEntities().get(0).getValues().size() == 1);
assertNotNull(exResponse.getEntities().get(0).getValues().get(0).value());
assertEquals(exResponse.getEntities().get(0).getValues().get(0).value(), entityValue);
assertNotNull(exResponse.getEntities().get(0).getValues().get(0).synonyms());
assertTrue(exResponse.getEntities().get(0).getValues().get(0).synonyms().size() == 1);
assertEquals(
exResponse.getEntities().get(0).getValues().get(0).synonyms().get(0), entityValueSynonym);
// counterexamples
assertNotNull(exResponse.getCounterexamples());
assertTrue(exResponse.getCounterexamples().size() == 1);
assertNotNull(exResponse.getCounterexamples().get(0).text());
assertEquals(exResponse.getCounterexamples().get(0).text(), counterExampleText);
// systemSettings
assertNotNull(exResponse.getSystemSettings());
assertEquals(
exResponse.getSystemSettings().disambiguation().noneOfTheAbovePrompt(),
disambiguation.noneOfTheAbovePrompt());
assertEquals(
exResponse.getSystemSettings().disambiguation().sensitivity(),
disambiguation.sensitivity());
assertEquals(
exResponse.getSystemSettings().disambiguation().prompt(), disambiguation.prompt());
assertEquals(
exResponse.getSystemSettings().disambiguation().enabled(), disambiguation.enabled());
assertEquals(
exResponse.getSystemSettings().tooling().storeGenericResponses(),
tooling.storeGenericResponses());
// webhooks
assertNotNull(exResponse.getWebhooks());
assertEquals(webhookName, exResponse.getWebhooks().get(0).name());
assertEquals(webhookUrl, exResponse.getWebhooks().get(0).url());
assertEquals(webhookHeaderName, exResponse.getWebhooks().get(0).headers().get(0).name());
assertEquals(webhookHeaderValue, exResponse.getWebhooks().get(0).headers().get(0).value());
} catch (Exception ex) {
fail(ex.getMessage());
} finally {
// Clean up
if (workspaceId != null) {
DeleteWorkspaceOptions deleteOptions =
new DeleteWorkspaceOptions.Builder(workspaceId).build();
service.deleteWorkspace(deleteOptions).execute();
}
}
}
/**
 * Test deleteWorkspace.
 *
 * <p>Creates a throwaway workspace, deletes it, and verifies the deletion by expecting a
 * {@link NotFoundException} when fetching the deleted workspace. If the fetch succeeds, the
 * workspace was not deleted and the test fails explicitly.
 */
@Test
public void testDeleteWorkspace() {
  CreateWorkspaceOptions createOptions = new CreateWorkspaceOptions.Builder().build();
  String workspaceId = null;
  try {
    Workspace response = service.createWorkspace(createOptions).execute().getResult();
    assertNotNull(response);
    assertNotNull(response.getWorkspaceId());
    workspaceId = response.getWorkspaceId();
    DeleteWorkspaceOptions deleteOptions =
        new DeleteWorkspaceOptions.Builder(workspaceId).build();
    service.deleteWorkspace(deleteOptions).execute();
    GetWorkspaceOptions getOptions =
        new GetWorkspaceOptions.Builder(workspaceId).export(true).build();
    service.getWorkspace(getOptions).execute().getResult();
    // BUG FIX: previously the test passed silently when the fetch succeeded, i.e. when the
    // workspace had NOT actually been deleted. Fail explicitly instead, mirroring
    // testDeleteDialogNode. (fail() throws AssertionError, which the catch below does not
    // swallow; the finally block still removes the leftover workspace.)
    fail("deleteWorkspace failed");
  } catch (Exception ex) {
    // Expected result: fetching a deleted workspace raises NotFoundException.
    assertTrue(ex instanceof NotFoundException);
    // Null out the id so the finally block does not try to delete an already-deleted
    // workspace (which would itself throw NotFoundException).
    workspaceId = null;
  } finally {
    // Clean up
    if (workspaceId != null) {
      DeleteWorkspaceOptions deleteOptions =
          new DeleteWorkspaceOptions.Builder(workspaceId).build();
      service.deleteWorkspace(deleteOptions).execute();
    }
  }
}
/**
 * Test getWorkspace.
 *
 * <p>Fetches the shared test workspace (without export) and checks the basic attributes and
 * audit timestamps.
 */
@Test
public void testGetWorkspace() {
  GetWorkspaceOptions getOptions =
      new GetWorkspaceOptions.Builder(workspaceId).export(false).includeAudit(true).build();
  try {
    // CONSISTENCY FIX: the service call previously ran outside the try block, so a service
    // failure surfaced as a test error instead of the fail(ex.getMessage()) report every
    // other test in this class produces. Moved inside the try.
    Workspace response = service.getWorkspace(getOptions).execute().getResult();
    assertNotNull(response);
    assertNotNull(response.getWorkspaceId());
    assertEquals(response.getWorkspaceId(), workspaceId);
    assertNotNull(response.getName());
    assertNotNull(response.getLanguage());
    Date now = new Date();
    assertNotNull(response.getCreated());
    assertNotNull(response.getUpdated());
    assertTrue(fuzzyBefore(response.getCreated(), now));
    assertTrue(fuzzyBefore(response.getUpdated(), now));
    // metadata, intents, entities, dialogNodes, and counterexamples could be null
  } catch (Exception ex) {
    fail(ex.getMessage());
  }
}
/**
 * Test listWorkspaces.
 *
 * <p>Lists all workspaces and verifies the shared test workspace appears in the collection.
 */
@Test
public void testListWorkspaces() {
  ListWorkspacesOptions options = new ListWorkspacesOptions.Builder().build();
  WorkspaceCollection collection = service.listWorkspaces(options).execute().getResult();

  assertNotNull(collection);
  assertNotNull(collection.getWorkspaces());
  assertTrue(collection.getWorkspaces().size() > 0);
  assertNotNull(collection.getPagination());
  assertNotNull(collection.getPagination().getRefreshUrl());

  // Locate the workspace under test within the returned collection.
  Workspace match =
      collection.getWorkspaces().stream()
          .filter(w -> w.getWorkspaceId().equals(workspaceId))
          .findFirst()
          .orElse(null);
  assertNotNull(match);
  assertNotNull(match.getName());
}
/**
 * Test listWorkspaces with paging.
 *
 * <p>Pages through all workspaces one at a time via the pagination cursor and verifies the
 * shared test workspace is encountered.
 */
@Test
public void testListWorkspacesWithPaging() {
  ListWorkspacesOptions options =
      new ListWorkspacesOptions.Builder().pageLimit(1L).sort("-updated").build();
  WorkspaceCollection page = service.listWorkspaces(options).execute().getResult();
  assertNotNull(page);
  assertNotNull(page.getPagination());
  assertNotNull(page.getPagination().getRefreshUrl());

  boolean found = false;
  String cursor = null;
  do {
    if (cursor != null) {
      // Fetch the next page using the cursor from the previous one.
      page =
          service
              .listWorkspaces(options.newBuilder().cursor(cursor).build())
              .execute()
              .getResult();
    }
    assertNotNull(page.getWorkspaces());
    assertTrue(page.getWorkspaces().size() == 1);
    found |= page.getWorkspaces().get(0).getWorkspaceId().equals(workspaceId);
    cursor = page.getPagination().getNextCursor();
  } while (cursor != null); // a null cursor marks the final page

  assertTrue(found);
}
/**
 * Test updateWorkspace.
 *
 * <p>Creates a workspace pre-populated with intents, entities, and counterexamples, then
 * updates it (append=false) with an additional counterexample and a webhook, and verifies both
 * the new counterexample and the webhook are present afterwards.
 */
@Test
public void testUpdateWorkspace() {
String workspaceName = "testUpdateWorkspace";
String workspaceDescription = "Description for testUpdateWorkspace";
// intents
CreateIntent intent0 = new CreateIntent.Builder("Hello").build();
CreateIntent intent1 = new CreateIntent.Builder("Goodbye").build();
// entities
CreateEntity entity0 = new CreateEntity.Builder("animal").build();
CreateEntity entity1 = new CreateEntity.Builder("beverage").build();
// counterexamples
Counterexample counterexample0 = new Counterexample.Builder("What are you wearing?").build();
Counterexample counterexample1 = new Counterexample.Builder("What are you eating?").build();
CreateWorkspaceOptions createOptions =
new CreateWorkspaceOptions.Builder()
.name(workspaceName)
.description(workspaceDescription)
.addIntent(intent0)
.addIntent(intent1)
.addEntity(entity0)
.addEntity(entity1)
.addCounterexample(counterexample0)
.addCounterexample(counterexample1)
.build();
String workspaceId = null;
try {
Workspace createResponse = service.createWorkspace(createOptions).execute().getResult();
assertNotNull(createResponse);
assertNotNull(createResponse.getWorkspaceId());
// Keep the id so the finally block can clean up even if a later assertion fails.
workspaceId = createResponse.getWorkspaceId();
String counterExampleText = "What are you drinking";
Counterexample counterexample2 = new Counterexample.Builder(counterExampleText).build();
// webhooks
String webhookHeaderName = "Webhook-Header";
String webhookHeaderValue = "webhook_header_value";
String webhookName = "java-sdk-test-webhook";
String webhookUrl = "https://github.com/watson-developer-cloud/java-sdk";
WebhookHeader webhookHeader =
new WebhookHeader.Builder().name(webhookHeaderName).value(webhookHeaderValue).build();
Webhook webhook =
new Webhook.Builder().name(webhookName).url(webhookUrl).addHeaders(webhookHeader).build();
UpdateWorkspaceOptions updateOptions =
new UpdateWorkspaceOptions.Builder(workspaceId)
.addCounterexample(counterexample2)
.append(false)
.addWebhooks(webhook)
.build();
Workspace updateResponse = service.updateWorkspace(updateOptions).execute().getResult();
assertNotNull(updateResponse);
// Verify the added counterexample can be fetched back individually.
GetCounterexampleOptions getOptions =
new GetCounterexampleOptions.Builder(workspaceId, counterExampleText).build();
Counterexample eResponse = service.getCounterexample(getOptions).execute().getResult();
assertNotNull(eResponse);
assertNotNull(eResponse.text());
assertEquals(eResponse.text(), counterExampleText);
// webhooks
assertNotNull(updateResponse.getWebhooks());
assertEquals(webhookName, updateResponse.getWebhooks().get(0).name());
assertEquals(webhookUrl, updateResponse.getWebhooks().get(0).url());
assertEquals(webhookHeaderName, updateResponse.getWebhooks().get(0).headers().get(0).name());
assertEquals(
webhookHeaderValue, updateResponse.getWebhooks().get(0).headers().get(0).value());
} catch (Exception ex) {
fail(ex.getMessage());
} finally {
// Clean up
if (workspaceId != null) {
DeleteWorkspaceOptions deleteOptions =
new DeleteWorkspaceOptions.Builder(workspaceId).build();
service.deleteWorkspace(deleteOptions).execute();
}
}
}
/**
 * Test listLogs.
 *
 * <p>Lists the workspace logs and sanity-checks the pagination block: the next cursor must be
 * present exactly when the next URL is.
 */
@Test
@Ignore
public void testListLogs() {
  try {
    ListLogsOptions options = new ListLogsOptions.Builder().workspaceId(workspaceId).build();
    LogCollection logs = service.listLogs(options).execute().getResult();

    assertNotNull(logs);
    assertNotNull(logs.getLogs());
    assertNotNull(logs.getPagination());
    // Empirically -- no refresh_url in pagination of listLogs
    // assertNotNull(logs.getPagination().getRefreshUrl());

    // nextUrl may be null; cursor presence must track it.
    if (logs.getPagination().getNextUrl() != null) {
      assertNotNull(logs.getPagination().getNextCursor());
    } else {
      assertNull(logs.getPagination().getNextCursor());
    }
  } catch (Exception ex) {
    fail(ex.getMessage());
  }
}
/**
 * Test listLogs with pagination.
 *
 * <p>Fetches two consecutive single-entry pages of off-topic log entries sorted by descending
 * request timestamp and verifies the second entry is strictly older than the first.
 */
@Test
@Ignore("To be run locally until we fix the Rate limitation issue")
public void testListLogsWithPaging() {
try {
ListLogsOptions.Builder listOptionsBuilder = new ListLogsOptions.Builder(workspaceId);
// Newest first, filtered to off-topic intents, one entry per page.
listOptionsBuilder.sort("-request_timestamp");
listOptionsBuilder.filter("request.intents:intent:off_topic");
listOptionsBuilder.pageLimit(1L);
LogCollection response = service.listLogs(listOptionsBuilder.build()).execute().getResult();
assertNotNull(response);
assertNotNull(response.getLogs());
assertNotNull(response.getPagination());
// Empirically -- no refresh_url in pagination of listLogs
// assertNotNull(response.getPagination().getRefreshUrl());
assertNotNull(response.getPagination().getNextUrl());
assertNotNull(response.getPagination().getNextCursor());
assertTrue(response.getLogs().size() == 1);
Log logEntry1 = response.getLogs().get(0);
// Fetch the second page via the cursor and compare timestamps.
String cursor = response.getPagination().getNextCursor();
response = service.listLogs(listOptionsBuilder.cursor(cursor).build()).execute().getResult();
assertNotNull(response.getLogs());
assertTrue(response.getLogs().size() == 1);
Log logEntry2 = response.getLogs().get(0);
Date requestDate1 = isoDateFormat.parse(logEntry1.getRequestTimestamp());
Date requestDate2 = isoDateFormat.parse(logEntry2.getRequestTimestamp());
// Descending sort: the second page's entry must predate the first page's entry.
assertTrue(requestDate2.before(requestDate1));
} catch (Exception ex) {
fail(ex.getMessage());
}
}
/**
 * Test createDialogNode.
 *
 * <p>Creates a uniquely named dialog node, verifies its name and description in the response,
 * and deletes it afterwards.
 */
@Test
public void testCreateDialogNode() {
  String nodeName = "Test" + UUID.randomUUID().toString();
  String nodeDescription = "Description of " + nodeName;

  CreateDialogNodeOptions options =
      new CreateDialogNodeOptions.Builder(workspaceId, nodeName)
          .description(nodeDescription)
          .build();
  DialogNode created = service.createDialogNode(options).execute().getResult();

  try {
    assertNotNull(created);
    assertNotNull(created.dialogNode());
    assertEquals(created.dialogNode(), nodeName);
    assertNotNull(created.description());
    assertEquals(created.description(), nodeDescription);
  } catch (Exception ex) {
    fail(ex.getMessage());
  } finally {
    // Clean up the node created above.
    service
        .deleteDialogNode(new DeleteDialogNodeOptions.Builder(workspaceId, nodeName).build())
        .execute();
  }
}
/**
 * Test deleteDialogNode.
 *
 * <p>Creates a dialog node, deletes it, and verifies the deletion by expecting a
 * {@link NotFoundException} when fetching the deleted node.
 */
@Test
public void testDeleteDialogNode() {
  String nodeName = "Test" + UUID.randomUUID().toString(); // gotta be unique

  // Create the node, then immediately delete it.
  service
      .createDialogNode(new CreateDialogNodeOptions.Builder(workspaceId, nodeName).build())
      .execute()
      .getResult();
  service
      .deleteDialogNode(new DeleteDialogNodeOptions.Builder(workspaceId, nodeName).build())
      .execute();

  // Fetching the deleted node must raise NotFoundException.
  try {
    service
        .getDialogNode(new GetDialogNodeOptions.Builder(workspaceId, nodeName).build())
        .execute();
    fail("deleteDialogNode failed");
  } catch (Exception ex) {
    // Expected result
    assertTrue(ex instanceof NotFoundException);
  }
}
/**
 * Test getDialogNode.
 *
 * <p>Creates a dialog node, fetches it back with audit fields enabled, and verifies the name,
 * description, and that the created/updated timestamps fall inside the test window.
 */
@Test
public void testGetDialogNode() {
String dialogNodeName = "Test" + UUID.randomUUID().toString();
String dialogNodeDescription = "Description of " + dialogNodeName;
// 'start' and 'now' bracket the create call for the fuzzy timestamp checks below.
Date start = new Date();
CreateDialogNodeOptions createOptions =
new CreateDialogNodeOptions.Builder(workspaceId, dialogNodeName)
.description(dialogNodeDescription)
.build();
service.createDialogNode(createOptions).execute().getResult();
try {
GetDialogNodeOptions getOptions =
new GetDialogNodeOptions.Builder()
.workspaceId(workspaceId)
.dialogNode(dialogNodeName)
.includeAudit(true)
.build();
DialogNode response = service.getDialogNode(getOptions).execute().getResult();
assertNotNull(response);
assertNotNull(response.dialogNode());
assertEquals(response.dialogNode(), dialogNodeName);
assertNotNull(response.description());
assertEquals(response.description(), dialogNodeDescription);
assertNotNull(response.created());
assertNotNull(response.updated());
Date now = new Date();
assertTrue(fuzzyBefore(response.created(), now));
assertTrue(fuzzyAfter(response.created(), start));
assertTrue(fuzzyBefore(response.updated(), now));
assertTrue(fuzzyAfter(response.updated(), start));
} catch (Exception ex) {
fail(ex.getMessage());
} finally {
// Clean up
DeleteDialogNodeOptions deleteOptions =
new DeleteDialogNodeOptions.Builder(workspaceId, dialogNodeName).build();
service.deleteDialogNode(deleteOptions).execute();
}
}
/**
 * Test listDialogNodes.
 *
 * <p>Lists the existing dialog nodes, adds a new uniquely named node, lists again with a page
 * limit large enough to include it, and verifies the new node's description and audit
 * timestamps.
 */
@Test
public void testListDialogNodes() {
String dialogNodeName = "Test" + UUID.randomUUID().toString();
try {
ListDialogNodesOptions listOptions = new ListDialogNodesOptions.Builder(workspaceId).build();
DialogNodeCollection response = service.listDialogNodes(listOptions).execute().getResult();
assertNotNull(response);
assertNotNull(response.getDialogNodes());
assertNotNull(response.getPagination());
assertNotNull(response.getPagination().getRefreshUrl());
// nextUrl may be null
// Now add a dialog node and make sure we get it back
String dialogNodeDescription = "Description of " + dialogNodeName;
// 'start' and 'now' bracket the create call for the fuzzy timestamp checks below.
Date start = new Date();
CreateDialogNodeOptions createOptions =
new CreateDialogNodeOptions.Builder(workspaceId, dialogNodeName)
.description(dialogNodeDescription)
.build();
service.createDialogNode(createOptions).execute().getResult();
// Page limit of (previous count + 1) guarantees the new node fits in a single page.
long count = response.getDialogNodes().size();
ListDialogNodesOptions listOptions2 =
new ListDialogNodesOptions.Builder(workspaceId)
.pageLimit(count + 1)
.includeAudit(true)
.build();
DialogNodeCollection response2 = service.listDialogNodes(listOptions2).execute().getResult();
assertNotNull(response2);
assertNotNull(response2.getDialogNodes());
List<DialogNode> dialogNodes = response2.getDialogNodes();
assertTrue(dialogNodes.size() > count);
// Locate the node created above within the new listing.
DialogNode dialogResponse = null;
for (DialogNode node : dialogNodes) {
if (node.dialogNode().equals(dialogNodeName)) {
dialogResponse = node;
break;
}
}
assertNotNull(dialogResponse);
assertNotNull(dialogResponse.description());
assertEquals(dialogResponse.description(), dialogNodeDescription);
Date now = new Date();
assertTrue(fuzzyBefore(dialogResponse.created(), now));
assertTrue(fuzzyAfter(dialogResponse.created(), start));
assertTrue(fuzzyBefore(dialogResponse.updated(), now));
assertTrue(fuzzyAfter(dialogResponse.updated(), start));
} catch (Exception ex) {
fail(ex.getMessage());
} finally {
// Clean up
DeleteDialogNodeOptions deleteOptions =
new DeleteDialogNodeOptions.Builder(workspaceId, dialogNodeName).build();
service.deleteDialogNode(deleteOptions).execute();
}
}
/**
 * Test listDialogNodes with pagination.
 *
 * <p>Creates two uniquely named dialog nodes, pages through the node list one node at a time
 * using the pagination cursor, and verifies that the "First..." node is seen before the
 * "Second..." node (results sorted by "dialog_node") and that both are eventually found.
 */
@Test
public void testListDialogNodesWithPaging() {
String dialogNodeName1 = "First" + UUID.randomUUID().toString();
String dialogNodeName2 = "Second" + UUID.randomUUID().toString();
CreateDialogNodeOptions createOptions =
new CreateDialogNodeOptions.Builder(workspaceId, dialogNodeName1).build();
service.createDialogNode(createOptions).execute().getResult();
// Reuse the same options via newBuilder(); only the node name differs.
service
.createDialogNode(createOptions.newBuilder().dialogNode(dialogNodeName2).build())
.execute()
.getResult();
try {
// Page size of 1 forces the service to return a cursor until the list is exhausted.
ListDialogNodesOptions listOptions =
new ListDialogNodesOptions.Builder()
.workspaceId(workspaceId)
.pageLimit(1L)
.sort("dialog_node")
.build();
DialogNodeCollection response = service.listDialogNodes(listOptions).execute().getResult();
assertNotNull(response);
assertNotNull(response.getDialogNodes());
assertNotNull(response.getPagination());
assertNotNull(response.getPagination().getRefreshUrl());
assertNotNull(response.getPagination().getNextUrl());
assertNotNull(response.getPagination().getNextCursor());
boolean found1 = false, found2 = false;
while (true) {
assertNotNull(response.getDialogNodes());
assertTrue(response.getDialogNodes().size() == 1);
found1 |= response.getDialogNodes().get(0).dialogNode().equals(dialogNodeName1);
found2 |= response.getDialogNodes().get(0).dialogNode().equals(dialogNodeName2);
// The second node must never be seen before the first one.
assertTrue(found1 || !found2); // verify sort
// A null cursor marks the final page.
if (response.getPagination().getNextCursor() == null) {
break;
}
String cursor = response.getPagination().getNextCursor();
response =
service
.listDialogNodes(listOptions.newBuilder().cursor(cursor).build())
.execute()
.getResult();
}
assertTrue(found1 && found2);
} catch (Exception ex) {
fail(ex.getMessage());
} finally {
// Clean up
DeleteDialogNodeOptions deleteOptions =
new DeleteDialogNodeOptions.Builder(workspaceId, dialogNodeName1).build();
service.deleteDialogNode(deleteOptions).execute();
service
.deleteDialogNode(deleteOptions.newBuilder().dialogNode(dialogNodeName2).build())
.execute();
}
}
/**
 * Test updateDialogNode.
 *
 * <p>Creates a dialog node, renames it and replaces its description via updateDialogNode, and
 * verifies both changes. Cleanup deletes the node under its new name.
 */
@Test
public void testUpdateDialogNode() {
  String originalName = "Test" + UUID.randomUUID().toString();
  String originalDescription = "Description of " + originalName;
  service
      .createDialogNode(
          new CreateDialogNodeOptions.Builder(workspaceId, originalName)
              .description(originalDescription)
              .build())
      .execute()
      .getResult();

  String renamedNode = "Test2" + UUID.randomUUID().toString();
  try {
    String updatedDescription = "Updated description of " + originalName;
    UpdateDialogNodeOptions updateOptions =
        new UpdateDialogNodeOptions.Builder()
            .workspaceId(workspaceId)
            .dialogNode(originalName)
            .newDialogNode(renamedNode)
            .newDescription(updatedDescription)
            .build();
    DialogNode updated = service.updateDialogNode(updateOptions).execute().getResult();

    assertNotNull(updated);
    assertNotNull(updated.dialogNode());
    assertEquals(updated.dialogNode(), renamedNode);
    assertNotNull(updated.description());
    assertEquals(updated.description(), updatedDescription);
  } catch (Exception ex) {
    fail(ex.getMessage());
  } finally {
    // Clean up under the new (renamed) node name.
    service
        .deleteDialogNode(new DeleteDialogNodeOptions.Builder(workspaceId, renamedNode).build())
        .execute();
  }
}
/**
 * Test updateDialogNodeNullable.
 *
 * <p>Creates a dialog node that has a nextStep, then uses the nullable-update path (a JSON
 * merge-patch style body built via {@code asPatch()}) to rename it, change the description,
 * and explicitly null out the nextStep. Verifies nextStep is cleared — the behavior the
 * regular update path cannot express.
 */
@Test
public void testUpdateDialogNodeNullable() {
String dialogNodeName = "Test" + UUID.randomUUID().toString();
String dialogNodeDescription = "Description of " + dialogNodeName;
DialogNodeNextStep dialogNodeNextStep =
new DialogNodeNextStep.Builder()
.behavior(DialogNodeNextStep.Behavior.SKIP_USER_INPUT)
.build();
CreateDialogNodeOptions createOptions =
new CreateDialogNodeOptions.Builder(workspaceId, dialogNodeName)
.description(dialogNodeDescription)
.nextStep(dialogNodeNextStep)
.build();
service.createDialogNode(createOptions).execute().getResult();
String dialogNodeName2 = "Test2" + UUID.randomUUID().toString();
try {
String dialogNodeDescription2 = "Updated description of " + dialogNodeName;
// nextStep(null) is the point of this test: the nullable update must clear the field.
UpdateDialogNode updateDialogNode =
new UpdateDialogNode.Builder()
.description(dialogNodeDescription2)
.nextStep(null)
.dialogNode(dialogNodeName2)
.build();
Map<String, Object> body = updateDialogNode.asPatch();
UpdateDialogNodeNullableOptions updateDialogNodeNullableOptions =
new UpdateDialogNodeNullableOptions.Builder()
.workspaceId(workspaceId)
.dialogNode(dialogNodeName)
.body(body)
.build();
DialogNode response =
service.updateDialogNodeNullable(updateDialogNodeNullableOptions).execute().getResult();
assertNotNull(response);
assertNotNull(response.dialogNode());
assertEquals(response.dialogNode(), dialogNodeName2);
assertNotNull(response.description());
assertEquals(response.description(), dialogNodeDescription2);
assertNull(response.nextStep());
} catch (Exception ex) {
fail(ex.getMessage());
} finally {
// Clean up (the node now lives under its new name).
DeleteDialogNodeOptions deleteOptions =
new DeleteDialogNodeOptions.Builder(workspaceId, dialogNodeName2).build();
service.deleteDialogNode(deleteOptions).execute();
}
}
/**
 * Test deleteUserData.
 *
 * <p>Requests deletion of all data associated with a fixed test customer id; any service
 * exception fails the test.
 */
@Test
public void testDeleteUserData() {
  String customerId = "java_sdk_test_id";
  try {
    service
        .deleteUserData(new DeleteUserDataOptions.Builder().customerId(customerId).build())
        .execute();
  } catch (Exception ex) {
    fail(ex.getMessage());
  }
}
/**
 * Test list mentions.
 *
 * <p>Lists entity mentions for the "beverage" entity in the shared test workspace and checks a
 * collection is returned.
 */
@Test
public void testListMentions() {
  ListMentionsOptions options =
      new ListMentionsOptions.Builder().workspaceId(workspaceId).entity("beverage").build();
  EntityMentionCollection mentions = service.listMentions(options).execute().getResult();
  assertNotNull(mentions);
}
/**
 * Test bulk classify.
 *
 * <p>Submits one utterance for bulk classification and checks a response is returned. Ignored
 * by default because it targets a placeholder workspace id.
 */
@Ignore
@Test
public void testBulkClassify() {
  BulkClassifyUtterance utterance =
      new BulkClassifyUtterance.Builder().text("help I need help").build();
  BulkClassifyOptions options =
      new BulkClassifyOptions.Builder()
          .addInput(utterance)
          .workspaceId("{workspaceId}")
          .build();
  BulkClassifyResponse result = service.bulkClassify(options).execute().getResult();
  assertNotNull(result);
}
}
|
assistant/src/test/java/com/ibm/watson/assistant/v1/AssistantServiceIT.java
|
/*
* (C) Copyright IBM Corp. 2019, 2021.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.ibm.watson.assistant.v1;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import com.ibm.cloud.sdk.core.http.Response;
import com.ibm.cloud.sdk.core.http.ServiceCallback;
import com.ibm.cloud.sdk.core.security.BasicAuthenticator;
import com.ibm.cloud.sdk.core.service.exception.NotFoundException;
import com.ibm.cloud.sdk.core.service.exception.UnauthorizedException;
import com.ibm.watson.assistant.v1.model.*;
import com.ibm.watson.common.RetryRunner;
import io.reactivex.Single;
import io.reactivex.functions.Consumer;
import io.reactivex.schedulers.Schedulers;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.runner.RunWith;
/** Integration test for the {@link Assistant}. */
@RunWith(RetryRunner.class)
public class AssistantServiceIT extends AssistantServiceTest {
private String exampleIntent;
private Assistant service;
private String workspaceId;
private DateFormat isoDateFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSX");
/**
 * Sets up the tests.
 *
 * <p>Delegates to the base class setup, then caches the configured {@link Assistant} client and
 * the target workspace id used by every test in this class.
 *
 * @throws Exception the exception
 */
@Override
@Before
public void setUp() throws Exception {
super.setUp();
this.service = getService();
this.workspaceId = getWorkspaceId();
}
/**
 * Test README.
 *
 * <p>Smoke-tests the snippet from the project README: send "Hi" to the workspace and print the
 * response.
 */
@Test
public void testReadme() {
  MessageInput greeting = new MessageInput();
  greeting.setText("Hi");
  MessageOptions messageOptions =
      new MessageOptions.Builder(workspaceId).input(greeting).build();
  MessageResponse reply = service.message(messageOptions).execute().getResult();
  System.out.println(reply);
}
/**
 * Test Example.
 *
 * <p>Exercises the same message request through all three SDK invocation styles: synchronous
 * ({@code execute()}), asynchronous callback ({@code enqueue(...)}), and reactive
 * ({@code reactiveRequest()} with RxJava). The final sleep gives the async/reactive calls time
 * to complete before the test method returns.
 *
 * @throws InterruptedException the interrupted exception
 */
@Test
public void testExample() throws InterruptedException {
MessageInput input = new MessageInput();
input.setText("Hi");
MessageOptions options = new MessageOptions.Builder(workspaceId).input(input).build();
// sync
MessageResponse response = service.message(options).execute().getResult();
System.out.println(response);
// async
service
.message(options)
.enqueue(
new ServiceCallback<MessageResponse>() {
@Override
public void onResponse(Response<MessageResponse> response) {
System.out.println(response.getResult());
}
@Override
public void onFailure(Exception e) {}
});
// reactive
Single<Response<MessageResponse>> observableRequest =
service.message(options).reactiveRequest();
observableRequest
.subscribeOn(Schedulers.single())
.subscribe(
new Consumer<Response<MessageResponse>>() {
@Override
public void accept(Response<MessageResponse> response) throws Exception {
System.out.println(response.getResult());
}
});
// Give the async and reactive requests time to finish before the test exits.
Thread.sleep(5000);
}
/** Verifies that bogus credentials cause the service to throw {@link UnauthorizedException}. */
@Test(expected = UnauthorizedException.class)
public void pingBadCredentialsThrowsException() {
  BasicAuthenticator badAuth = new BasicAuthenticator("foo", "bar");
  Assistant badService = new Assistant("2019-02-28", badAuth);
  MessageOptions emptyMessage = new MessageOptions.Builder(workspaceId).build();
  badService.message(emptyMessage).execute().getResult();
}
/** Verifies that a conversation can be started with no input message at all. */
@Test
public void testStartAConversationWithoutMessage() {
  MessageOptions emptyOptions = new MessageOptions.Builder(workspaceId).build();
  service.message(emptyOptions).execute().getResult();
}
/**
 * Drives a short multi-turn conversation ("turn ac on" ... "yes"), carrying the returned
 * context forward between turns; on the final turn it also injects a forced "off_topic"
 * intent with full confidence.
 *
 * @throws InterruptedException if interrupted during the pause between turns
 */
@Test
public void testSendMessages() throws InterruptedException {
final String[] messages = new String[] {"turn ac on", "turn right", "no", "yes"};
Context context = null;
MessageInput input = new MessageInput();
for (final String message : messages) {
input.setText(message);
MessageOptions request =
new MessageOptions.Builder(workspaceId)
.input(input)
.alternateIntents(true)
.context(context)
.nodesVisitedDetails(true)
.build();
if (message.equals("yes")) {
// On the last turn, pre-seed the request with a fully-confident "off_topic" intent.
RuntimeIntent offTopic =
new RuntimeIntent.Builder().intent("off_topic").confidence(1.0).build();
request = request.newBuilder().addIntent(offTopic).build();
}
MessageResponse response = service.message(request).execute().getResult();
assertMessageFromService(response);
assertNotNull(response.getOutput().getNodesVisitedDetails());
// Copy the response context so the next turn continues the same conversation.
context = new Context();
for (String propName : response.getContext().getPropertyNames()) {
context.put(propName, response.getContext().get(propName));
}
// Brief pause between turns — presumably to avoid service rate limits; TODO confirm.
Thread.sleep(500);
}
}
/**
 * Asserts that a service reply is well formed: non-null, with entity and intent lists present.
 *
 * @param reply the response returned by {@link Assistant#message}
 */
private void assertMessageFromService(MessageResponse reply) {
  assertNotNull(reply);
  assertNotNull(reply.getEntities());
  assertNotNull(reply.getIntents());
}
/** Verifies that a null options object is rejected with {@link IllegalArgumentException}. */
@Test(expected = IllegalArgumentException.class)
public void testMessageWithNull() {
  MessageOptions absentOptions = null;
  service.message(absentOptions).execute().getResult();
}
/** Verifies that the service client has a non-null string representation. */
@Test
public void testToString() {
  String description = service.toString();
  assertNotNull(description);
}
/** Tests creating a counterexample and verifying the text echoed back by the service. */
@Test
public void testCreateCounterexample() {
  // Text must be unique per run so repeated executions do not collide in the workspace.
  String counterExampleText = "Make me a " + UUID.randomUUID().toString() + " sandwich";
  CreateCounterexampleOptions createOptions =
      new CreateCounterexampleOptions.Builder(workspaceId, counterExampleText).build();
  Counterexample response = service.createCounterexample(createOptions).execute().getResult();
  try {
    assertNotNull(response);
    assertNotNull(response.text());
    // Fixed argument order: JUnit expects (expected, actual) for accurate failure messages.
    assertEquals(counterExampleText, response.text());
  } catch (Exception ex) {
    fail(ex.getMessage());
  } finally {
    // Clean up the counterexample regardless of assertion outcome.
    DeleteCounterexampleOptions deleteOptions =
        new DeleteCounterexampleOptions.Builder(workspaceId, counterExampleText).build();
    service.deleteCounterexample(deleteOptions).execute();
  }
}
/** Tests that a deleted counterexample can no longer be fetched. */
@Test
public void testDeleteCounterexample() {
  // Unique text so repeated runs do not collide with leftover workspace state.
  String text = "Make me a " + UUID.randomUUID().toString() + " sandwich";
  service
      .createCounterexample(new CreateCounterexampleOptions.Builder(workspaceId, text).build())
      .execute()
      .getResult();
  service
      .deleteCounterexample(new DeleteCounterexampleOptions.Builder(workspaceId, text).build())
      .execute();
  try {
    GetCounterexampleOptions getOptions =
        new GetCounterexampleOptions.Builder(workspaceId, text).build();
    service.getCounterexample(getOptions).execute();
    fail("deleteCounterexample failed");
  } catch (Exception ex) {
    // A NotFoundException is exactly what a successful deletion should produce here.
    assertTrue(ex instanceof NotFoundException);
  }
}
/** Tests fetching a counterexample and checking its text and audit timestamps. */
@Test
public void testGetCounterexample() {
Date start = new Date();
// Unique text so repeated runs do not collide with leftover workspace state.
String counterExampleText =
"Make me a " + UUID.randomUUID().toString() + " sandwich"; // gotta be unique
CreateCounterexampleOptions createOptions =
new CreateCounterexampleOptions.Builder(workspaceId, counterExampleText).build();
service.createCounterexample(createOptions).execute().getResult();
try {
// includeAudit(true) asks the service to return created/updated timestamps.
GetCounterexampleOptions getOptions =
new GetCounterexampleOptions.Builder(workspaceId, counterExampleText)
.includeAudit(true)
.build();
Counterexample response = service.getCounterexample(getOptions).execute().getResult();
assertNotNull(response);
assertNotNull(response.text());
assertEquals(response.text(), counterExampleText);
assertNotNull(response.created());
assertNotNull(response.updated());
// Audit timestamps must fall between test start and now; fuzzyBefore/fuzzyAfter
// presumably tolerate client/service clock skew — see base class for exact semantics.
Date now = new Date();
assertTrue(fuzzyBefore(response.created(), now));
assertTrue(fuzzyAfter(response.created(), start));
assertTrue(fuzzyBefore(response.updated(), now));
assertTrue(fuzzyAfter(response.updated(), start));
} catch (Exception ex) {
fail(ex.getMessage());
} finally {
// Clean up
DeleteCounterexampleOptions deleteOptions =
new DeleteCounterexampleOptions.Builder(workspaceId, counterExampleText).build();
service.deleteCounterexample(deleteOptions).execute();
}
}
/** Tests listing counterexamples and verifying a newly added one appears in the results. */
@Test
public void testListCounterexamples() {
// Unique text so repeated runs do not collide with leftover workspace state.
String counterExampleText =
"Make me a " + UUID.randomUUID().toString() + " sandwich"; // gotta be unique
try {
ListCounterexamplesOptions listOptions =
new ListCounterexamplesOptions.Builder(workspaceId).build();
CounterexampleCollection ccResponse =
service.listCounterexamples(listOptions).execute().getResult();
assertNotNull(ccResponse);
assertNotNull(ccResponse.getCounterexamples());
assertNotNull(ccResponse.getPagination());
assertNotNull(ccResponse.getPagination().getRefreshUrl());
// nextUrl may be null
Date start = new Date();
// Now add a counterexample and make sure we get it back
CreateCounterexampleOptions createOptions =
new CreateCounterexampleOptions.Builder(workspaceId, counterExampleText).build();
service.createCounterexample(createOptions).execute().getResult();
// Re-list with a page limit large enough to include the new item on one page.
long count = ccResponse.getCounterexamples().size();
CounterexampleCollection ccResponse2 =
service
.listCounterexamples(
listOptions.newBuilder().pageLimit(count + 1).includeAudit(true).build())
.execute()
.getResult();
assertNotNull(ccResponse2);
assertNotNull(ccResponse2.getCounterexamples());
List<Counterexample> counterexamples = ccResponse2.getCounterexamples();
assertTrue(counterexamples.size() > count);
// Locate the counterexample we just created in the fresh listing.
Counterexample exResponse = null;
for (Counterexample resp : counterexamples) {
if (resp.text().equals(counterExampleText)) {
exResponse = resp;
break;
}
}
assertNotNull(exResponse);
// Audit timestamps must fall between test start and now.
Date now = new Date();
assertTrue(fuzzyBefore(exResponse.created(), now));
assertTrue(fuzzyAfter(exResponse.created(), start));
assertTrue(fuzzyBefore(exResponse.updated(), now));
assertTrue(fuzzyAfter(exResponse.updated(), start));
} catch (Exception ex) {
fail(ex.getMessage());
} finally {
// Clean up
try {
DeleteCounterexampleOptions deleteOptions =
new DeleteCounterexampleOptions.Builder(workspaceId, counterExampleText).build();
service.deleteCounterexample(deleteOptions).execute();
} catch (NotFoundException ex) {
// Deletion may 404 if the create step failed earlier; that is acceptable here.
}
}
}
/** Tests paging through counterexamples one per page, verifying sort order and cursors. */
@Test
public void testListCounterexamplesWithPaging() {
// "alpha..." and "zeta..." prefixes pin the expected text-sort order below.
String counterExampleText1 = "alpha" + UUID.randomUUID().toString(); // gotta be unique
String counterExampleText2 = "zeta" + UUID.randomUUID().toString(); // gotta be unique
// Add two counterexamples
CreateCounterexampleOptions createOptions =
new CreateCounterexampleOptions.Builder(workspaceId, counterExampleText1).build();
service.createCounterexample(createOptions).execute().getResult();
service
.createCounterexample(createOptions.newBuilder().text(counterExampleText2).build())
.execute()
.getResult();
try {
// Page size of 1 with a text sort forces the service to return pagination cursors.
ListCounterexamplesOptions listOptions =
new ListCounterexamplesOptions.Builder(workspaceId).pageLimit(1L).sort("text").build();
CounterexampleCollection response =
service.listCounterexamples(listOptions).execute().getResult();
assertNotNull(response);
assertNotNull(response.getPagination());
assertNotNull(response.getPagination().getRefreshUrl());
assertNotNull(response.getPagination().getNextUrl());
assertNotNull(response.getPagination().getNextCursor());
boolean found1 = false;
boolean found2 = false;
// Walk every page via the cursor until the service reports no next page.
while (true) {
assertNotNull(response.getCounterexamples());
assertTrue(response.getCounterexamples().size() == 1);
found1 |= response.getCounterexamples().get(0).text().equals(counterExampleText1);
found2 |= response.getCounterexamples().get(0).text().equals(counterExampleText2);
// Ascending sort: "alpha..." must be seen before "zeta...".
assertTrue(found1 || !found2); // verify sort
if (response.getPagination().getNextCursor() == null) {
break;
}
String cursor = response.getPagination().getNextCursor();
response =
service
.listCounterexamples(listOptions.newBuilder().cursor(cursor).build())
.execute()
.getResult();
}
// Both items must have been seen across the pages.
assertTrue(found1 && found2);
} catch (Exception ex) {
fail(ex.getMessage());
} finally {
// Clean up
DeleteCounterexampleOptions deleteOptions =
new DeleteCounterexampleOptions.Builder(workspaceId, counterExampleText1).build();
service.deleteCounterexample(deleteOptions).execute();
service
.deleteCounterexample(deleteOptions.newBuilder().text(counterExampleText2).build())
.execute();
}
}
/** Tests renaming a counterexample via updateCounterexample. */
@Test
public void testUpdateCounterexample() {
  // Both texts unique per run so repeated executions do not collide in the workspace.
  String counterExampleText = "Make me a " + UUID.randomUUID().toString() + " sandwich";
  String counterExampleText2 = "Make me a " + UUID.randomUUID().toString() + " sandwich";
  CreateCounterexampleOptions createOptions =
      new CreateCounterexampleOptions.Builder(workspaceId, counterExampleText).build();
  service.createCounterexample(createOptions).execute().getResult();
  try {
    UpdateCounterexampleOptions updateOptions =
        new UpdateCounterexampleOptions.Builder(workspaceId, counterExampleText)
            .newText(counterExampleText2)
            .build();
    Counterexample response = service.updateCounterexample(updateOptions).execute().getResult();
    assertNotNull(response);
    assertNotNull(response.text());
    // Fixed argument order: JUnit expects (expected, actual) for accurate failure messages.
    assertEquals(counterExampleText2, response.text());
  } catch (Exception ex) {
    fail(ex.getMessage());
  } finally {
    // Clean up under the *new* text — the update renamed the counterexample.
    DeleteCounterexampleOptions deleteOptions =
        new DeleteCounterexampleOptions.Builder(workspaceId, counterExampleText2).build();
    service.deleteCounterexample(deleteOptions).execute();
  }
}
/**
 * Ensures the shared "Hello" intent used by the example tests exists, creating it if needed.
 * Sets the {@code exampleIntent} field as a side effect.
 */
public void createExampleIntent() {
exampleIntent = "Hello";
try {
CreateIntentOptions createOptions =
new CreateIntentOptions.Builder(workspaceId, exampleIntent)
.description("Example Intent")
.build();
service.createIntent(createOptions).execute().getResult();
} catch (Exception ex) {
// The intent may already exist from an earlier run; any other failure is a real error.
// NOTE(review): matching on the message prefix is fragile — confirm the service still
// reports duplicate intents with a "Unique Violation" message.
assertTrue(ex.getLocalizedMessage().startsWith("Unique Violation"));
}
}
/** Tests creating an intent example and verifying the text echoed back by the service. */
@Test
public void testCreateExample() {
  createExampleIntent();
  // Unique text so repeated runs do not collide in the workspace.
  String exampleText = "Howdy " + UUID.randomUUID().toString();
  CreateExampleOptions createOptions =
      new CreateExampleOptions.Builder(workspaceId, exampleIntent, exampleText).build();
  Example response = service.createExample(createOptions).execute().getResult();
  try {
    assertNotNull(response);
    assertNotNull(response.text());
    // Fixed argument order: JUnit expects (expected, actual) for accurate failure messages.
    assertEquals(exampleText, response.text());
  } catch (Exception ex) {
    fail(ex.getMessage());
  } finally {
    // Clean up the example regardless of assertion outcome.
    DeleteExampleOptions deleteOptions =
        new DeleteExampleOptions.Builder(workspaceId, exampleIntent, exampleText).build();
    service.deleteExample(deleteOptions).execute();
  }
}
/** Tests that a deleted intent example can no longer be fetched. */
@Test
public void testDeleteExample() {
  createExampleIntent();
  // Unique text so repeated runs do not collide in the workspace.
  String exampleText = "Howdy " + UUID.randomUUID().toString();
  CreateExampleOptions createOptions =
      new CreateExampleOptions.Builder(workspaceId, exampleIntent, exampleText).build();
  service.createExample(createOptions).execute().getResult();
  DeleteExampleOptions deleteOptions =
      new DeleteExampleOptions.Builder(workspaceId, exampleIntent, exampleText).build();
  service.deleteExample(deleteOptions).execute();
  try {
    GetExampleOptions getOptions =
        new GetExampleOptions.Builder(workspaceId, exampleIntent, exampleText).build();
    service.getExample(getOptions).execute().getResult();
    // Bug fix: the message previously said "deleteCounterexample failed" (copy-paste error).
    fail("deleteExample failed");
  } catch (Exception ex) {
    // Expected: the example is gone, so the fetch must raise NotFoundException.
    assertTrue(ex instanceof NotFoundException);
  }
}
/** Tests fetching an intent example and checking its text and audit timestamps. */
@Test
public void testGetExample() {
createExampleIntent();
Date start = new Date();
// Unique text so repeated runs do not collide with leftover workspace state.
String exampleText = "Howdy " + UUID.randomUUID().toString(); // gotta be unique
CreateExampleOptions createOptions =
new CreateExampleOptions.Builder(workspaceId, exampleIntent, exampleText).build();
service.createExample(createOptions).execute().getResult();
try {
// includeAudit(true) asks the service to return created/updated timestamps.
GetExampleOptions getOptions =
new GetExampleOptions.Builder(workspaceId, exampleIntent, exampleText)
.includeAudit(true)
.build();
Example response = service.getExample(getOptions).execute().getResult();
assertNotNull(response);
assertNotNull(response.text());
assertEquals(response.text(), exampleText);
assertNotNull(response.created());
assertNotNull(response.updated());
// Audit timestamps must fall between test start and now; fuzzyBefore/fuzzyAfter
// presumably tolerate client/service clock skew — see base class for exact semantics.
Date now = new Date();
assertTrue(fuzzyBefore(response.created(), now));
assertTrue(fuzzyAfter(response.created(), start));
assertTrue(fuzzyBefore(response.updated(), now));
assertTrue(fuzzyAfter(response.updated(), start));
} catch (Exception ex) {
fail(ex.getMessage());
} finally {
// Clean up
DeleteExampleOptions deleteOptions =
new DeleteExampleOptions.Builder(workspaceId, exampleIntent, exampleText).build();
service.deleteExample(deleteOptions).execute();
}
}
/** Tests listing intent examples and verifying a newly added one appears in the results. */
@Test
public void testListExamples() {
createExampleIntent();
// Unique text so repeated runs do not collide with leftover workspace state.
String exampleText = "Howdy " + UUID.randomUUID().toString(); // gotta be unique
try {
ListExamplesOptions listOptions =
new ListExamplesOptions.Builder(workspaceId, exampleIntent).includeAudit(true).build();
ExampleCollection ecResponse = service.listExamples(listOptions).execute().getResult();
assertNotNull(ecResponse);
assertNotNull(ecResponse.getExamples());
assertNotNull(ecResponse.getPagination());
assertNotNull(ecResponse.getPagination().getRefreshUrl());
// nextUrl may be null
Date start = new Date();
// Now add an example and make sure we get it back
CreateExampleOptions createOptions =
new CreateExampleOptions.Builder(workspaceId, exampleIntent, exampleText).build();
service.createExample(createOptions).execute().getResult();
// Re-list with a page limit large enough to include the new item on one page.
long count = ecResponse.getExamples().size();
ExampleCollection ecResponse2 =
service
.listExamples(
listOptions.newBuilder().pageLimit(count + 1).includeAudit(true).build())
.execute()
.getResult();
assertNotNull(ecResponse2);
assertNotNull(ecResponse2.getExamples());
List<Example> examples = ecResponse2.getExamples();
assertTrue(examples.size() > count);
// Locate the example we just created in the fresh listing.
Example exResponse = null;
for (Example resp : examples) {
if (resp.text().equals(exampleText)) {
exResponse = resp;
break;
}
}
assertNotNull(exResponse);
// Audit timestamps must fall between test start and now.
Date now = new Date();
assertTrue(fuzzyBefore(exResponse.created(), now));
assertTrue(fuzzyAfter(exResponse.created(), start));
assertTrue(fuzzyBefore(exResponse.updated(), now));
assertTrue(fuzzyAfter(exResponse.updated(), start));
} catch (Exception ex) {
fail(ex.getMessage());
} finally {
// Clean up
DeleteExampleOptions deleteOptions =
new DeleteExampleOptions.Builder(workspaceId, exampleIntent, exampleText).build();
service.deleteExample(deleteOptions).execute();
}
}
/** Tests paging through intent examples one per page with a descending text sort. */
@Test
public void testListExamplesWithPaging() {
createExampleIntent();
// "Alpha"/"Zeta" prefixes pin the expected descending sort order below.
String exampleText1 = "Alpha " + UUID.randomUUID().toString(); // gotta be unique
String exampleText2 = "Zeta " + UUID.randomUUID().toString(); // gotta be unique
CreateExampleOptions createOptions =
new CreateExampleOptions.Builder(workspaceId, exampleIntent, exampleText1).build();
service.createExample(createOptions).execute().getResult();
service
.createExample(createOptions.newBuilder().text(exampleText2).build())
.execute()
.getResult();
try {
// Page size of 1 with a *descending* text sort ("-text") forces cursors to be returned.
ListExamplesOptions listOptions =
new ListExamplesOptions.Builder(workspaceId, exampleIntent)
.pageLimit(1L)
.sort("-text")
.build();
ExampleCollection response = service.listExamples(listOptions).execute().getResult();
assertNotNull(response);
assertNotNull(response.getExamples());
assertNotNull(response.getPagination());
assertNotNull(response.getPagination().getRefreshUrl());
assertNotNull(response.getPagination().getNextUrl());
assertNotNull(response.getPagination().getNextCursor());
boolean found1 = false;
boolean found2 = false;
// Walk every page via the cursor until the service reports no next page.
while (true) {
assertNotNull(response.getExamples());
assertTrue(response.getExamples().size() == 1);
found1 |= response.getExamples().get(0).text().equals(exampleText1);
found2 |= response.getExamples().get(0).text().equals(exampleText2);
// Descending sort: "Zeta..." must be seen before "Alpha...".
assertTrue(found2 || !found1); // verify sort
if (response.getPagination().getNextCursor() == null) {
break;
}
String cursor = response.getPagination().getNextCursor();
response =
service
.listExamples(listOptions.newBuilder().cursor(cursor).build())
.execute()
.getResult();
}
// Both items must have been seen across the pages.
assertTrue(found1 && found2);
} catch (Exception ex) {
fail(ex.getMessage());
} finally {
// Clean up
DeleteExampleOptions deleteOptions =
new DeleteExampleOptions.Builder(workspaceId, exampleIntent, exampleText1).build();
service.deleteExample(deleteOptions).execute();
service.deleteExample(deleteOptions.newBuilder().text(exampleText2).build()).execute();
}
}
/** Tests renaming an intent example via updateExample. */
@Test
public void testUpdateExample() {
  createExampleIntent();
  // Both texts unique per run so repeated executions do not collide in the workspace.
  String exampleText = "Howdy " + UUID.randomUUID().toString();
  String exampleText2 = "Howdy " + UUID.randomUUID().toString();
  CreateExampleOptions createOptions =
      new CreateExampleOptions.Builder(workspaceId, exampleIntent, exampleText).build();
  service.createExample(createOptions).execute().getResult();
  try {
    UpdateExampleOptions updateOptions =
        new UpdateExampleOptions.Builder(workspaceId, exampleIntent, exampleText)
            .newText(exampleText2)
            .build();
    Example response = service.updateExample(updateOptions).execute().getResult();
    assertNotNull(response);
    assertNotNull(response.text());
    // Fixed argument order: JUnit expects (expected, actual) for accurate failure messages.
    assertEquals(exampleText2, response.text());
  } catch (Exception ex) {
    fail(ex.getMessage());
  } finally {
    // Clean up under the *new* text — the update renamed the example.
    DeleteExampleOptions deleteOptions =
        new DeleteExampleOptions.Builder(workspaceId, exampleIntent, exampleText2).build();
    service.deleteExample(deleteOptions).execute();
  }
}
/** Tests creating an intent with one example and verifying both come back from the service. */
@Test
public void testCreateIntent() {
// Unique name so repeated runs do not collide with leftover workspace state.
String intentName = "Hello" + UUID.randomUUID().toString(); // gotta be unique
String intentDescription = "Description of " + intentName;
String intentExample = "Example of " + intentName;
List<Example> intentExamples = new ArrayList<>();
intentExamples.add(new Example.Builder().text(intentExample).build());
Date start = new Date();
CreateIntentOptions createOptions =
new CreateIntentOptions.Builder(workspaceId, intentName)
.description(intentDescription)
.examples(intentExamples)
.build();
Intent response = service.createIntent(createOptions).execute().getResult();
try {
assertNotNull(response);
assertNotNull(response.getIntent());
assertEquals(response.getIntent(), intentName);
assertNotNull(response.getDescription());
assertEquals(response.getDescription(), intentDescription);
Date now = new Date();
// Verify the example attached at creation time is listed with sane audit timestamps.
ListExamplesOptions listOptions =
new ListExamplesOptions.Builder(workspaceId, intentName).includeAudit(true).build();
ExampleCollection ecResponse = service.listExamples(listOptions).execute().getResult();
assertNotNull(ecResponse);
assertNotNull(ecResponse.getExamples());
List<Example> examples = ecResponse.getExamples();
assertTrue(examples.size() == 1);
assertEquals(examples.get(0).text(), intentExample);
assertTrue(fuzzyBefore(examples.get(0).created(), now));
assertTrue(fuzzyAfter(examples.get(0).created(), start));
assertTrue(fuzzyBefore(examples.get(0).updated(), now));
assertTrue(fuzzyAfter(examples.get(0).updated(), start));
} catch (Exception ex) {
fail(ex.getMessage());
} finally {
// Clean up
DeleteIntentOptions deleteOptions =
new DeleteIntentOptions.Builder(workspaceId, intentName).build();
service.deleteIntent(deleteOptions).execute();
}
}
/** Tests that a deleted intent can no longer be fetched. */
@Test
public void testDeleteIntent() {
  // Unique name so repeated runs do not collide with leftover workspace state.
  String intentName = "Hello" + UUID.randomUUID().toString();
  service
      .createIntent(new CreateIntentOptions.Builder(workspaceId, intentName).build())
      .execute()
      .getResult();
  service
      .deleteIntent(new DeleteIntentOptions.Builder(workspaceId, intentName).build())
      .execute();
  try {
    service.getIntent(new GetIntentOptions.Builder(workspaceId, intentName).build()).execute();
    fail("deleteIntent failed");
  } catch (Exception ex) {
    // Expected: the intent is gone, so the fetch must raise NotFoundException.
    assertTrue(ex instanceof NotFoundException);
  }
}
/** Tests fetching an intent (with export) and checking its fields and audit timestamps. */
@Test
public void testGetIntent() {
// Unique name so repeated runs do not collide with leftover workspace state.
String intentName = "Hello" + UUID.randomUUID().toString(); // gotta be unique
String intentDescription = "Description of " + intentName;
String intentExample = "Example of " + intentName;
List<Example> intentExamples = new ArrayList<>();
intentExamples.add(new Example.Builder().text(intentExample).build());
Date start = new Date();
CreateIntentOptions createOptions =
new CreateIntentOptions.Builder()
.workspaceId(workspaceId)
.intent(intentName)
.description(intentDescription)
.examples(intentExamples)
.build();
service.createIntent(createOptions).execute().getResult();
try {
// export(true) asks for the full intent content (including examples);
// includeAudit(true) asks for created/updated timestamps.
GetIntentOptions getOptions =
new GetIntentOptions.Builder()
.workspaceId(workspaceId)
.intent(intentName)
.export(true)
.includeAudit(true)
.build();
Intent response = service.getIntent(getOptions).execute().getResult();
assertNotNull(response);
assertNotNull(response.getIntent());
assertEquals(response.getIntent(), intentName);
assertNotNull(response.getDescription());
assertEquals(response.getDescription(), intentDescription);
assertNotNull(response.getExamples());
assertNotNull(response.getCreated());
assertNotNull(response.getUpdated());
// Audit timestamps must fall between test start and now.
Date now = new Date();
assertTrue(fuzzyBefore(response.getCreated(), now));
assertTrue(fuzzyAfter(response.getCreated(), start));
assertTrue(fuzzyBefore(response.getUpdated(), now));
assertTrue(fuzzyAfter(response.getUpdated(), start));
// The single example supplied at creation must be returned by the export.
List<Example> examples = response.getExamples();
assertTrue(examples.size() == 1);
assertEquals(examples.get(0).text(), intentExample);
assertTrue(fuzzyBefore(examples.get(0).created(), now));
assertTrue(fuzzyAfter(examples.get(0).created(), start));
assertTrue(fuzzyBefore(examples.get(0).updated(), now));
assertTrue(fuzzyAfter(examples.get(0).updated(), start));
} catch (Exception ex) {
fail(ex.getMessage());
} finally {
// Clean up
DeleteIntentOptions deleteOptions =
new DeleteIntentOptions.Builder(workspaceId, intentName).build();
service.deleteIntent(deleteOptions).execute();
}
}
/** Tests listing intents and verifying a newly added one appears with its example. */
@Test
public void testListIntents() {
// Unique name so repeated runs do not collide with leftover workspace state.
String intentName = "Hello" + UUID.randomUUID().toString(); // gotta be unique
try {
ListIntentsOptions listOptions =
new ListIntentsOptions.Builder(workspaceId).includeAudit(true).build();
IntentCollection response = service.listIntents(listOptions).execute().getResult();
assertNotNull(response);
assertNotNull(response.getIntents());
assertNotNull(response.getPagination());
assertNotNull(response.getPagination().getRefreshUrl());
// nextUrl may be null
// Now add an intent and make sure we get it back
String intentDescription = "Description of " + intentName;
String intentExample = "Example of " + intentName;
List<Example> intentExamples = new ArrayList<>();
intentExamples.add(new Example.Builder().text(intentExample).build());
Date start = new Date();
CreateIntentOptions createOptions =
new CreateIntentOptions.Builder(workspaceId, intentName)
.description(intentDescription)
.examples(intentExamples)
.build();
service.createIntent(createOptions).execute().getResult();
// Re-list (with export) using a page limit large enough for one page.
long count = response.getIntents().size();
ListIntentsOptions listOptions2 =
new ListIntentsOptions.Builder(workspaceId)
.export(true)
.pageLimit(count + 1)
.includeAudit(true)
.build();
IntentCollection response2 = service.listIntents(listOptions2).execute().getResult();
assertNotNull(response2);
assertNotNull(response2.getIntents());
List<Intent> intents = response2.getIntents();
assertTrue(intents.size() > count);
// Locate the intent we just created in the fresh listing.
Intent ieResponse = null;
for (Intent resp : intents) {
if (resp.getIntent().equals(intentName)) {
ieResponse = resp;
break;
}
}
assertNotNull(ieResponse);
assertNotNull(ieResponse.getDescription());
assertEquals(ieResponse.getDescription(), intentDescription);
assertNotNull(ieResponse.getExamples());
assertTrue(ieResponse.getExamples().size() == 1);
assertEquals(ieResponse.getExamples().get(0).text(), intentExample);
// Audit timestamps must fall between test start and now.
Date now = new Date();
assertTrue(fuzzyBefore(ieResponse.getCreated(), now));
assertTrue(fuzzyAfter(ieResponse.getCreated(), start));
assertTrue(fuzzyBefore(ieResponse.getUpdated(), now));
assertTrue(fuzzyAfter(ieResponse.getUpdated(), start));
} catch (Exception ex) {
fail(ex.getMessage());
} finally {
// Clean up
DeleteIntentOptions deleteOptions =
new DeleteIntentOptions.Builder(workspaceId, intentName).build();
service.deleteIntent(deleteOptions).execute();
}
}
/** Tests paging through intents one per page, sorted by modification time. */
@Test
public void testListIntentsWithPaging() {
// Unique names so repeated runs do not collide with leftover workspace state.
String intentName1 = "First" + UUID.randomUUID().toString(); // gotta be unique
String intentName2 = "Second" + UUID.randomUUID().toString(); // gotta be unique
CreateIntentOptions createOptions =
new CreateIntentOptions.Builder(workspaceId, intentName1).build();
service.createIntent(createOptions).execute().getResult();
service
.createIntent(createOptions.newBuilder().intent(intentName2).build())
.execute()
.getResult();
try {
// Page size of 1 with a "modified" sort forces the service to return cursors.
ListIntentsOptions listOptions =
new ListIntentsOptions.Builder()
.workspaceId(workspaceId)
.export(true)
.pageLimit(1L)
.sort("modified")
.includeAudit(true)
.build();
IntentCollection response = service.listIntents(listOptions).execute().getResult();
assertNotNull(response);
assertNotNull(response.getIntents());
assertNotNull(response.getPagination());
assertNotNull(response.getPagination().getRefreshUrl());
assertNotNull(response.getPagination().getNextUrl());
assertNotNull(response.getPagination().getNextCursor());
boolean found1 = false;
boolean found2 = false;
// Walk every page via the cursor until the service reports no next page.
while (true) {
assertNotNull(response.getIntents());
assertTrue(response.getIntents().size() == 1);
found1 |= response.getIntents().get(0).getIntent().equals(intentName1);
found2 |= response.getIntents().get(0).getIntent().equals(intentName2);
// Intent1 was created (hence modified) before intent2, so it must be seen first —
// NOTE(review): this assumes ascending "modified" order; confirm against the API.
assertTrue(found1 || !found2); // verify sort
if (response.getPagination().getNextCursor() == null) {
break;
}
String cursor = response.getPagination().getNextCursor();
response =
service
.listIntents(listOptions.newBuilder().cursor(cursor).build())
.execute()
.getResult();
}
// Both intents must have been seen across the pages.
assertTrue(found1 && found2);
} catch (Exception ex) {
fail(ex.getMessage());
} finally {
// Clean up
DeleteIntentOptions deleteOptions =
new DeleteIntentOptions.Builder(workspaceId, intentName1).build();
service.deleteIntent(deleteOptions).execute();
service.deleteIntent(deleteOptions.newBuilder().intent(intentName2).build()).execute();
}
}
/** Tests updating an intent's description and replacing its examples. */
@Test
public void testUpdateIntent() {
// Unique name so repeated runs do not collide with leftover workspace state.
String intentName = "Hello" + UUID.randomUUID().toString(); // gotta be unique
String intentDescription = "Description of " + intentName;
String intentExample = "Example of " + intentName;
List<Example> intentExamples = new ArrayList<>();
intentExamples.add(new Example.Builder().text(intentExample).build());
CreateIntentOptions createOptions =
new CreateIntentOptions.Builder(workspaceId, intentName)
.description(intentDescription)
.examples(intentExamples)
.build();
service.createIntent(createOptions).execute().getResult();
try {
String intentDescription2 = "Updated description of " + intentName;
String intentExample2 = "Updated Example of " + intentName;
List<Example> intentExamples2 = new ArrayList<>();
intentExamples2.add(new Example.Builder().text(intentExample2).build());
Date start = new Date();
// newExamples replaces the intent's example list wholesale.
UpdateIntentOptions updateOptions =
new UpdateIntentOptions.Builder(workspaceId, intentName)
.newDescription(intentDescription2)
.newExamples(intentExamples2)
.build();
Intent response = service.updateIntent(updateOptions).execute().getResult();
assertNotNull(response);
assertNotNull(response.getIntent());
assertEquals(response.getIntent(), intentName);
assertNotNull(response.getDescription());
assertEquals(response.getDescription(), intentDescription2);
Date now = new Date();
// Verify the replacement example is now the only one, with sane audit timestamps.
ListExamplesOptions listOptions =
new ListExamplesOptions.Builder(workspaceId, intentName).includeAudit(true).build();
ExampleCollection ecResponse = service.listExamples(listOptions).execute().getResult();
assertNotNull(ecResponse);
assertNotNull(ecResponse.getExamples());
List<Example> examples = ecResponse.getExamples();
assertTrue(examples.size() == 1);
assertEquals(examples.get(0).text(), intentExample2);
assertTrue(fuzzyBefore(examples.get(0).created(), now));
assertTrue(fuzzyAfter(examples.get(0).created(), start));
assertTrue(fuzzyBefore(examples.get(0).updated(), now));
assertTrue(fuzzyAfter(examples.get(0).updated(), start));
} catch (Exception ex) {
fail(ex.getMessage());
} finally {
// Clean up
DeleteIntentOptions deleteOptions =
new DeleteIntentOptions.Builder(workspaceId, intentName).build();
service.deleteIntent(deleteOptions).execute();
}
}
  /**
   * Test createWorkspace: builds a fully-populated workspace (metadata, intents, entities,
   * counterexamples, system settings, webhooks), then re-fetches it with export=true and
   * verifies every sub-structure round-trips. The workspace is deleted in the finally block.
   */
  @Test
  public void testCreateWorkspace() {
    String workspaceName = "API Test " + UUID.randomUUID().toString(); // gotta be unique
    String workspaceDescription = "Description of " + workspaceName;
    String workspaceLanguage = "en";

    // metadata
    Map<String, Object> workspaceMetadata = new HashMap<String, Object>();
    String metadataValue = "value for " + workspaceName;
    workspaceMetadata.put("key", metadataValue);

    // intents
    List<CreateIntent> workspaceIntents = new ArrayList<CreateIntent>();
    String intentName = "Hello" + UUID.randomUUID().toString(); // gotta be unique
    String intentDescription = "Description of " + intentName;
    String intentExample = "Example of " + intentName;
    List<Example> intentExamples = new ArrayList<>();
    intentExamples.add(new Example.Builder().text(intentExample).build());
    workspaceIntents.add(
        new CreateIntent.Builder()
            .intent(intentName)
            .description(intentDescription)
            .examples(intentExamples)
            .build());

    // entities
    List<CreateEntity> workspaceEntities = new ArrayList<CreateEntity>();
    String entityName = "Hello" + UUID.randomUUID().toString(); // gotta be unique
    String entityDescription = "Description of " + entityName;
    String entityValue = "Value of " + entityName;
    String entityValueSynonym = "Synonym for Value of " + entityName;
    List<CreateValue> entityValues = new ArrayList<CreateValue>();
    entityValues.add(
        new CreateValue.Builder().value(entityValue).addSynonym(entityValueSynonym).build());
    workspaceEntities.add(
        new CreateEntity.Builder()
            .entity(entityName)
            .description(entityDescription)
            .values(entityValues)
            .build());

    // counterexamples
    List<Counterexample> workspaceCounterExamples = new ArrayList<>();
    String counterExampleText = "Counterexample for " + workspaceName;
    workspaceCounterExamples.add(new Counterexample.Builder().text(counterExampleText).build());

    // systemSettings
    WorkspaceSystemSettingsDisambiguation disambiguation =
        new WorkspaceSystemSettingsDisambiguation.Builder()
            .enabled(true)
            .noneOfTheAbovePrompt("none of the above")
            .prompt("prompt")
            .sensitivity(WorkspaceSystemSettingsDisambiguation.Sensitivity.HIGH)
            .build();
    WorkspaceSystemSettingsTooling tooling =
        new WorkspaceSystemSettingsTooling.Builder().storeGenericResponses(true).build();
    WorkspaceSystemSettings systemSettings =
        new WorkspaceSystemSettings.Builder()
            .disambiguation(disambiguation)
            .tooling(tooling)
            .build();

    // webhooks
    String webhookHeaderName = "Webhook-Header";
    String webhookHeaderValue = "webhook_header_value";
    String webhookName = "java-sdk-test-webhook";
    String webhookUrl = "https://github.com/watson-developer-cloud/java-sdk";
    WebhookHeader webhookHeader =
        new WebhookHeader.Builder().name(webhookHeaderName).value(webhookHeaderValue).build();
    Webhook webhook =
        new Webhook.Builder().name(webhookName).url(webhookUrl).addHeaders(webhookHeader).build();

    CreateWorkspaceOptions createOptions =
        new CreateWorkspaceOptions.Builder()
            .name(workspaceName)
            .description(workspaceDescription)
            .language(workspaceLanguage)
            .metadata(workspaceMetadata)
            .intents(workspaceIntents)
            .entities(workspaceEntities)
            .counterexamples(workspaceCounterExamples)
            .systemSettings(systemSettings)
            .addWebhooks(webhook)
            .build();

    String workspaceId = null;
    try {
      Workspace response = service.createWorkspace(createOptions).execute().getResult();

      assertNotNull(response);
      assertNotNull(response.getWorkspaceId());
      workspaceId = response.getWorkspaceId();
      assertNotNull(response.getName());
      assertEquals(response.getName(), workspaceName);
      assertNotNull(response.getDescription());
      assertEquals(response.getDescription(), workspaceDescription);
      assertNotNull(response.getLanguage());
      assertEquals(response.getLanguage(), workspaceLanguage);

      // metadata
      assertNotNull(response.getMetadata());
      assertNotNull(response.getMetadata().get("key"));
      assertEquals(response.getMetadata().get("key"), metadataValue);

      // Fetch with export=true so the intents/entities/counterexamples are included.
      GetWorkspaceOptions getOptions =
          new GetWorkspaceOptions.Builder(workspaceId).export(true).build();
      Workspace exResponse = service.getWorkspace(getOptions).execute().getResult();
      assertNotNull(exResponse);

      // intents
      assertNotNull(exResponse.getIntents());
      assertTrue(exResponse.getIntents().size() == 1);
      assertNotNull(exResponse.getIntents().get(0).getIntent());
      assertEquals(exResponse.getIntents().get(0).getIntent(), intentName);
      assertNotNull(exResponse.getIntents().get(0).getDescription());
      assertEquals(exResponse.getIntents().get(0).getDescription(), intentDescription);
      assertNotNull(exResponse.getIntents().get(0).getExamples());
      assertTrue(exResponse.getIntents().get(0).getExamples().size() == 1);
      assertNotNull(exResponse.getIntents().get(0).getExamples().get(0));
      assertNotNull(exResponse.getIntents().get(0).getExamples().get(0).text());
      assertEquals(exResponse.getIntents().get(0).getExamples().get(0).text(), intentExample);

      // entities
      assertNotNull(exResponse.getEntities());
      assertTrue(exResponse.getEntities().size() == 1);
      assertNotNull(exResponse.getEntities().get(0).getEntity());
      assertEquals(exResponse.getEntities().get(0).getEntity(), entityName);
      assertNotNull(exResponse.getEntities().get(0).getDescription());
      assertEquals(exResponse.getEntities().get(0).getDescription(), entityDescription);
      assertNotNull(exResponse.getEntities().get(0).getValues());
      assertTrue(exResponse.getEntities().get(0).getValues().size() == 1);
      assertNotNull(exResponse.getEntities().get(0).getValues().get(0).value());
      assertEquals(exResponse.getEntities().get(0).getValues().get(0).value(), entityValue);
      assertNotNull(exResponse.getEntities().get(0).getValues().get(0).synonyms());
      assertTrue(exResponse.getEntities().get(0).getValues().get(0).synonyms().size() == 1);
      assertEquals(
          exResponse.getEntities().get(0).getValues().get(0).synonyms().get(0), entityValueSynonym);

      // counterexamples
      assertNotNull(exResponse.getCounterexamples());
      assertTrue(exResponse.getCounterexamples().size() == 1);
      assertNotNull(exResponse.getCounterexamples().get(0).text());
      assertEquals(exResponse.getCounterexamples().get(0).text(), counterExampleText);

      // systemSettings
      assertNotNull(exResponse.getSystemSettings());
      assertEquals(
          exResponse.getSystemSettings().disambiguation().noneOfTheAbovePrompt(),
          disambiguation.noneOfTheAbovePrompt());
      assertEquals(
          exResponse.getSystemSettings().disambiguation().sensitivity(),
          disambiguation.sensitivity());
      assertEquals(
          exResponse.getSystemSettings().disambiguation().prompt(), disambiguation.prompt());
      assertEquals(
          exResponse.getSystemSettings().disambiguation().enabled(), disambiguation.enabled());
      assertEquals(
          exResponse.getSystemSettings().tooling().storeGenericResponses(),
          tooling.storeGenericResponses());

      // webhooks
      assertNotNull(exResponse.getWebhooks());
      assertEquals(webhookName, exResponse.getWebhooks().get(0).name());
      assertEquals(webhookUrl, exResponse.getWebhooks().get(0).url());
      assertEquals(webhookHeaderName, exResponse.getWebhooks().get(0).headers().get(0).name());
      assertEquals(webhookHeaderValue, exResponse.getWebhooks().get(0).headers().get(0).value());
    } catch (Exception ex) {
      fail(ex.getMessage());
    } finally {
      // Clean up (only if creation got far enough to produce an id)
      if (workspaceId != null) {
        DeleteWorkspaceOptions deleteOptions =
            new DeleteWorkspaceOptions.Builder(workspaceId).build();
        service.deleteWorkspace(deleteOptions).execute();
      }
    }
  }
/** Test deleteWorkspace. */
@Test
public void testDeleteWorkspace() {
CreateWorkspaceOptions createOptions = new CreateWorkspaceOptions.Builder().build();
String workspaceId = null;
try {
Workspace response = service.createWorkspace(createOptions).execute().getResult();
assertNotNull(response);
assertNotNull(response.getWorkspaceId());
workspaceId = response.getWorkspaceId();
DeleteWorkspaceOptions deleteOptions =
new DeleteWorkspaceOptions.Builder(workspaceId).build();
service.deleteWorkspace(deleteOptions).execute();
GetWorkspaceOptions getOptions =
new GetWorkspaceOptions.Builder(workspaceId).export(true).build();
service.getWorkspace(getOptions).execute().getResult();
} catch (Exception ex) {
// Expected result
assertTrue(ex instanceof NotFoundException);
workspaceId = null;
} finally {
// Clean up
if (workspaceId != null) {
DeleteWorkspaceOptions deleteOptions =
new DeleteWorkspaceOptions.Builder(workspaceId).build();
service.deleteWorkspace(deleteOptions).execute();
}
}
}
  /**
   * Test getWorkspace: fetches the shared test workspace (field {@code workspaceId})
   * without export and verifies the basic envelope plus audit timestamps.
   * NOTE(review): the getWorkspace call itself is outside the try, so a service
   * failure there propagates directly instead of going through fail() — presumably
   * intentional; confirm.
   */
  @Test
  public void testGetWorkspace() {
    GetWorkspaceOptions getOptions =
        new GetWorkspaceOptions.Builder(workspaceId).export(false).includeAudit(true).build();
    Workspace response = service.getWorkspace(getOptions).execute().getResult();

    try {
      assertNotNull(response);
      assertNotNull(response.getWorkspaceId());
      assertEquals(response.getWorkspaceId(), workspaceId);
      assertNotNull(response.getName());
      assertNotNull(response.getLanguage());

      Date now = new Date();
      assertNotNull(response.getCreated());
      assertNotNull(response.getUpdated());
      // fuzzyBefore allows for clock skew between client and service
      assertTrue(fuzzyBefore(response.getCreated(), now));
      assertTrue(fuzzyBefore(response.getUpdated(), now));

      // metadata, intents, entities, dialogNodes, and counterexamples could be null
    } catch (Exception ex) {
      fail(ex.getMessage());
    }
  }
/** Test listWorkspaces. */
@Test
public void testListWorkspaces() {
ListWorkspacesOptions listOptions = new ListWorkspacesOptions.Builder().build();
WorkspaceCollection response = service.listWorkspaces(listOptions).execute().getResult();
assertNotNull(response);
assertNotNull(response.getWorkspaces());
assertTrue(response.getWorkspaces().size() > 0);
assertNotNull(response.getPagination());
assertNotNull(response.getPagination().getRefreshUrl());
Workspace wResponse = null;
for (Workspace resp : response.getWorkspaces()) {
if (resp.getWorkspaceId().equals(workspaceId)) {
wResponse = resp;
break;
}
}
assertNotNull(wResponse);
assertNotNull(wResponse.getName());
}
  /**
   * Test listWorkspaces with paging: walks the collection one workspace per page via
   * the pagination cursor until the shared test workspace is found or pages run out.
   */
  @Test
  public void testListWorkspacesWithPaging() {
    ListWorkspacesOptions listOptions =
        new ListWorkspacesOptions.Builder().pageLimit(1L).sort("-updated").build();
    WorkspaceCollection response = service.listWorkspaces(listOptions).execute().getResult();

    assertNotNull(response);
    assertNotNull(response.getPagination());
    assertNotNull(response.getPagination().getRefreshUrl());

    boolean found = false;
    while (true) {
      assertNotNull(response.getWorkspaces());
      // pageLimit(1) means exactly one workspace per page
      assertTrue(response.getWorkspaces().size() == 1);
      found |= response.getWorkspaces().get(0).getWorkspaceId().equals(workspaceId);
      // A null cursor marks the last page
      if (response.getPagination().getNextCursor() == null) {
        break;
      }
      String cursor = response.getPagination().getNextCursor();
      response =
          service
              .listWorkspaces(listOptions.newBuilder().cursor(cursor).build())
              .execute()
              .getResult();
    }

    assertTrue(found);
  }
  /**
   * Test updateWorkspace: creates a workspace with intents/entities/counterexamples,
   * then appends one more counterexample and a webhook via updateWorkspace
   * (append=false), verifying both changes round-trip. Cleans up in the finally block.
   */
  @Test
  public void testUpdateWorkspace() {
    String workspaceName = "testUpdateWorkspace";
    String workspaceDescription = "Description for testUpdateWorkspace";

    // intents
    CreateIntent intent0 = new CreateIntent.Builder("Hello").build();
    CreateIntent intent1 = new CreateIntent.Builder("Goodbye").build();

    // entities
    CreateEntity entity0 = new CreateEntity.Builder("animal").build();
    CreateEntity entity1 = new CreateEntity.Builder("beverage").build();

    // counterexamples
    Counterexample counterexample0 = new Counterexample.Builder("What are you wearing?").build();
    Counterexample counterexample1 = new Counterexample.Builder("What are you eating?").build();

    CreateWorkspaceOptions createOptions =
        new CreateWorkspaceOptions.Builder()
            .name(workspaceName)
            .description(workspaceDescription)
            .addIntent(intent0)
            .addIntent(intent1)
            .addEntity(entity0)
            .addEntity(entity1)
            .addCounterexample(counterexample0)
            .addCounterexample(counterexample1)
            .build();

    String workspaceId = null;
    try {
      Workspace createResponse = service.createWorkspace(createOptions).execute().getResult();

      assertNotNull(createResponse);
      assertNotNull(createResponse.getWorkspaceId());
      workspaceId = createResponse.getWorkspaceId();

      String counterExampleText = "What are you drinking";
      Counterexample counterexample2 = new Counterexample.Builder(counterExampleText).build();

      // webhooks
      String webhookHeaderName = "Webhook-Header";
      String webhookHeaderValue = "webhook_header_value";
      String webhookName = "java-sdk-test-webhook";
      String webhookUrl = "https://github.com/watson-developer-cloud/java-sdk";
      WebhookHeader webhookHeader =
          new WebhookHeader.Builder().name(webhookHeaderName).value(webhookHeaderValue).build();
      Webhook webhook =
          new Webhook.Builder().name(webhookName).url(webhookUrl).addHeaders(webhookHeader).build();

      UpdateWorkspaceOptions updateOptions =
          new UpdateWorkspaceOptions.Builder(workspaceId)
              .addCounterexample(counterexample2)
              .append(false)
              .addWebhooks(webhook)
              .build();
      Workspace updateResponse = service.updateWorkspace(updateOptions).execute().getResult();
      assertNotNull(updateResponse);

      // Verify the new counterexample is retrievable on its own
      GetCounterexampleOptions getOptions =
          new GetCounterexampleOptions.Builder(workspaceId, counterExampleText).build();
      Counterexample eResponse = service.getCounterexample(getOptions).execute().getResult();
      assertNotNull(eResponse);
      assertNotNull(eResponse.text());
      assertEquals(eResponse.text(), counterExampleText);

      // webhooks
      assertNotNull(updateResponse.getWebhooks());
      assertEquals(webhookName, updateResponse.getWebhooks().get(0).name());
      assertEquals(webhookUrl, updateResponse.getWebhooks().get(0).url());
      assertEquals(webhookHeaderName, updateResponse.getWebhooks().get(0).headers().get(0).name());
      assertEquals(
          webhookHeaderValue, updateResponse.getWebhooks().get(0).headers().get(0).value());
    } catch (Exception ex) {
      fail(ex.getMessage());
    } finally {
      // Clean up
      if (workspaceId != null) {
        DeleteWorkspaceOptions deleteOptions =
            new DeleteWorkspaceOptions.Builder(workspaceId).build();
        service.deleteWorkspace(deleteOptions).execute();
      }
    }
  }
  /**
   * Test listLogs: lists conversation logs for the shared workspace and verifies the
   * pagination envelope is self-consistent (cursor present iff a next URL is present).
   */
  @Test
  public void testListLogs() {

    try {
      ListLogsOptions listOptions = new ListLogsOptions.Builder().workspaceId(workspaceId).build();
      LogCollection response = service.listLogs(listOptions).execute().getResult();
      assertNotNull(response);
      assertNotNull(response.getLogs());
      assertNotNull(response.getPagination());
      // Empirically -- no refresh_url in pagination of listLogs
      // assertNotNull(response.getPagination().getRefreshUrl());
      // nextUrl may be null
      if (response.getPagination().getNextUrl() == null) {
        assertNull(response.getPagination().getNextCursor());
      } else {
        assertNotNull(response.getPagination().getNextCursor());
      }
    } catch (Exception ex) {
      fail(ex.getMessage());
    }
  }
  /**
   * Test listLogs with pagination: fetches two consecutive one-entry pages sorted by
   * descending request timestamp and verifies the second entry is strictly older.
   * Disabled by default because repeated calls hit the service rate limit.
   */
  @Test
  @Ignore("To be run locally until we fix the Rate limitation issue")
  public void testListLogsWithPaging() {

    try {
      ListLogsOptions.Builder listOptionsBuilder = new ListLogsOptions.Builder(workspaceId);
      listOptionsBuilder.sort("-request_timestamp");
      listOptionsBuilder.filter("request.intents:intent:off_topic");
      listOptionsBuilder.pageLimit(1L);

      LogCollection response = service.listLogs(listOptionsBuilder.build()).execute().getResult();
      assertNotNull(response);
      assertNotNull(response.getLogs());
      assertNotNull(response.getPagination());
      // Empirically -- no refresh_url in pagination of listLogs
      // assertNotNull(response.getPagination().getRefreshUrl());
      assertNotNull(response.getPagination().getNextUrl());
      assertNotNull(response.getPagination().getNextCursor());
      assertTrue(response.getLogs().size() == 1);
      Log logEntry1 = response.getLogs().get(0);

      String cursor = response.getPagination().getNextCursor();
      response = service.listLogs(listOptionsBuilder.cursor(cursor).build()).execute().getResult();

      assertNotNull(response.getLogs());
      assertTrue(response.getLogs().size() == 1);

      Log logEntry2 = response.getLogs().get(0);

      // Descending sort means page 2's entry must predate page 1's
      Date requestDate1 = isoDateFormat.parse(logEntry1.getRequestTimestamp());
      Date requestDate2 = isoDateFormat.parse(logEntry2.getRequestTimestamp());

      assertTrue(requestDate2.before(requestDate1));

    } catch (Exception ex) {
      fail(ex.getMessage());
    }
  }
  /**
   * Test createDialogNode: creates a dialog node with a description, verifies the
   * echoed name and description, and deletes the node in the finally block.
   */
  @Test
  public void testCreateDialogNode() {
    String dialogNodeName = "Test" + UUID.randomUUID().toString();
    String dialogNodeDescription = "Description of " + dialogNodeName;

    CreateDialogNodeOptions createOptions =
        new CreateDialogNodeOptions.Builder(workspaceId, dialogNodeName)
            .description(dialogNodeDescription)
            .build();
    DialogNode response = service.createDialogNode(createOptions).execute().getResult();

    try {
      assertNotNull(response);
      assertNotNull(response.dialogNode());
      assertEquals(response.dialogNode(), dialogNodeName);
      assertNotNull(response.description());
      assertEquals(response.description(), dialogNodeDescription);
    } catch (Exception ex) {
      fail(ex.getMessage());
    } finally {
      // Clean up
      DeleteDialogNodeOptions deleteOptions =
          new DeleteDialogNodeOptions.Builder(workspaceId, dialogNodeName).build();
      service.deleteDialogNode(deleteOptions).execute();
    }
  }
/** Test deleteDialogNode. */
@Test
public void testDeleteDialogNode() {
String dialogNodeName = "Test" + UUID.randomUUID().toString(); // gotta be unique
CreateDialogNodeOptions createOptions =
new CreateDialogNodeOptions.Builder(workspaceId, dialogNodeName).build();
service.createDialogNode(createOptions).execute().getResult();
DeleteDialogNodeOptions deleteOptions =
new DeleteDialogNodeOptions.Builder(workspaceId, dialogNodeName).build();
service.deleteDialogNode(deleteOptions).execute();
try {
GetDialogNodeOptions getOptions =
new GetDialogNodeOptions.Builder(workspaceId, dialogNodeName).build();
service.getDialogNode(getOptions).execute();
fail("deleteDialogNode failed");
} catch (Exception ex) {
// Expected result
assertTrue(ex instanceof NotFoundException);
}
}
  /**
   * Test getDialogNode: creates a dialog node, fetches it with includeAudit, and
   * verifies name, description, and that created/updated fall between test start and
   * now (within the fuzzy clock-skew tolerance). Cleans up in the finally block.
   */
  @Test
  public void testGetDialogNode() {
    String dialogNodeName = "Test" + UUID.randomUUID().toString();
    String dialogNodeDescription = "Description of " + dialogNodeName;

    Date start = new Date();

    CreateDialogNodeOptions createOptions =
        new CreateDialogNodeOptions.Builder(workspaceId, dialogNodeName)
            .description(dialogNodeDescription)
            .build();
    service.createDialogNode(createOptions).execute().getResult();

    try {
      GetDialogNodeOptions getOptions =
          new GetDialogNodeOptions.Builder()
              .workspaceId(workspaceId)
              .dialogNode(dialogNodeName)
              .includeAudit(true)
              .build();
      DialogNode response = service.getDialogNode(getOptions).execute().getResult();
      assertNotNull(response);
      assertNotNull(response.dialogNode());
      assertEquals(response.dialogNode(), dialogNodeName);
      assertNotNull(response.description());
      assertEquals(response.description(), dialogNodeDescription);
      assertNotNull(response.created());
      assertNotNull(response.updated());

      Date now = new Date();
      assertTrue(fuzzyBefore(response.created(), now));
      assertTrue(fuzzyAfter(response.created(), start));
      assertTrue(fuzzyBefore(response.updated(), now));
      assertTrue(fuzzyAfter(response.updated(), start));
    } catch (Exception ex) {
      fail(ex.getMessage());
    } finally {
      // Clean up
      DeleteDialogNodeOptions deleteOptions =
          new DeleteDialogNodeOptions.Builder(workspaceId, dialogNodeName).build();
      service.deleteDialogNode(deleteOptions).execute();
    }
  }
  /**
   * Test listDialogNodes: lists existing nodes, adds a new node, then re-lists with a
   * larger page limit and verifies the new node (and its audit timestamps) comes back.
   * Cleans up the added node in the finally block.
   */
  @Test
  public void testListDialogNodes() {
    String dialogNodeName = "Test" + UUID.randomUUID().toString();

    try {
      ListDialogNodesOptions listOptions = new ListDialogNodesOptions.Builder(workspaceId).build();
      DialogNodeCollection response = service.listDialogNodes(listOptions).execute().getResult();
      assertNotNull(response);
      assertNotNull(response.getDialogNodes());
      assertNotNull(response.getPagination());
      assertNotNull(response.getPagination().getRefreshUrl());
      // nextUrl may be null

      // Now add a dialog node and make sure we get it back
      String dialogNodeDescription = "Description of " + dialogNodeName;

      Date start = new Date();

      CreateDialogNodeOptions createOptions =
          new CreateDialogNodeOptions.Builder(workspaceId, dialogNodeName)
              .description(dialogNodeDescription)
              .build();
      service.createDialogNode(createOptions).execute().getResult();

      // Page limit of (previous count + 1) guarantees the new node fits in one page
      long count = response.getDialogNodes().size();
      ListDialogNodesOptions listOptions2 =
          new ListDialogNodesOptions.Builder(workspaceId)
              .pageLimit(count + 1)
              .includeAudit(true)
              .build();
      DialogNodeCollection response2 = service.listDialogNodes(listOptions2).execute().getResult();
      assertNotNull(response2);
      assertNotNull(response2.getDialogNodes());

      List<DialogNode> dialogNodes = response2.getDialogNodes();
      assertTrue(dialogNodes.size() > count);

      DialogNode dialogResponse = null;
      for (DialogNode node : dialogNodes) {
        if (node.dialogNode().equals(dialogNodeName)) {
          dialogResponse = node;
          break;
        }
      }

      assertNotNull(dialogResponse);
      assertNotNull(dialogResponse.description());
      assertEquals(dialogResponse.description(), dialogNodeDescription);

      Date now = new Date();
      assertTrue(fuzzyBefore(dialogResponse.created(), now));
      assertTrue(fuzzyAfter(dialogResponse.created(), start));
      assertTrue(fuzzyBefore(dialogResponse.updated(), now));
      assertTrue(fuzzyAfter(dialogResponse.updated(), start));
    } catch (Exception ex) {
      fail(ex.getMessage());
    } finally {
      // Clean up
      DeleteDialogNodeOptions deleteOptions =
          new DeleteDialogNodeOptions.Builder(workspaceId, dialogNodeName).build();
      service.deleteDialogNode(deleteOptions).execute();
    }
  }
  /**
   * Test listDialogNodes with pagination: creates two nodes whose names sort in a
   * known order ("First..." before "Second..."), then walks the collection one node
   * per page, verifying both nodes appear and in sorted order. Cleans up both nodes.
   */
  @Test
  public void testListDialogNodesWithPaging() {
    String dialogNodeName1 = "First" + UUID.randomUUID().toString();
    String dialogNodeName2 = "Second" + UUID.randomUUID().toString();

    CreateDialogNodeOptions createOptions =
        new CreateDialogNodeOptions.Builder(workspaceId, dialogNodeName1).build();
    service.createDialogNode(createOptions).execute().getResult();
    service
        .createDialogNode(createOptions.newBuilder().dialogNode(dialogNodeName2).build())
        .execute()
        .getResult();

    try {
      ListDialogNodesOptions listOptions =
          new ListDialogNodesOptions.Builder()
              .workspaceId(workspaceId)
              .pageLimit(1L)
              .sort("dialog_node")
              .build();
      DialogNodeCollection response = service.listDialogNodes(listOptions).execute().getResult();
      assertNotNull(response);
      assertNotNull(response.getDialogNodes());
      assertNotNull(response.getPagination());
      assertNotNull(response.getPagination().getRefreshUrl());
      assertNotNull(response.getPagination().getNextUrl());
      assertNotNull(response.getPagination().getNextCursor());

      boolean found1 = false, found2 = false;
      while (true) {
        assertNotNull(response.getDialogNodes());
        assertTrue(response.getDialogNodes().size() == 1);
        found1 |= response.getDialogNodes().get(0).dialogNode().equals(dialogNodeName1);
        found2 |= response.getDialogNodes().get(0).dialogNode().equals(dialogNodeName2);
        // Node 1 must be seen before node 2 (alphabetical sort on dialog_node)
        assertTrue(found1 || !found2); // verify sort
        if (response.getPagination().getNextCursor() == null) {
          break;
        }
        String cursor = response.getPagination().getNextCursor();
        response =
            service
                .listDialogNodes(listOptions.newBuilder().cursor(cursor).build())
                .execute()
                .getResult();
      }
      assertTrue(found1 && found2);

    } catch (Exception ex) {
      fail(ex.getMessage());
    } finally {
      // Clean up
      DeleteDialogNodeOptions deleteOptions =
          new DeleteDialogNodeOptions.Builder(workspaceId, dialogNodeName1).build();
      service.deleteDialogNode(deleteOptions).execute();
      service
          .deleteDialogNode(deleteOptions.newBuilder().dialogNode(dialogNodeName2).build())
          .execute();
    }
  }
  /**
   * Test updateDialogNode: creates a dialog node, renames it and changes its
   * description via updateDialogNode, and verifies both new values. The cleanup
   * deletes the node under its NEW name.
   */
  @Test
  public void testUpdateDialogNode() {
    String dialogNodeName = "Test" + UUID.randomUUID().toString();
    String dialogNodeDescription = "Description of " + dialogNodeName;

    CreateDialogNodeOptions createOptions =
        new CreateDialogNodeOptions.Builder(workspaceId, dialogNodeName)
            .description(dialogNodeDescription)
            .build();
    service.createDialogNode(createOptions).execute().getResult();

    String dialogNodeName2 = "Test2" + UUID.randomUUID().toString();

    try {
      String dialogNodeDescription2 = "Updated description of " + dialogNodeName;
      UpdateDialogNodeOptions updateOptions =
          new UpdateDialogNodeOptions.Builder()
              .workspaceId(workspaceId)
              .dialogNode(dialogNodeName)
              .newDialogNode(dialogNodeName2)
              .newDescription(dialogNodeDescription2)
              .build();
      DialogNode response = service.updateDialogNode(updateOptions).execute().getResult();
      assertNotNull(response);
      assertNotNull(response.dialogNode());
      assertEquals(response.dialogNode(), dialogNodeName2);
      assertNotNull(response.description());
      assertEquals(response.description(), dialogNodeDescription2);
    } catch (Exception ex) {
      fail(ex.getMessage());
    } finally {
      // Clean up (the node now carries the renamed id)
      DeleteDialogNodeOptions deleteOptions =
          new DeleteDialogNodeOptions.Builder(workspaceId, dialogNodeName2).build();
      service.deleteDialogNode(deleteOptions).execute();
    }
  }
  /**
   * Test updateDialogNodeNullable: creates a node with a nextStep, then uses the
   * JSON-merge-patch style update (via {@code UpdateDialogNode.asPatch()}) to rename
   * it, change its description, and explicitly null out nextStep — verifying the
   * nullable-update path can clear a previously-set field.
   */
  @Test
  public void testUpdateDialogNodeNullable() {
    String dialogNodeName = "Test" + UUID.randomUUID().toString();
    String dialogNodeDescription = "Description of " + dialogNodeName;

    DialogNodeNextStep dialogNodeNextStep =
        new DialogNodeNextStep.Builder()
            .behavior(DialogNodeNextStep.Behavior.SKIP_USER_INPUT)
            .build();

    CreateDialogNodeOptions createOptions =
        new CreateDialogNodeOptions.Builder(workspaceId, dialogNodeName)
            .description(dialogNodeDescription)
            .nextStep(dialogNodeNextStep)
            .build();
    service.createDialogNode(createOptions).execute().getResult();

    String dialogNodeName2 = "Test2" + UUID.randomUUID().toString();

    try {
      String dialogNodeDescription2 = "Updated description of " + dialogNodeName;

      // nextStep(null) is serialized as an explicit null in the patch body,
      // which instructs the service to clear the field.
      UpdateDialogNode updateDialogNode =
          new UpdateDialogNode.Builder()
              .description(dialogNodeDescription2)
              .nextStep(null)
              .dialogNode(dialogNodeName2)
              .build();
      Map<String, Object> body = updateDialogNode.asPatch();

      UpdateDialogNodeNullableOptions updateDialogNodeNullableOptions =
          new UpdateDialogNodeNullableOptions.Builder()
              .workspaceId(workspaceId)
              .dialogNode(dialogNodeName)
              .body(body)
              .build();
      DialogNode response =
          service.updateDialogNodeNullable(updateDialogNodeNullableOptions).execute().getResult();

      assertNotNull(response);
      assertNotNull(response.dialogNode());
      assertEquals(response.dialogNode(), dialogNodeName2);
      assertNotNull(response.description());
      assertEquals(response.description(), dialogNodeDescription2);
      assertNull(response.nextStep());
    } catch (Exception ex) {
      fail(ex.getMessage());
    } finally {
      // Clean up (the node now carries the renamed id)
      DeleteDialogNodeOptions deleteOptions =
          new DeleteDialogNodeOptions.Builder(workspaceId, dialogNodeName2).build();
      service.deleteDialogNode(deleteOptions).execute();
    }
  }
/** Test deleteUserData. */
@Test
public void testDeleteUserData() {
String customerId = "java_sdk_test_id";
try {
DeleteUserDataOptions deleteOptions =
new DeleteUserDataOptions.Builder().customerId(customerId).build();
service.deleteUserData(deleteOptions).execute();
} catch (Exception ex) {
fail(ex.getMessage());
}
}
/** Test list mentions. */
@Test
public void testListMentions() {
String entity = "beverage";
ListMentionsOptions listMentionsOptions =
new ListMentionsOptions.Builder().workspaceId(workspaceId).entity(entity).build();
EntityMentionCollection collection =
service.listMentions(listMentionsOptions).execute().getResult();
assertNotNull(collection);
}
  /**
   * Test bulkClassify: classifies a single utterance in bulk.
   * Ignored because it uses a literal "{workspaceId}" placeholder rather than a real
   * workspace id — substitute a valid id to run this locally.
   */
  @Ignore
  @Test
  public void testBulkClassify() {
    BulkClassifyUtterance bulkClassifyUtterance =
        new BulkClassifyUtterance.Builder().text("help I need help").build();
    BulkClassifyOptions bulkClassifyOptions =
        new BulkClassifyOptions.Builder()
            .addInput(bulkClassifyUtterance)
            .workspaceId("{workspaceId}")
            .build();
    BulkClassifyResponse response = service.bulkClassify(bulkClassifyOptions).execute().getResult();
    assertNotNull(response);
  }
}
|
test(assistant-v1): update ITs
|
assistant/src/test/java/com/ibm/watson/assistant/v1/AssistantServiceIT.java
|
test(assistant-v1): update ITs
|
|
Java
|
apache-2.0
|
f0f325feeef138113dbfdb69a4ecb51856b30310
| 0
|
jaamsim/jaamsim,jaamsim/jaamsim,jaamsim/jaamsim,jaamsim/jaamsim
|
/*
* JaamSim Discrete Event Simulation
* Copyright (C) 2012 Ausenco Engineering Canada Inc.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*/
package com.jaamsim.collada;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Stack;
import java.util.Vector;

import javax.xml.parsers.SAXParserFactory;

import com.jaamsim.MeshFiles.MeshData;
import com.jaamsim.MeshFiles.VertexMap;
import com.jaamsim.math.Color4d;
import com.jaamsim.math.ConvexHull;
import com.jaamsim.math.Mat4d;
import com.jaamsim.math.Quaternion;
import com.jaamsim.math.Vec3d;
import com.jaamsim.math.Vec4d;
import com.jaamsim.render.RenderException;
import com.jaamsim.xml.XmlNode;
import com.jaamsim.xml.XmlParser;
/**
* Inspired by the Collada loader for Sweethome3d by Emmanuel Puybaret / eTeks <info@eteks.com>.
*/
public class ColParser {
public static MeshData parse(URL asset) throws RenderException {
SAXParserFactory factory = SAXParserFactory.newInstance();
factory.setValidating(false);
try {
ColParser colParser = new ColParser(asset);
colParser.processContent();
return colParser.getData();
} catch (Exception e) {
e.printStackTrace();
throw new RenderException(e.getMessage());
}
}
private static void parseAssert(boolean b) {
if (!b) {
throw new RenderException("Failed Collada parsing assert");
}
}
static final List<String> DOUBLE_ARRAY_TAGS;
static final List<String> INT_ARRAY_TAGS;
static final List<String> STRING_ARRAY_TAGS;
static final List<String> BOOLEAN_ARRAY_TAGS;
static {
DOUBLE_ARRAY_TAGS = new ArrayList<String>();
DOUBLE_ARRAY_TAGS.add("float_array");
DOUBLE_ARRAY_TAGS.add("rotate");
DOUBLE_ARRAY_TAGS.add("translate");
DOUBLE_ARRAY_TAGS.add("scale");
DOUBLE_ARRAY_TAGS.add("lookat");
DOUBLE_ARRAY_TAGS.add("matrix");
DOUBLE_ARRAY_TAGS.add("color");
INT_ARRAY_TAGS = new ArrayList<String>();
INT_ARRAY_TAGS.add("int_array");
INT_ARRAY_TAGS.add("vcount");
INT_ARRAY_TAGS.add("p");
INT_ARRAY_TAGS.add("h");
INT_ARRAY_TAGS.add("v");
STRING_ARRAY_TAGS = new ArrayList<String>();
STRING_ARRAY_TAGS.add("Name_array");
STRING_ARRAY_TAGS.add("IDREF_array");
BOOLEAN_ARRAY_TAGS = new ArrayList<String>();
BOOLEAN_ARRAY_TAGS.add("boolean_array");
}
	/** A single face-based sub-geometry: a vertex map plus triangle indices,
	 *  bound to a material symbol resolved later against the instance's bindings. */
	private static class FaceSubGeo {
		public VertexMap vMap;
		int[] indices;
		// Material symbol referenced by the geometry; mapped to an effect at instancing time
		public String materialSymbol;

		public FaceSubGeo(int size) {
			vMap = new VertexMap();
			indices = new int[size];
		}
	}
	/** A single line-based sub-geometry: raw vertex positions plus the material symbol. */
	private static class LineSubGeo {
		public final Vec4d[] verts;
		public String materialSymbol;

		public LineSubGeo(int size) {
			verts = new Vec4d[size];
		}
	}
	/** A named &lt;visual_scene&gt;: simply the list of its top-level scene nodes. */
	private static class VisualScene {
		public final ArrayList<SceneNode> nodes = new ArrayList<SceneNode>();
	}
	/** All sub-geometries parsed from one &lt;geometry&gt; element, split by primitive type. */
	private static class Geometry {
		// NOTE(review): Vector is legacy/synchronized; kept because the field type is
		// visible to the rest of the class — confirm before migrating to ArrayList.
		public final Vector<FaceSubGeo> faceSubGeos = new Vector<FaceSubGeo>();
		public final Vector<LineSubGeo> lineSubGeos = new Vector<LineSubGeo>();
	}
	// URL of the source document; used to resolve relative references (e.g. textures)
	private final URL _contextURL;

	// Libraries built up during the per-section parsing passes, keyed by element id
	private final HashMap<String, Geometry> _geos = new HashMap<String, Geometry>();
	private final HashMap<String, String> _images = new HashMap<String, String>(); // Maps image names to files
	private final HashMap<String, String> _materials = new HashMap<String, String>(); // Maps materials to effects
	private final HashMap<String, Effect> _effects = new HashMap<String, Effect>(); // List of known effects
	private final HashMap<String, SceneNode> _namedNodes = new HashMap<String, SceneNode>();
	private final HashMap<String, VisualScene> _visualScenes = new HashMap<String, VisualScene>();

	// This stack is used to track node loops
	private final Stack<SceneNode> _nodeStack = new Stack<SceneNode>();

	// This list tracks the combinations of sub geometries and effects loaded in the mesh proto and defines an implicit
	// index into the mesh proto. This should probably be made more explicit later
	private final ArrayList<FaceSubGeo> _loadedFaceGeos = new ArrayList<FaceSubGeo>();
	private final ArrayList<Effect> _loadedEffects = new ArrayList<Effect>();
	private final ArrayList<LineGeoEffectPair> _loadedLineGeos = new ArrayList<LineGeoEffectPair>();

	// Accumulated output of the whole parse
	private MeshData _finalData = new MeshData();

	// Resolved <source> arrays, keyed by source id
	private HashMap<String, Vec4d[]> _dataSources = new HashMap<String, Vec4d[]>();

	// Root <COLLADA> element and the parser it came from
	private XmlNode _colladaNode;
	private XmlParser _parser;
	/**
	 * Creates a parser for the COLLADA asset at the given URL.
	 * @param context URL of the .dae document; also the base for relative references
	 */
	public ColParser(URL context) {
		_contextURL = context;
	}
private XmlNode getNodeFromID(String fragID) {
if (fragID.length() < 1 || fragID.charAt(0) != '#') {
return null;
}
return _parser.getNodeByID(fragID.substring(1));
}
	/**
	 * Drives the full parse: configures and runs the XML parser, then processes each
	 * COLLADA library section in dependency order (geometries, images, materials,
	 * effects, nodes, visual scenes) before flattening the scene into mesh data.
	 */
	private void processContent() {

		_parser = new XmlParser(_contextURL);

		// Tell the parser which tags hold typed array content
		_parser.setDoubleArrayTags(DOUBLE_ARRAY_TAGS);
		_parser.setIntArrayTags(INT_ARRAY_TAGS);
		_parser.setBooleanArrayTags(BOOLEAN_ARRAY_TAGS);
		_parser.setStringArrayTags(STRING_ARRAY_TAGS);

		_parser.parse();

		_colladaNode = _parser.getRootNode().findChildTag("COLLADA", false);
		parseAssert(_colladaNode != null);

		// Each pass populates the library maps consumed by later passes — order matters
		processGeos();
		processImages();
		processMaterials();
		processEffects();
		processNodes();

		processVisualScenes();

		// Reset the static ConvexHull profiling counters before building the scene
		ConvexHull.buildTime = 0; ConvexHull.filterTime = 0; ConvexHull.finalizeTime = 0; ConvexHull.sortTime = 0;

		processScene();
	}
private double getScaleFactor() {
XmlNode assetNode = _colladaNode.findChildTag("asset", false);
if (assetNode == null) return 1;
XmlNode unit = assetNode.findChildTag("unit", false);
if (unit == null) return 1;
String meter = unit.getAttrib("meter");
if (meter == null) return 1;
return Double.parseDouble(meter);
}
/**
 * Read the declared up axis from the asset tag.
 * @return "X_UP", "Y_UP" or "Z_UP"; defaults to "Y_UP" when absent
 */
private String getUpAxis() {
	XmlNode assetNode = _colladaNode.findChildTag("asset", false);
	if (assetNode != null) {
		XmlNode upAxisNode = assetNode.findChildTag("up_axis", false);
		if (upAxisNode != null) {
			String axis = (String)upAxisNode.getContent();
			if (axis != null) {
				return axis;
			}
		}
	}
	return "Y_UP";
}
/**
 * Returns a matrix that rotates which ever axis is specified into the Z axis (as JaamSim treats Z as up)
 * @return the rotation matrix (identity for Z_UP documents)
 */
private Mat4d getGlobalRot() {
	String up = getUpAxis();
	Mat4d ret = new Mat4d();
	if (up.equals("X_UP")) {
		// Rotate the X axis into Z
		ret.d00 =  0; ret.d01 =  0; ret.d02 =  1;
		ret.d10 = -1; ret.d11 =  0; ret.d12 =  0;
		ret.d20 =  0; ret.d21 = -1; ret.d22 =  0;
	} else if (!up.equals("Z_UP")) {
		// Y_UP (the default): rotate the Y axis into Z
		ret.d00 =  1; ret.d01 =  0; ret.d02 =  0;
		ret.d10 =  0; ret.d11 =  0; ret.d12 = -1;
		ret.d20 =  0; ret.d21 =  1; ret.d22 =  0;
	}
	// Z_UP needs no rotation, ret is already the identity
	return ret;
}
/**
 * Walk the visual scene referenced by the <scene> tag and flatten it into
 * _finalData, applying the global up-axis rotation and unit scale at the root.
 */
private void processScene() {
	XmlNode scene = _colladaNode.findChildTag("scene", false);
	parseAssert(scene != null);

	XmlNode instVS = scene.findChildTag("instance_visual_scene", false);
	parseAssert(instVS != null);
	String vsURL = instVS.getAttrib("url");
	parseAssert(vsURL.charAt(0) == '#'); // must be a fragment reference

	// Root transform: rotate the declared up axis into Z and apply the unit scale
	Mat4d globalMat = getGlobalRot();
	globalMat.scale3(getScaleFactor());

	VisualScene vs = _visualScenes.get(vsURL.substring(1));
	for (SceneNode sn : vs.nodes) {
		visitNode(sn, globalMat);
	}

	_finalData.generateHull();
}
/**
 * Recursively visit a scene node, accumulating the world transform and adding
 * all geometry instances encountered to _finalData.
 * Fixes over the previous version:
 * - the null check on a resolved instance node now precedes its use
 * - resolved instance_node references are visited directly instead of being
 *   appended to node.subNodes; appending them on every visit duplicated the
 *   instanced children each time a node was reached more than once
 * @param node the node to visit
 * @param parentMat accumulated transform of the parent chain
 */
private void visitNode(SceneNode node, Mat4d parentMat) {
	_nodeStack.push(node);

	// Update the current transform
	Mat4d currentMat = new Mat4d(parentMat);
	currentMat.mult4(node.trans);

	for (GeoInstInfo geoInfo : node.subGeo) {
		addGeoInst(geoInfo, currentMat);
	}

	// Visit the directly nested nodes
	for (SceneNode nextNode : node.subNodes) {
		visitNode(nextNode, currentMat);
	}

	// Resolve and visit instance_node references
	for (String nodeName : node.subInstanceNames) {
		parseAssert(nodeName.charAt(0) == '#');
		SceneNode instNode = _namedNodes.get(nodeName.substring(1));
		parseAssert(instNode != null); // check before using the reference
		// Check for reference loops, make sure this node is not currently in the active node stack
		parseAssert(!_nodeStack.contains(instNode));
		visitNode(instNode, currentMat);
	}

	_nodeStack.pop();
}
/**
 * Follow the material binding chain: symbol -> material ID -> effect ID -> Effect.
 * @param materialMap binding of geometry material symbols to material references
 * @param symbol the material symbol declared on the sub geometry
 */
private Effect geoBindingToEffect(Map<String, String> materialMap, String symbol) {
	String materialId = materialMap.get(symbol);
	parseAssert(materialId != null);
	parseAssert(materialId.charAt(0) == '#');

	String effectId = _materials.get(materialId.substring(1));
	parseAssert(effectId != null);
	parseAssert(effectId.charAt(0) == '#');

	Effect ret = _effects.get(effectId.substring(1));
	parseAssert(ret != null);
	return ret;
}
/**
 * Add an instance of the referenced geometry to the final mesh, registering any
 * sub geometry, effect or line data that has not been seen before.
 * Improvement: a single indexOf() replaces the previous contains()+indexOf()
 * pair, which scanned each list twice per lookup.
 * @param geoInfo the geometry instance (geometry name plus material bindings)
 * @param mat the accumulated world transform for this instance
 */
private void addGeoInst(GeoInstInfo geoInfo, Mat4d mat) {
	parseAssert(geoInfo.geoName.charAt(0) == '#');
	Geometry geo = _geos.get(geoInfo.geoName.substring(1));

	for (FaceSubGeo subGeo : geo.faceSubGeos) {
		// Check if this geometry and material pair has been loaded yet
		Effect effect = geoBindingToEffect(geoInfo.materialMap, subGeo.materialSymbol);

		int geoID = _loadedFaceGeos.indexOf(subGeo);
		if (geoID == -1) {
			// Not loaded yet; register the sub mesh, its index becomes the ID
			geoID = _loadedFaceGeos.size();
			_loadedFaceGeos.add(subGeo);
			_finalData.addSubMesh(subGeo.vMap.getVertList(), subGeo.indices);
		}

		int matID = _loadedEffects.indexOf(effect);
		if (matID == -1) {
			// Not loaded yet; register the material, its index becomes the ID
			matID = _loadedEffects.size();
			_loadedEffects.add(effect);
			_finalData.addMaterial(effect.diffuse.texture,
			                       effect.diffuse.color,
			                       effect.transType, effect.transColour);
		}
		_finalData.addSubMeshInstance(geoID, matID, -1, mat, null);
	}

	for (LineSubGeo subGeo : geo.lineSubGeos) {
		// Check if this geometry and material pair has been loaded yet
		Effect effect = geoBindingToEffect(geoInfo.materialMap, subGeo.materialSymbol);
		LineGeoEffectPair ge = new LineGeoEffectPair(subGeo, effect);

		int geoID = _loadedLineGeos.indexOf(ge);
		if (geoID == -1) {
			geoID = _loadedLineGeos.size();
			_loadedLineGeos.add(ge);
			_finalData.addSubLine(subGeo.verts,
			                      effect.diffuse.color);
		}
		_finalData.addSubLineInstance(geoID, mat);
	}
}
/**
 * Parse every visual_scene in the library_visual_scenes tag, if present.
 */
private void processVisualScenes() {
	XmlNode libScenes = _colladaNode.findChildTag("library_visual_scenes", false);
	if (libScenes == null) {
		return; // The document has no visual scenes
	}
	for (XmlNode child : libScenes.children()) {
		if ("visual_scene".equals(child.getTag())) {
			processVisualScene(child);
		}
	}
}
/**
 * Parse one visual_scene tag: register it by fragment ID and collect its
 * top level nodes.
 */
private void processVisualScene(XmlNode scene) {
	VisualScene vs = new VisualScene();
	_visualScenes.put(scene.getFragID(), vs);

	for (XmlNode child : scene.children()) {
		if (!"node".equals(child.getTag())) {
			continue;
		}
		vs.nodes.add(processNode(child, null));
	}
}
/**
 * Parse every image in the library_images tag, if present.
 */
private void processImages() {
	XmlNode libImage = _colladaNode.findChildTag("library_images", false);
	if (libImage == null) {
		return; // The document has no images
	}
	for (XmlNode child : libImage.children()) {
		if ("image".equals(child.getTag())) {
			processImage(child);
		}
	}
}
/**
 * Record an image's init_from file name under its fragment ID.
 * Only the ID and the file name matter for texturing.
 */
private void processImage(XmlNode imageNode) {
	String id = imageNode.getFragID();
	if (id == null) {
		return; // We can never reference this image, so skip it
	}
	XmlNode initFrom = imageNode.findChildTag("init_from", true);
	parseAssert(initFrom != null);
	if (initFrom == null) {
		return;
	}
	String fileName = (String)initFrom.getContent();
	parseAssert(fileName != null);
	_images.put(id, fileName);
}
/**
 * Parse every material in the library_materials tag, if present.
 */
private void processMaterials() {
	XmlNode libMats = _colladaNode.findChildTag("library_materials", false);
	if (libMats == null) {
		return; // The document has no materials
	}
	for (XmlNode child : libMats.children()) {
		if ("material".equals(child.getTag())) {
			processMaterial(child);
		}
	}
}
/**
 * Record a material's instance_effect URL under its fragment ID.
 */
private void processMaterial(XmlNode matNode) {
	String id = matNode.getFragID();
	if (id == null) {
		return; // We can never reference this material, so skip it
	}
	XmlNode instEffect = matNode.findChildTag("instance_effect", true);
	parseAssert(instEffect != null);
	if (instEffect == null) {
		return;
	}
	String effectURL = instEffect.getAttrib("url");
	parseAssert(effectURL != null);
	if (effectURL == null) {
		return;
	}
	_materials.put(id, effectURL);
}
/**
 * Parse every effect in the library_effects tag, if present.
 */
private void processEffects() {
	XmlNode libEffects = _colladaNode.findChildTag("library_effects", false);
	if (libEffects == null) {
		return; // The document has no effects
	}
	for (XmlNode child : libEffects.children()) {
		if ("effect".equals(child.getTag())) {
			processEffect(child);
		}
	}
}
/**
 * Parse an <effect> into our simplified Effect: a diffuse colour or texture
 * plus a transparency mode. Only the COMMON profile is supported; blinn,
 * phong, lambert and constant shader models are recognized.
 */
private void processEffect(XmlNode effectNode) {
	String id = effectNode.getFragID();
	if (id == null) return; // We do not care about materials we can not reference

	XmlNode profCommon = effectNode.findChildTag("profile_COMMON", true);
	if (profCommon == null) {
		parseAssert(false);
		return; // There is no common profile
	}

	HashMap<String, XmlNode> paramMap = new HashMap<String, XmlNode>();

	// Start by building a table of all params
	for (XmlNode child : profCommon.children()) {
		String tag = child.getTag();
		if (tag.equals("newparam")) {
			String sid = child.getAttrib("sid");
			if (sid != null) paramMap.put(sid, child);
		}
	}

	XmlNode technique = profCommon.findChildTag("technique", false);
	if (technique == null) {
		parseAssert(false);
		return; // There is no common profile
	}
	// Search technique for the kind of data we care about, for now find blinn, phong or lambert
	XmlNode diffuse = null;
	XmlNode transparency = null;
	XmlNode transparent = null;
	XmlNode blinn = technique.findChildTag("blinn", false);
	XmlNode phong = technique.findChildTag("phong", false);
	XmlNode lambert = technique.findChildTag("lambert", false);
	XmlNode constant = technique.findChildTag("constant", false);
	// Note: if several shader models are present, later ones override earlier ones
	if (blinn != null) {
		diffuse = blinn.findChildTag("diffuse", false);
		transparency = blinn.findChildTag("transparency", false);
		transparent = blinn.findChildTag("transparent", false);
	}
	if (phong != null) {
		diffuse = phong.findChildTag("diffuse", false);
		transparency = phong.findChildTag("transparency", false);
		transparent = phong.findChildTag("transparent", false);
	}
	if (lambert != null) {
		diffuse = lambert.findChildTag("diffuse", false);
		transparency = lambert.findChildTag("transparency", false);
		transparent = lambert.findChildTag("transparent", false);
	}
	if (constant != null) {
		// A constant shader has no diffuse term; use its emission instead
		diffuse = constant.findChildTag("emission", false);
		transparency = constant.findChildTag("transparency", false);
		transparent = constant.findChildTag("transparent", false);
	}

	// Now either parse diffuse as a color value or texture...
	Effect effect = new Effect();

	ColorTex diffuseCT = null;
	if (diffuse == null) {
		// No diffuse information at all; fall back to a default colour
		diffuseCT = new ColorTex();
		diffuseCT.color = new Color4d();
	} else {
		diffuseCT = getColorTex(diffuse, paramMap);
	}

	effect.diffuse = diffuseCT;

	String opaque = null;
	ColorTex transparentCT = null;
	if (transparent != null) {
		opaque = transparent.getAttrib("opaque");
		transparentCT = getColorTex(transparent, paramMap);
	}

	// There is a ton of conditions for us to handle transparency
	if (transparency != null &&
	    transparent != null &&
	    opaque != null &&
	    (opaque.equals("A_ONE") || opaque.equals("RGB_ZERO")) &&
	    transparentCT != null &&
	    transparentCT.color != null) {

		XmlNode floatNode = transparency.findChildTag("float", false);
		parseAssert(floatNode != null);

		double alpha = Double.parseDouble((String)floatNode.getContent());
		effect.transColour = new Color4d(transparentCT.color);
		if (opaque.equals("A_ONE")) {
			effect.transType = MeshData.A_ONE_TRANS;
		}
		if (opaque.equals("RGB_ZERO")) {
			effect.transType = MeshData.RGB_ZERO_TRANS;
			// Handle the weird luminance term for alpha in RGB_ZERO
			effect.transColour.a = effect.transColour.r * 0.212671 +
			                       effect.transColour.g * 0.715160 +
			                       effect.transColour.b * 0.072169;
		}
		// Bake the transparency term into the colour
		effect.transColour.r *= alpha;
		effect.transColour.g *= alpha;
		effect.transColour.b *= alpha;
		effect.transColour.a *= alpha;

		if ((effect.transColour.a >= 0.999 && opaque.equals("A_ONE")) ||
		    (effect.transColour.a <= 0.001 && opaque.equals("RGB_ZERO")) ) {
			effect.transType = MeshData.NO_TRANS; // Some meshes are effectively not transparent despite having the information
		}
	} else {
		effect.transType = MeshData.NO_TRANS;
	}

	_effects.put(id, effect);
}
/**
 * Parse a colour-or-texture element into a ColorTex. The single child is
 * either a direct <color> value, or a <texture> reference that must be chased
 * through the sampler2D -> surface -> image indirection chain.
 * @param node the parent element holding exactly one <color> or <texture> child
 * @param paramMap the effect's newparam table, keyed by sid
 */
private ColorTex getColorTex(XmlNode node, HashMap<String, XmlNode> paramMap) {
	if (node.getNumChildren() != 1) {
		parseAssert(false);
		return null;
	}

	XmlNode valNode = node.getChild(0);

	String tag = valNode.getTag();
	ColorTex ret = new ColorTex();
	if (tag.equals("color")) {
		// Direct colour value
		double[] colVals = (double[])valNode.getContent();
		parseAssert(colVals != null && colVals.length >= 4);
		Color4d col = new Color4d(colVals[0], colVals[1], colVals[2], colVals[3]);
		ret.color = col;
		return ret;
	}

	if (!tag.equals("texture")) {
		parseAssert(false);
		return null;
	}

	// Now we have the fun dealing with COLLADA's incredible indirectness
	String texName = valNode.getAttrib("texture");

	// Find this sampler in the map
	XmlNode sampler = paramMap.get(texName);
	parseAssert(sampler != null);

	XmlNode sampler2D = sampler.findChildTag("sampler2D", false);
	parseAssert(sampler2D != null);

	XmlNode source = sampler2D.findChildTag("source", false);
	parseAssert(source != null);

	String surfaceName = (String)source.getContent();

	XmlNode surfaceParam = paramMap.get(surfaceName);
	XmlNode surface = surfaceParam.findChildTag("surface", false);
	parseAssert(surface != null);
	parseAssert(surface.getAttrib("type").equals("2D"));

	XmlNode initFrom = surface.findChildTag("init_from", false);
	parseAssert(initFrom != null);

	String imageName = (String)initFrom.getContent();

	// Resolve the image file name relative to the document URL
	String img = _images.get(imageName);
	parseAssert(img != null);
	try {
		ret.texture = new URL(_contextURL, img);
	} catch (MalformedURLException ex) {
		ex.printStackTrace();
		parseAssert(false);
	}

	return ret;
}
/**
 * Parse every geometry in the library_geometries tag, if present.
 */
private void processGeos() {
	XmlNode libGeo = _colladaNode.findChildTag("library_geometries", false);
	if (libGeo == null) {
		return; // The document has no geometries
	}
	for (XmlNode child : libGeo.children()) {
		if ("geometry".equals(child.getTag())) {
			processGeo(child);
		}
	}
}
/**
 * Parse a <geometry> tag's mesh children into a Geometry and register it by ID.
 * Bug fix: the mesh tag was compared with '==', which matched only when the
 * parser happened to return an interned String; use equals() instead.
 */
private void processGeo(XmlNode geoNode) {
	String geoID = geoNode.getFragID();
	if (geoID == null) {
		// This geometry can not be referenced, don't bother
		return;
	}

	Geometry geoData = new Geometry();

	for (XmlNode meshNode : geoNode.children()) {
		if ("mesh".equals(meshNode.getTag())) {
			parseMesh(meshNode, geoData);
		}
	}

	_geos.put(geoID, geoData);
}
/**
 * Parse every node in the library_nodes tag, if present.
 */
private void processNodes() {
	XmlNode libNodes = _colladaNode.findChildTag("library_nodes", false);
	if (libNodes == null) {
		return; // The document has no library nodes
	}
	for (XmlNode child : libNodes.children()) {
		if ("node".equals(child.getTag())) {
			processNode(child, null);
		}
	}
}
/**
 * Parse a <node> tag into a SceneNode, registering it by fragment ID when one
 * is present and attaching it to 'parent' (null for top level nodes).
 * Transform children are composed in document order; geometry instances, child
 * nodes and instance_node references are then collected in a second pass.
 * @return the newly created SceneNode
 */
private SceneNode processNode(XmlNode node, SceneNode parent) {
	SceneNode sn = new SceneNode();
	sn.id = node.getFragID();
	if (sn.id != null) _namedNodes.put(sn.id, sn);

	if (parent != null) {
		parent.subNodes.add(sn);
	}

	// Build up the transformation matrix for this node
	for (XmlNode child : node.children()) {
		String childTag = child.getTag();
		Mat4d mat = null;
		if (childTag.equals("translate")) {
			mat = transToMat(child);
		}
		if (childTag.equals("rotate")) {
			mat = rotToMat(child);
		}
		if (childTag.equals("scale")) {
			mat = scaleToMat(child);
		}
		if (childTag.equals("matrix")) {
			mat = matToMat(child);
		}
		if (mat != null) {
			// Post-multiply so the transforms apply in document order
			sn.trans.mult4(mat);
		}
	}

	// Now handle sub geometry, sub nodes and instance nodes
	for (XmlNode child : node.children()) {
		String childTag = child.getTag();
		if (childTag.equals("instance_geometry")) {
			GeoInstInfo geoInfo = processInstGeo(child);
			sn.subGeo.add(geoInfo);
		}
		if (childTag.equals("instance_node")) {
			// Instance references are stored by name and resolved during the scene walk
			String nodeID = child.getAttrib("url");
			parseAssert(nodeID != null);
			sn.subInstanceNames.add(nodeID);
		}
		if (childTag.equals("node")) {
			processNode(child, sn);
		}
	}
	return sn;
}
/**
 * Read an instance_geometry tag: the geometry URL plus its material bindings.
 */
private GeoInstInfo processInstGeo(XmlNode instGeo) {
	GeoInstInfo instInfo = new GeoInstInfo();
	instInfo.geoName = instGeo.getAttrib("url");

	XmlNode bindMat = instGeo.findChildTag("bind_material", false);
	if (bindMat == null) {
		return instInfo; // This instance carries no material bindings
	}

	XmlNode techCommon = bindMat.findChildTag("technique_common", false);
	parseAssert(techCommon != null);
	for (XmlNode instMat : techCommon.children()) {
		if (!"instance_material".equals(instMat.getTag())) {
			continue;
		}
		String symbol = instMat.getAttrib("symbol");
		String target = instMat.getAttrib("target");
		parseAssert(symbol != null && target != null);
		// TODO, properly handle rebinding vertex inputs
		instInfo.materialMap.put(symbol, target);
	}
	return instInfo;
}
/**
 * Build a translation matrix from a <translate> tag's three values.
 */
private Mat4d transToMat(XmlNode transNode) {
	double[] vals = (double[])transNode.getContent();
	parseAssert(vals != null && vals.length >= 3);
	Mat4d ret = new Mat4d();
	ret.setTranslate3(new Vec3d(vals[0], vals[1], vals[2]));
	return ret;
}
/**
 * Build a rotation matrix from a <rotate> tag (axis x, y, z plus angle in degrees).
 * Bug fix: the angle was needlessly passed through a float cast, truncating
 * the double precision returned by Math.toRadians().
 */
private Mat4d rotToMat(XmlNode rotNode) {
	double[] vals = (double[])rotNode.getContent();
	parseAssert(vals != null && vals.length >= 4);

	Vec3d axis = new Vec3d(vals[0], vals[1], vals[2]);
	double rads = Math.toRadians(vals[3]);

	Quaternion rot = new Quaternion();
	rot.setAxisAngle(axis, rads);

	Mat4d ret = new Mat4d();
	ret.setRot3(rot);
	return ret;
}
/**
 * Build a scale matrix from a <scale> tag's three values.
 */
private Mat4d scaleToMat(XmlNode scaleNode) {
	double[] vals = (double[])scaleNode.getContent();
	parseAssert(vals != null && vals.length >= 3);
	Mat4d ret = new Mat4d();
	ret.scaleCols3(new Vec3d(vals[0], vals[1], vals[2]));
	return ret;
}
/**
 * Build a matrix directly from a <matrix> tag's sixteen values.
 */
private Mat4d matToMat(XmlNode matNode) {
	double[] vals = (double[])matNode.getContent();
	parseAssert(vals != null && vals.length >= 16);
	return new Mat4d(vals);
}
/**
 * Dispatch each sub geometry of a <mesh> to the appropriate triangle or line
 * generator; unrecognized tags are ignored.
 */
private void parseMesh(XmlNode mesh, Geometry geoData) {
	for (XmlNode subGeo : mesh.children()) {
		String geoTag = subGeo.getTag();
		if (geoTag.equals("polylist") || geoTag.equals("polygons") || geoTag.equals("triangles")) {
			generateTriangleGeo(subGeo, geoData);
		} else if (geoTag.equals("lines") || geoTag.equals("linestrip")) {
			generateLineGeo(subGeo, geoData);
		}
	}
}
/**
 * Build a LineSubGeo from a 'lines' or 'linestrip' element and add it to geoData.
 */
private void generateLineGeo(XmlNode subGeo, Geometry geoData) {
	String geoTag = subGeo.getTag();

	SubMeshDesc smd = readGeometryInputs(subGeo);

	if (geoTag.equals("lines")) {
		parseLines(smd, subGeo);
	}
	if (geoTag.equals("linestrip")) {
		parseLinestrip(smd, subGeo);
	}

	int numVerts = smd.posDesc.indices.length;
	parseAssert(numVerts % 2 == 0); // line segments come in vertex pairs

	// Now the SubMeshDesc should be fully populated, and we can actually produce the final triangle arrays
	LineSubGeo lsg = new LineSubGeo(numVerts);

	Vec4d[] posData = getDataArrayFromSource(smd.posDesc.source);

	lsg.materialSymbol = subGeo.getAttrib("material");
	parseAssert(lsg.materialSymbol != null);
	for (int i = 0; i < numVerts; ++i) {
		// NOTE(review): this aliases the cached source array and then sets w on the
		// shared Vec4d — later users of the same data source will observe w == 1;
		// confirm this is intended (it is harmless for stride-3 sources, which
		// already have w == 1)
		lsg.verts[i] = posData[smd.posDesc.indices[i]];
		lsg.verts[i].w = 1;
	}
	geoData.lineSubGeos.add(lsg);
}
/**
 * Compute the unit normal of the triangle (p0, p1, p2).
 * t0 and t1 are caller-supplied scratch vectors; their contents are overwritten.
 */
private Vec4d generateNormal(Vec4d p0, Vec4d p1, Vec4d p2, Vec4d t0, Vec4d t1) {
	t0.sub3(p1, p0);
	t1.sub3(p2, p0);
	Vec4d norm = new Vec4d();
	norm.cross3(t0, t1);
	norm.normalize3();
	norm.w = 0; // normals are directions, not positions
	return norm;
}
/**
 * Build a FaceSubGeo (triangle data) from a 'triangles', 'polylist' or
 * 'polygons' element and add it to geoData. Normals missing from the input
 * (either entirely, or flagged by a negative index) are generated per face.
 */
private void generateTriangleGeo(XmlNode subGeo, Geometry geoData) {
	String geoTag = subGeo.getTag();

	SubMeshDesc smd = readGeometryInputs(subGeo);
	boolean hasNormal = smd.normDesc != null;

	// Populate smd's index arrays according to the element type
	if (geoTag.equals("triangles")) {
		parseTriangles(smd, subGeo);
	}
	if (geoTag.equals("polylist")) {
		parsePolylist(smd, subGeo);
	}
	if (geoTag.equals("polygons")) {
		parsePolygons(smd, subGeo);
	}

	int numVerts = smd.posDesc.indices.length;
	parseAssert(numVerts % 3 == 0);
	if (numVerts == 0) {
		return; // Empty sub geometry, nothing to add
	}

	// Now the SubMeshDesc should be fully populated, and we can actually produce the final triangle arrays
	boolean hasTexCoords = (smd.texCoordDesc != null);

	FaceSubGeo fsg = new FaceSubGeo(numVerts);

	Vec4d[] posData = getDataArrayFromSource(smd.posDesc.source);

	Vec4d[] normData = null;
	if (hasNormal) {
		normData = getDataArrayFromSource(smd.normDesc.source);
	}

	Vec4d[] texCoordData = null;
	if (hasTexCoords)
		texCoordData = getDataArrayFromSource(smd.texCoordDesc.source);

	fsg.materialSymbol = subGeo.getAttrib("material");
	parseAssert(fsg.materialSymbol != null);

	// Scratch vectors reused by generateNormal() to avoid per-face allocation
	Vec4d t0 = new Vec4d();
	Vec4d t1 = new Vec4d();

	Vec4d[] generatedNormals = null;
	if (!hasNormal) {
		// Generate one normal per face
		generatedNormals = new Vec4d[numVerts/3];
		for (int i = 0; i < numVerts / 3; ++i) {
			Vec4d p0 = posData[smd.posDesc.indices[i*3 + 0]];
			Vec4d p1 = posData[smd.posDesc.indices[i*3 + 1]];
			Vec4d p2 = posData[smd.posDesc.indices[i*3 + 2]];
			Vec4d norm = generateNormal(p0, p1, p2, t0, t1);
			generatedNormals[i] = norm;
		}
	}

	for (int i = 0; i < numVerts; ++i) {
		Vec4d pos = new Vec4d(posData[smd.posDesc.indices[i]]);
		pos.w = 1; // positions are points

		Vec4d normal = null;
		if (hasNormal) {
			// Make sure the normal is actually present, treat negative indices as missing normals
			int normInd = smd.normDesc.indices[i];
			if (normInd < 0) {
				// We need to generate one
				int triInd = i/3;
				Vec4d p0 = posData[smd.posDesc.indices[triInd*3 + 0]];
				Vec4d p1 = posData[smd.posDesc.indices[triInd*3 + 1]];
				Vec4d p2 = posData[smd.posDesc.indices[triInd*3 + 2]];
				normal = generateNormal(p0, p1, p2, t0, t1);
			}
			else {
				normal = new Vec4d(normData[normInd]);
				normal.w = 0; // normals are directions
			}
		} else {
			normal = generatedNormals[i/3];
		}

		Vec4d texCoord = null;
		if (hasTexCoords) {
			texCoord = texCoordData[smd.texCoordDesc.indices[i]];
		}
		// The vertex map de-duplicates identical (pos, normal, texCoord) triples
		fsg.indices[i] = fsg.vMap.getVertIndex(pos, normal, texCoord);
	}

	geoData.faceSubGeos.add(fsg);
}
/**
 * Read the <input> children of a <vertices> element, recording the POSITION
 * and NORMAL sources at the given index offset.
 * Bug fix: the input tag was compared with '!=', which compares object
 * identity and only behaved correctly when the parser returned interned
 * Strings; use equals() instead.
 * @param smd descriptor to fill in
 * @param offset index offset of the VERTEX input that referenced this element
 * @param vertices the <vertices> element
 */
private void readVertices(SubMeshDesc smd, int offset, XmlNode vertices) {
	// Check vertices for inputs
	for (XmlNode input : vertices.children()) {
		if (!"input".equals(input.getTag())) {
			continue;
		}
		String semantic = input.getAttrib("semantic");
		String source = input.getAttrib("source");
		if (source == null || semantic == null)
			throw new ColException("Bad Vertex Input tag: " + input.getFragID() + " in mesh.");

		if (semantic.equals("POSITION")) {
			smd.posDesc = new DataDesc();
			smd.posDesc.source = source;
			smd.posDesc.offset = offset;
		}
		if (semantic.equals("NORMAL")) {
			smd.normDesc = new DataDesc();
			smd.normDesc.source = source;
			smd.normDesc.offset = offset;
		}
	}
}
/**
 * Fill in smd's position indices from a 'lines' element: 'count' segments,
 * two interleaved index tuples per segment.
 */
private void parseLines(SubMeshDesc smd, XmlNode subGeo) {
	int count = Integer.parseInt(subGeo.getAttrib("count"));
	XmlNode pNode = subGeo.findChildTag("p", false);
	if (pNode == null)
		throw new ColException("No 'p' child in 'lines' in mesh.");

	int[] ps = (int[])pNode.getContent();
	parseAssert(ps.length >= count * 2 * smd.stride);

	// Pull the position index out of each interleaved index tuple
	int numIndices = count * 2;
	smd.posDesc.indices = new int[numIndices];
	for (int i = 0; i < numIndices; ++i) {
		smd.posDesc.indices[i] = ps[i * smd.stride + smd.posDesc.offset];
	}
}
/**
 * Convert a 'linestrip' element into discrete line segment index pairs.
 * Each <p> child is one strip; a strip of k indices yields k-1 segments.
 * NOTE(review): unlike parseLines, the strip indices are used directly without
 * applying smd.stride/posDesc.offset — this assumes a single, position-only
 * input stream; confirm against strips with multiple inputs.
 */
private void parseLinestrip(SubMeshDesc smd, XmlNode subGeo) {
	int count = Integer.parseInt(subGeo.getAttrib("count"));
	// There should be 'count' number of 'p' tags in this element
	int[][] stripIndices = new int[count][];
	int numLines = 0;
	int nextIndex = 0;
	for (XmlNode child : subGeo.children()) {
		if (!child.getTag().equals("p")) continue;

		int[] ps = (int[])child.getContent();
		parseAssert(ps != null);
		parseAssert(nextIndex < count);
		stripIndices[nextIndex++] = ps;
		numLines += ps.length - 1;
	}

	// We now have a list of list of all indices, split this into lines
	int nextWriteIndex = 0;
	smd.posDesc.indices = new int[numLines * 2];
	for (int[] strip : stripIndices) {
		parseAssert(strip.length >= 2);

		for (int i = 1; i < strip.length; ++i) {
			smd.posDesc.indices[nextWriteIndex++] = strip[i-1];
			smd.posDesc.indices[nextWriteIndex++] = strip[i];
		}
	}
}
// Fill in the indices for 'smd'
/**
 * Read a 'triangles' element: 'count' triangles, three interleaved index
 * tuples per triangle. De-interleaves position, normal and texcoord indices
 * into their respective DataDesc arrays.
 */
private void parseTriangles(SubMeshDesc smd, XmlNode subGeo) {
	int count = Integer.parseInt(subGeo.getAttrib("count"));
	XmlNode pNode = subGeo.findChildTag("p", false);
	if (count == 0) {
		smd.posDesc.indices = new int[0]; // explicitly empty mesh
		return;
	}
	if (pNode == null)
		throw new ColException("No 'p' child in 'triangles' in mesh.");

	int[] ps = (int[])pNode.getContent();
	parseAssert(ps.length >= count * 3 * smd.stride);

	smd.posDesc.indices = new int[count*3];
	if (smd.normDesc != null) {
		smd.normDesc.indices = new int[count*3];
	}
	if (smd.texCoordDesc != null) {
		smd.texCoordDesc.indices = new int[count*3];
	}
	for (int i = 0; i < count * 3; ++i) {
		// Each vertex occupies 'stride' consecutive entries in ps
		int offset = i * smd.stride;
		smd.posDesc.indices[i] = ps[offset + smd.posDesc.offset];
		if (smd.normDesc != null) {
			smd.normDesc.indices[i] = ps[offset + smd.normDesc.offset];
		}
		if (smd.texCoordDesc != null) {
			smd.texCoordDesc.indices[i] = ps[offset + smd.texCoordDesc.offset];
		}
	}
}
// Note, this is definitely not correct, but for now assume all polygons are convex
/**
 * Fill in smd's index arrays from a 'polylist' element, fan-triangulating each
 * polygon into triangles (i, i+1, lastVertex).
 */
private void parsePolylist(SubMeshDesc smd, XmlNode subGeo) {
	int count = Integer.parseInt(subGeo.getAttrib("count"));
	XmlNode pNode = subGeo.findChildTag("p", false);
	if (pNode == null)
		throw new ColException("No 'p' child in 'polygons' in mesh.");
	int[] ps = (int[])pNode.getContent();

	XmlNode vcountNode = subGeo.findChildTag("vcount", false);
	int[] vcounts;
	if (vcountNode != null)
		vcounts = (int[])vcountNode.getContent();
	else
		vcounts = new int[0];

	parseAssert(vcounts.length == count);
	int totalVerts = 0;
	int numTriangles = 0;
	// A convex polygon of i vertices fans into (i-2) triangles
	for (int i : vcounts) {
		if (i == 0) {
			continue;
		}
		totalVerts += i;
		numTriangles += (i-2);
	}

	parseAssert(ps.length >= totalVerts * smd.stride);

	smd.posDesc.indices = new int[numTriangles * 3];
	if (smd.normDesc != null) {
		smd.normDesc.indices = new int[numTriangles * 3];
	}
	if (smd.texCoordDesc != null) {
		smd.texCoordDesc.indices = new int[numTriangles * 3];
	}

	int nextWriteVert = 0;
	int readVertOffset = 0;
	for (int v : vcounts) {
		if (v == 0) {
			continue;
		}
		// v is the number of vertices in this polygon
		parseAssert(v >= 3);
		// Emit triangle (i, i+1, v-1) for each fan step
		for (int i = 0; i < (v-2); ++i) {
			int vert0 = readVertOffset + i;
			int vert1 = readVertOffset + i + 1;
			int vert2 = readVertOffset + v - 1;

			smd.posDesc.indices[nextWriteVert] = ps[(vert0*smd.stride) + smd.posDesc.offset];
			if (smd.normDesc != null) {
				smd.normDesc.indices[nextWriteVert] = ps[(vert0*smd.stride) + smd.normDesc.offset];
			}
			if (smd.texCoordDesc != null) {
				smd.texCoordDesc.indices[nextWriteVert] = ps[(vert0*smd.stride) + smd.texCoordDesc.offset];
			}
			nextWriteVert++;

			smd.posDesc.indices[nextWriteVert] = ps[(vert1*smd.stride) + smd.posDesc.offset];
			if (smd.normDesc != null) {
				smd.normDesc.indices[nextWriteVert] = ps[(vert1*smd.stride) + smd.normDesc.offset];
			}
			if (smd.texCoordDesc != null) {
				smd.texCoordDesc.indices[nextWriteVert] = ps[(vert1*smd.stride) + smd.texCoordDesc.offset];
			}
			nextWriteVert++;

			smd.posDesc.indices[nextWriteVert] = ps[(vert2*smd.stride) + smd.posDesc.offset];
			if (smd.normDesc != null) {
				smd.normDesc.indices[nextWriteVert] = ps[(vert2*smd.stride) + smd.normDesc.offset];
			}
			if (smd.texCoordDesc != null) {
				smd.texCoordDesc.indices[nextWriteVert] = ps[(vert2*smd.stride) + smd.texCoordDesc.offset];
			}
			nextWriteVert++;
		}
		readVertOffset += v;
	}
}
// Note, this is definitely not correct, but for now assume all polygons are convex
/**
 * Fill in smd's index arrays from a 'polygons' element, fan-triangulating each
 * <p> polygon into triangles (0, i+1, i+2).
 * Bug fix: the 'p' tag was compared with '!=', which compares object identity
 * and only behaved correctly when the parser returned interned Strings; use
 * equals() instead. The three identical vertex-emission stanzas were also
 * collapsed into a loop over the fan triangle's three corner indices.
 */
private void parsePolygons(SubMeshDesc smd, XmlNode subGeo) {

	// First pass: count the triangles produced by fanning each polygon
	int numTriangles = 0;
	for (XmlNode n : subGeo.children()) {
		// Note: we do not support 'ph' tags (polygons with holes)
		if (!"p".equals(n.getTag())) {
			continue;
		}
		int[] ps = (int[])n.getContent();
		int numVerts = ps.length / smd.stride;
		parseAssert( (ps.length % smd.stride) == 0);
		parseAssert(numVerts >= 3);
		numTriangles += numVerts - 2;
	}

	smd.posDesc.indices = new int[numTriangles * 3];
	if (smd.normDesc != null) {
		smd.normDesc.indices = new int[numTriangles * 3];
	}
	if (smd.texCoordDesc != null) {
		smd.texCoordDesc.indices = new int[numTriangles * 3];
	}

	// Second pass: emit a fan triangle (0, i+1, i+2) for each step of each polygon
	int nextWriteVert = 0;
	for (XmlNode n : subGeo.children()) {
		if (!"p".equals(n.getTag())) {
			continue;
		}
		int[] ps = (int[])n.getContent();
		for (int i = 0; i < (ps.length / smd.stride) - 2; ++i) {
			int[] fanVerts = { 0, i + 1, i + 2 };
			for (int vert : fanVerts) {
				smd.posDesc.indices[nextWriteVert] = ps[(vert*smd.stride) + smd.posDesc.offset];
				if (smd.normDesc != null) {
					smd.normDesc.indices[nextWriteVert] = ps[(vert*smd.stride) + smd.normDesc.offset];
				}
				if (smd.texCoordDesc != null) {
					smd.texCoordDesc.indices[nextWriteVert] = ps[(vert*smd.stride) + smd.texCoordDesc.offset];
				}
				nextWriteVert++;
			}
		}
	}
}
/**
 * Read the <input> children of a sub geometry, recording the source and index
 * offset for positions (mandatory, via the indirect VERTEX input), normals and
 * texture coordinates. The stride is one more than the largest offset seen.
 * @throws ColException if an input is malformed or positions are missing
 */
private SubMeshDesc readGeometryInputs(XmlNode subGeo) {
	SubMeshDesc smd = new SubMeshDesc();

	int maxOffset = 0;
	for (XmlNode input : subGeo.children()) {
		if (!input.getTag().equals("input")) {
			continue;
		}
		String semantic = input.getAttrib("semantic");
		String source = input.getAttrib("source");
		if (source == null || semantic == null)
			throw new ColException("Bad Geometry Input tag: " + input.getFragID());
		// NOTE(review): getAttrib("offset") is parsed unconditionally — a missing
		// offset attribute would fail here; confirm all exporters emit it
		int offset = Integer.parseInt(input.getAttrib("offset"));

		if (offset > maxOffset) { maxOffset = offset; }

		if (semantic.equals("VERTEX")) {
			// Indirect input: follow the <vertices> element for POSITION/NORMAL
			XmlNode vertices = getNodeFromID(source);
			readVertices(smd, offset, vertices);
		}
		if (semantic.equals("NORMAL")) {
			smd.normDesc = new DataDesc();
			smd.normDesc.source = source;
			smd.normDesc.offset = offset;
		}
		if (semantic.equals("TEXCOORD") ||
		    semantic.equals("TEXCOORD0")) {
			smd.texCoordDesc = new DataDesc();
			smd.texCoordDesc.source = source;
			smd.texCoordDesc.offset = offset;
		}
	}
	if (smd.posDesc == null) {
		throw new ColException("Could not find positions for mesh.");
	}
	smd.stride = maxOffset+1;
	return smd;
}
/**
 * Return a meaningful list of Vectors from data source 'id', converting the
 * raw float_array through the accessor's count/stride. Results are cached in
 * _dataSources so each source is converted only once.
 * Robustness fix: an accessor stride outside {2, 3, 4} previously fell through
 * the switch silently, leaving null entries in the returned array and causing
 * an NPE far from the actual problem; it now raises a clear ColException.
 * @param id fragment reference of the source node
 * @throws ColException if the source is missing or malformed
 */
Vec4d[] getDataArrayFromSource(String id) {
	// First check the cache
	Vec4d[] cached = _dataSources.get(id);
	if (cached != null) {
		return cached;
	}
	// Okay, this source hasn't been accessed yet
	XmlNode sourceNode = getNodeFromID(id);
	if (sourceNode == null) { throw new ColException("Could not find node with id: " + id); }
	XmlNode floatNode = sourceNode.findChildTag("float_array", false);
	if (floatNode == null) { throw new ColException("No float array in source: " + id); }

	int floatCount = Integer.parseInt(floatNode.getAttrib("count"));
	double[] values = (double[])floatNode.getContent();

	XmlNode techCommon = sourceNode.findChildTag("technique_common", false);
	if (techCommon == null) { throw new ColException("No technique_common in source: " + id); }
	XmlNode accessor = techCommon.findChildTag("accessor", false);
	if (accessor == null) { throw new ColException("No accessor in source: " + id); }

	int stride = Integer.parseInt(accessor.getAttrib("stride"));
	int count = Integer.parseInt(accessor.getAttrib("count"));
	parseAssert(floatCount >= count * stride);

	Vec4d[] ret = new Vec4d[count];
	int valueOffset = 0;
	for (int i = 0; i < count; ++i) {
		switch (stride) {
		case 2:
			ret[i] = new Vec4d(values[valueOffset], values[valueOffset+1], 0, 1);
			break;
		case 3:
			ret[i] = new Vec4d(values[valueOffset], values[valueOffset+1], values[valueOffset+2], 1);
			break;
		case 4:
			ret[i] = new Vec4d(values[valueOffset], values[valueOffset+1], values[valueOffset+2], values[valueOffset+3]);
			break;
		default:
			throw new ColException("Unsupported accessor stride " + stride + " in source: " + id);
		}
		valueOffset += stride;
	}

	_dataSources.put(id, ret);

	return ret;
}
/**
 * Returns the mesh data accumulated by processContent().
 */
public MeshData getData() {
	return _finalData;
}
/**
 * This data structure is useful for turning COLLADA data arrays into flat arrays to be processed.
 * @author matt.chudleigh
 *
 */
private static class DataDesc {
	// The fragID of the source
	public String source;
	// The offset in the index array
	public int offset;
	// Flattened per-vertex indices into the source data array
	int[] indices;
}
// Describes the inputs of one sub geometry: position is mandatory, normals and
// texture coordinates are optional. 'stride' is the number of interleaved
// indices per vertex in the <p> arrays.
private static class SubMeshDesc {
	public DataDesc posDesc;
	public DataDesc normDesc;
	public DataDesc texCoordDesc;
	public int stride;
}
/**
 * Information for a geometry instance (as opposed to a geometry)
 * for now this is simply a map from material binding symbols to actual material IDs
 * and a geometry name
 */
private static class GeoInstInfo {
	// Fragment reference ("#id") of the instanced geometry
	public String geoName;
	public final Map<String, String> materialMap = new HashMap<String, String>();
}
/**
 * SceneNode is basically a container for Collada "node" tags, this is needed to allow the system to walk the
 * node tree and properly honour the instance nodes.
 * @author matt.chudleigh
 */
private static class SceneNode {

	// Fragment ID of this node, or null when the node can not be referenced
	public String id;
	// Local transform composed from the node's translate/rotate/scale/matrix children
	public final Mat4d trans = new Mat4d();

	public final ArrayList<SceneNode> subNodes = new ArrayList<SceneNode>();
	// Unresolved instance_node URLs ("#id"), resolved during the scene walk
	public final ArrayList<String> subInstanceNames = new ArrayList<String>();
	public final ArrayList<GeoInstInfo> subGeo = new ArrayList<GeoInstInfo>();
}
/**
 * A union like data structure, this is either a color value or texture (it should always be one or the other, but never both)
 * @author matt.chudleigh
 *
 */
private static class ColorTex {
	public Color4d color;
	public URL texture;
}
// Rendering properties extracted from a COLLADA <effect>
private static class Effect {
	// Only hold diffuse colour for now
	public ColorTex diffuse;
	// One of the MeshData transparency modes (set in processEffect)
	public int transType;
	public Color4d transColour;
}
/**
 * Identity pair of a line sub geometry and the effect it is drawn with.
 * Equality and hashing intentionally use object identity of both members.
 */
private static class LineGeoEffectPair {
	public LineSubGeo geo;
	public Effect effect;

	public LineGeoEffectPair(LineSubGeo g, Effect e) {
		this.geo = g;
		this.effect = e;
	}

	@Override
	public int hashCode() { return geo.hashCode() ^ effect.hashCode(); }

	@Override
	public boolean equals(Object o) {
		// instanceof also rejects null, so no separate null check is needed
		if (!(o instanceof LineGeoEffectPair)) return false;
		LineGeoEffectPair other = (LineGeoEffectPair)o;
		return other.geo == geo && other.effect == effect;
	}
}
}
|
src/main/java/com/jaamsim/collada/ColParser.java
|
/*
* JaamSim Discrete Event Simulation
* Copyright (C) 2012 Ausenco Engineering Canada Inc.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*/
package com.jaamsim.collada;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Stack;
import java.util.Vector;
import javax.xml.parsers.SAXParserFactory;
import com.jaamsim.MeshFiles.MeshData;
import com.jaamsim.MeshFiles.VertexMap;
import com.jaamsim.math.Color4d;
import com.jaamsim.math.ConvexHull;
import com.jaamsim.math.Mat4d;
import com.jaamsim.math.Quaternion;
import com.jaamsim.math.Vec3d;
import com.jaamsim.math.Vec4d;
import com.jaamsim.render.RenderException;
import com.jaamsim.xml.XmlNode;
import com.jaamsim.xml.XmlParser;
/**
* Inspired by the Collada loader for Sweethome3d by Emmanuel Puybaret / eTeks <info@eteks.com>.
*/
public class ColParser {
/**
 * Parses the COLLADA document at the given URL and returns its mesh data.
 *
 * @param asset location of the .dae file to load
 * @return the fully-populated mesh data
 * @throws RenderException if parsing fails for any reason
 */
public static MeshData parse(URL asset) throws RenderException {
	// Note: the original code built an unused SAXParserFactory here; the
	// actual parsing is done by XmlParser inside processContent().
	try {
		ColParser colParser = new ColParser(asset);
		colParser.processContent();
		return colParser.getData();
	} catch (Exception e) {
		e.printStackTrace();
		// NOTE(review): only the message is propagated; the cause is lost.
		// Preserve it if RenderException gains a (String, Throwable) ctor.
		throw new RenderException(e.getMessage());
	}
}
/**
 * Aborts parsing with a RenderException when the given condition is false.
 */
private static void parseAssert(boolean cond) {
	if (cond)
		return;
	throw new RenderException("Failed Collada parsing assert");
}
// Tags whose text content the XmlParser should decode into typed arrays
static final List<String> DOUBLE_ARRAY_TAGS;
static final List<String> INT_ARRAY_TAGS;
static final List<String> STRING_ARRAY_TAGS;
static final List<String> BOOLEAN_ARRAY_TAGS;

static {
	DOUBLE_ARRAY_TAGS = new ArrayList<String>();
	for (String tag : new String[] {
			"float_array", "rotate", "translate", "scale", "lookat", "matrix", "color" }) {
		DOUBLE_ARRAY_TAGS.add(tag);
	}

	INT_ARRAY_TAGS = new ArrayList<String>();
	for (String tag : new String[] { "int_array", "vcount", "p", "h", "v" }) {
		INT_ARRAY_TAGS.add(tag);
	}

	STRING_ARRAY_TAGS = new ArrayList<String>();
	for (String tag : new String[] { "Name_array", "IDREF_array" }) {
		STRING_ARRAY_TAGS.add(tag);
	}

	BOOLEAN_ARRAY_TAGS = new ArrayList<String>();
	BOOLEAN_ARRAY_TAGS.add("boolean_array");
}
/** A triangle-based sub geometry: deduplicated vertices plus an index list. */
private static class FaceSubGeo {
	// Deduplicating map from (pos, normal, texCoord) tuples to vertex indices
	public VertexMap vMap;
	// One entry per triangle corner, indexing into vMap's vertex list
	int[] indices;
	// Material symbol this sub geometry is bound to (resolved per instance)
	public String materialSymbol;

	public FaceSubGeo(int size) {
		vMap = new VertexMap();
		indices = new int[size];
	}
}
/** A line-based sub geometry: pairs of vertices forming line segments. */
private static class LineSubGeo {
	// Two consecutive entries per line segment
	public final Vec4d[] verts;
	// Material symbol this sub geometry is bound to (resolved per instance)
	public String materialSymbol;

	public LineSubGeo(int size) {
		verts = new Vec4d[size];
	}
}
/** Top-level scene: the list of root nodes of the node hierarchy. */
private static class VisualScene {
	public final ArrayList<SceneNode> nodes = new ArrayList<SceneNode>();
}
/** All sub geometries parsed from a single &lt;geometry&gt; element. */
private static class Geometry {
	// NOTE(review): Vector is a legacy synchronized collection; ArrayList
	// would suffice here unless concurrent access is intended — verify.
	public final Vector<FaceSubGeo> faceSubGeos = new Vector<FaceSubGeo>();
	public final Vector<LineSubGeo> lineSubGeos = new Vector<LineSubGeo>();
}
// Base URL of the document, used to resolve relative texture paths
private final URL _contextURL;

// Parsed libraries, keyed by fragment ID
private final HashMap<String, Geometry> _geos = new HashMap<String, Geometry>();
private final HashMap<String, String> _images = new HashMap<String, String>(); // Maps image names to files
private final HashMap<String, String> _materials = new HashMap<String, String>(); // Maps materials to effects
private final HashMap<String, Effect> _effects = new HashMap<String, Effect>(); // List of known effects
private final HashMap<String, SceneNode> _namedNodes = new HashMap<String, SceneNode>();
private final HashMap<String, VisualScene> _visualScenes = new HashMap<String, VisualScene>();

// This stack is used to track node loops
private final Stack<SceneNode> _nodeStack = new Stack<SceneNode>();

// This list tracks the combinations of sub geometries and effects loaded in the mesh proto and defines an implicit
// index into the mesh proto. This should probably be made more explicit later
private final ArrayList<FaceSubGeo> _loadedFaceGeos = new ArrayList<FaceSubGeo>();
private final ArrayList<Effect> _loadedEffects = new ArrayList<Effect>();
private final ArrayList<LineGeoEffectPair> _loadedLineGeos = new ArrayList<LineGeoEffectPair>();

// Accumulated output of the whole parse
private MeshData _finalData = new MeshData();

// Cache for getDataArrayFromSource(), keyed by source fragment ID
private HashMap<String, Vec4d[]> _dataSources = new HashMap<String, Vec4d[]>();

private XmlNode _colladaNode;
private XmlParser _parser;
/**
 * @param context base URL of the document; relative references (textures)
 *                are resolved against it
 */
public ColParser(URL context) {
	_contextURL = context;
}
/**
 * Resolves a "#id" fragment reference to the XML node it names.
 *
 * @param fragID reference string, expected to start with '#'
 * @return the referenced node, or null if the reference is malformed
 */
private XmlNode getNodeFromID(String fragID) {
	// startsWith() is false for the empty string, covering the length check
	if (!fragID.startsWith("#")) {
		return null;
	}
	return _parser.getNodeByID(fragID.substring(1));
}
/**
 * Drives the whole parse: reads the XML, processes each COLLADA library in
 * dependency order, then walks the scene to emit mesh data.
 */
private void processContent() {
	// Configure which tags are decoded into typed arrays before parsing
	_parser = new XmlParser(_contextURL);

	_parser.setDoubleArrayTags(DOUBLE_ARRAY_TAGS);
	_parser.setIntArrayTags(INT_ARRAY_TAGS);
	_parser.setBooleanArrayTags(BOOLEAN_ARRAY_TAGS);
	_parser.setStringArrayTags(STRING_ARRAY_TAGS);

	_parser.parse();

	_colladaNode = _parser.getRootNode().findChildTag("COLLADA", false);
	parseAssert(_colladaNode != null);

	// Libraries must be processed before the scene references them
	processGeos();
	processImages();
	processMaterials();
	processEffects();
	processNodes();
	processVisualScenes();

	// NOTE(review): resetting ConvexHull's static timing counters looks like
	// leftover profiling/debug code — consider removing.
	ConvexHull.buildTime = 0; ConvexHull.filterTime = 0; ConvexHull.finalizeTime = 0; ConvexHull.sortTime = 0;

	processScene();
}
/**
 * Returns the metres-per-unit scale declared in the asset block, or 1 when
 * no scale information is present.
 */
private double getScaleFactor() {
	XmlNode assetNode = _colladaNode.findChildTag("asset", false);
	if (assetNode != null) {
		XmlNode unit = assetNode.findChildTag("unit", false);
		if (unit != null) {
			String meter = unit.getAttrib("meter");
			if (meter != null) {
				return Double.parseDouble(meter);
			}
		}
	}
	return 1; // default: one unit == one metre
}
/**
 * Returns the document's declared up axis ("X_UP", "Y_UP" or "Z_UP"),
 * defaulting to "Y_UP" when absent.
 */
private String getUpAxis() {
	String fallback = "Y_UP";
	XmlNode assetNode = _colladaNode.findChildTag("asset", false);
	if (assetNode == null)
		return fallback;
	XmlNode upAxisNode = assetNode.findChildTag("up_axis", false);
	if (upAxisNode == null)
		return fallback;
	String axis = (String)upAxisNode.getContent();
	return (axis != null) ? axis : fallback;
}
/**
* Returns a matrix that rotates which ever axis is specified into the Z axis (as JaamSim treats Z as up)
* @return
*/
/**
 * Returns a matrix that rotates which ever axis is specified into the Z axis
 * (as JaamSim treats Z as up).
 */
private Mat4d getGlobalRot() {
	String up = getUpAxis();
	Mat4d mat = new Mat4d(); // identity — correct for Z_UP as-is

	if (up.equals("X_UP")) {
		mat.d00 =  0; mat.d01 =  0; mat.d02 = 1;
		mat.d10 = -1; mat.d11 =  0; mat.d12 = 0;
		mat.d20 =  0; mat.d21 = -1; mat.d22 = 0;
	} else if (!up.equals("Z_UP")) {
		// Y_UP (the COLLADA default) and any unrecognized value
		mat.d00 = 1; mat.d01 = 0; mat.d02 =  0;
		mat.d10 = 0; mat.d11 = 0; mat.d12 = -1;
		mat.d20 = 0; mat.d21 = 1; mat.d22 =  0;
	}
	return mat;
}
/**
 * Walks the scene referenced by &lt;scene&gt;, applying the global up-axis
 * rotation and unit scale, and builds the final mesh hull.
 */
private void processScene() {
	XmlNode scene = _colladaNode.findChildTag("scene", false);
	parseAssert(scene != null);

	XmlNode instVS = scene.findChildTag("instance_visual_scene", false);
	parseAssert(instVS != null);
	String vsURL = instVS.getAttrib("url");
	parseAssert(vsURL.charAt(0) == '#');

	// Root transform: axis correction combined with the unit scale
	Mat4d globalMat = getGlobalRot();
	globalMat.scale3(getScaleFactor());

	VisualScene vs = _visualScenes.get(vsURL.substring(1));

	for (SceneNode sn : vs.nodes) {
		visitNode(sn, globalMat);
	}

	_finalData.generateHull();
}
/**
 * Recursively visits a scene node, emitting its geometry instances with the
 * accumulated transform and then descending into its children and any
 * instance_node references.
 *
 * @param node      node being visited
 * @param parentMat accumulated transform of the parent chain
 */
private void visitNode(SceneNode node, Mat4d parentMat) {
	_nodeStack.push(node);

	// Update the current transform
	Mat4d currentMat = new Mat4d(parentMat);
	currentMat.mult4(node.trans);

	for (GeoInstInfo geoInfo : node.subGeo) {
		addGeoInst(geoInfo, currentMat);
	}

	// Resolve instance_node references into a local list. (Bug fix: the
	// original appended them to node.subNodes, so a node instanced more than
	// once accumulated duplicate children on every visit.)
	ArrayList<SceneNode> instanced = new ArrayList<SceneNode>();
	for (String nodeName : node.subInstanceNames) {
		parseAssert(nodeName.charAt(0) == '#');
		SceneNode instNode = _namedNodes.get(nodeName.substring(1));
		parseAssert(instNode != null); // null-check before use (was after)
		// Check for reference loops, make sure this node is not currently in the active node stack
		parseAssert(!_nodeStack.contains(instNode));
		instanced.add(instNode);
	}

	// Finally continue visiting the scene
	for (SceneNode nextNode : node.subNodes) {
		visitNode(nextNode, currentMat);
	}
	for (SceneNode nextNode : instanced) {
		visitNode(nextNode, currentMat);
	}

	_nodeStack.pop();
}
/**
 * Follows the COLLADA indirection chain symbol -&gt; material -&gt; effect.
 *
 * @param materialMap per-instance binding of material symbols to material IDs
 * @param symbol      material symbol declared on the sub geometry
 * @return the resolved effect (never null; parse asserts on failure)
 */
private Effect geoBindingToEffect(Map<String, String> materialMap, String symbol) {
	String materialId = materialMap.get(symbol);
	parseAssert(materialId != null && materialId.charAt(0) == '#');

	String effectId = _materials.get(materialId.substring(1));
	parseAssert(effectId != null && effectId.charAt(0) == '#');

	Effect effect = _effects.get(effectId.substring(1));
	parseAssert(effect != null);
	return effect;
}
/**
 * Emits one instance of every sub geometry in the referenced geometry,
 * registering each (geometry, effect) combination with the mesh data the
 * first time it is seen.
 *
 * @param geoInfo geometry reference plus its material bindings
 * @param mat     world transform for this instance
 */
private void addGeoInst(GeoInstInfo geoInfo, Mat4d mat) {
	parseAssert(geoInfo.geoName.charAt(0) == '#');
	Geometry geo = _geos.get(geoInfo.geoName.substring(1));

	for (FaceSubGeo subGeo : geo.faceSubGeos) {
		Effect effect = geoBindingToEffect(geoInfo.materialMap, subGeo.materialSymbol);

		// Single indexOf() replaces the original contains()+indexOf() pair,
		// halving the linear scans per sub geometry
		int geoID = _loadedFaceGeos.indexOf(subGeo);
		if (geoID == -1) {
			geoID = _loadedFaceGeos.size();
			_loadedFaceGeos.add(subGeo);
			_finalData.addSubMesh(subGeo.vMap.getVertList(), subGeo.indices);
		}

		int matID = _loadedEffects.indexOf(effect);
		if (matID == -1) {
			matID = _loadedEffects.size();
			_loadedEffects.add(effect);
			_finalData.addMaterial(effect.diffuse.texture,
			                       effect.diffuse.color,
			                       effect.transType, effect.transColour);
		}
		_finalData.addSubMeshInstance(geoID, matID, -1, mat, null);
	}

	for (LineSubGeo subGeo : geo.lineSubGeos) {
		Effect effect = geoBindingToEffect(geoInfo.materialMap, subGeo.materialSymbol);

		LineGeoEffectPair ge = new LineGeoEffectPair(subGeo, effect);
		int geoID = _loadedLineGeos.indexOf(ge);
		if (geoID == -1) {
			geoID = _loadedLineGeos.size();
			_loadedLineGeos.add(ge);
			_finalData.addSubLine(subGeo.verts,
			                      effect.diffuse.color);
		}
		_finalData.addSubLineInstance(geoID, mat);
	}
}
/** Processes every &lt;visual_scene&gt; in the library, if any. */
private void processVisualScenes() {
	XmlNode libScenes = _colladaNode.findChildTag("library_visual_scenes", false);
	if (libScenes == null)
		return; // No scenes

	for (XmlNode child : libScenes.children()) {
		if (!child.getTag().equals("visual_scene"))
			continue;
		processVisualScene(child);
	}
}
/** Registers a visual scene and builds its root-level node hierarchy. */
private void processVisualScene(XmlNode scene) {
	VisualScene vs = new VisualScene();
	_visualScenes.put(scene.getFragID(), vs);

	for (XmlNode child : scene.children()) {
		if (!child.getTag().equals("node"))
			continue;
		vs.nodes.add(processNode(child, null));
	}
}
/** Processes every &lt;image&gt; in the library, if any. */
private void processImages() {
	XmlNode libImage = _colladaNode.findChildTag("library_images", false);
	if (libImage == null)
		return; // No images

	for (XmlNode child : libImage.children()) {
		if (!child.getTag().equals("image"))
			continue;
		processImage(child);
	}
}
/**
 * Records the file name of a referenceable image.
 * Only the fragment ID and the init_from content are of interest.
 */
private void processImage(XmlNode imageNode) {
	String id = imageNode.getFragID();
	if (id == null)
		return; // We do not care about images we can not reference

	XmlNode initFrom = imageNode.findChildTag("init_from", true);
	if (initFrom == null) {
		parseAssert(false);
		return;
	}
	String fileName = (String)initFrom.getContent();
	parseAssert(fileName != null);

	_images.put(id, fileName);
}
/** Processes every &lt;material&gt; in the library, if any. */
private void processMaterials() {
	XmlNode libMats = _colladaNode.findChildTag("library_materials", false);
	if (libMats == null)
		return; // No materials

	for (XmlNode child : libMats.children()) {
		if (!child.getTag().equals("material"))
			continue;
		processMaterial(child);
	}
}
/** Records the effect URL a referenceable material points at. */
private void processMaterial(XmlNode matNode) {
	String id = matNode.getFragID();
	if (id == null)
		return; // We do not care about materials we can not reference

	XmlNode instEffect = matNode.findChildTag("instance_effect", true);
	if (instEffect == null) {
		parseAssert(false);
		return;
	}
	String effectURL = instEffect.getAttrib("url");
	if (effectURL == null) {
		parseAssert(false);
		return;
	}
	_materials.put(id, effectURL);
}
/** Processes every &lt;effect&gt; in the library, if any. */
private void processEffects() {
	XmlNode libEffects = _colladaNode.findChildTag("library_effects", false);
	if (libEffects == null)
		return; // No effects

	for (XmlNode child : libEffects.children()) {
		if (!child.getTag().equals("effect"))
			continue;
		processEffect(child);
	}
}
/**
 * Parses one &lt;effect&gt;, extracting the diffuse colour/texture and the
 * transparency settings from its common profile, and stores it by ID.
 * Only blinn, phong, lambert and constant shading models are recognized.
 */
private void processEffect(XmlNode effectNode) {
	String id = effectNode.getFragID();
	if (id == null) return; // We do not care about materials we can not reference

	XmlNode profCommon = effectNode.findChildTag("profile_COMMON", true);
	if (profCommon == null) {
		parseAssert(false);
		return; // There is no common profile
	}

	HashMap<String, XmlNode> paramMap = new HashMap<String, XmlNode>();

	// Start by building a table of all params
	for (XmlNode child : profCommon.children()) {
		String tag = child.getTag();
		if (tag.equals("newparam")) {
			String sid = child.getAttrib("sid");
			if (sid != null) paramMap.put(sid, child);
		}
	}

	XmlNode technique = profCommon.findChildTag("technique", false);
	if (technique == null) {
		parseAssert(false);
		return; // There is no common profile
	}
	// Search technique for the kind of data we care about, for now find blinn, phong or lambert
	XmlNode diffuse = null;
	XmlNode transparency = null;
	XmlNode transparent = null;
	XmlNode blinn = technique.findChildTag("blinn", false);
	XmlNode phong = technique.findChildTag("phong", false);
	XmlNode lambert = technique.findChildTag("lambert", false);
	XmlNode constant = technique.findChildTag("constant", false);
	// Later shading models override earlier ones if several are present
	if (blinn != null) {
		diffuse = blinn.findChildTag("diffuse", false);
		transparency = blinn.findChildTag("transparency", false);
		transparent = blinn.findChildTag("transparent", false);
	}
	if (phong != null) {
		diffuse = phong.findChildTag("diffuse", false);
		transparency = phong.findChildTag("transparency", false);
		transparent = phong.findChildTag("transparent", false);
	}
	if (lambert != null) {
		diffuse = lambert.findChildTag("diffuse", false);
		transparency = lambert.findChildTag("transparency", false);
		transparent = lambert.findChildTag("transparent", false);
	}
	if (constant != null) {
		// Constant shading has no diffuse term; use its emission instead
		diffuse = constant.findChildTag("emission", false);
		transparency = constant.findChildTag("transparency", false);
		transparent = constant.findChildTag("transparent", false);
	}

	// Now either parse diffuse as a color value or texture...
	Effect effect = new Effect();

	ColorTex diffuseCT = null;
	if (diffuse == null) {
		// No diffuse information: fall back to a default colour
		diffuseCT = new ColorTex();
		diffuseCT.color = new Color4d();
	} else {
		diffuseCT = getColorTex(diffuse, paramMap);
	}

	effect.diffuse = diffuseCT;

	String opaque = null;
	ColorTex transparentCT = null;
	if (transparent != null) {
		opaque = transparent.getAttrib("opaque");
		transparentCT = getColorTex(transparent, paramMap);
	}

	// There is a ton of conditions for us to handle transparency
	if (transparency != null &&
	    transparent != null &&
	    opaque != null &&
	    (opaque.equals("A_ONE") || opaque.equals("RGB_ZERO")) &&
	    transparentCT != null &&
	    transparentCT.color != null) {

		XmlNode floatNode = transparency.findChildTag("float", false);
		parseAssert(floatNode != null);

		double alpha = Double.parseDouble((String)floatNode.getContent());
		effect.transColour = new Color4d(transparentCT.color);
		if (opaque.equals("A_ONE")) {
			effect.transType = MeshData.A_ONE_TRANS;
		}
		if (opaque.equals("RGB_ZERO")) {
			effect.transType = MeshData.RGB_ZERO_TRANS;
			// Handle the weird luminance term for alpha in RGB_ZERO
			effect.transColour.a = effect.transColour.r * 0.212671 +
			                       effect.transColour.g * 0.715160 +
			                       effect.transColour.b * 0.072169;
		}
		// Bake the transparency term into the colour
		effect.transColour.r *= alpha;
		effect.transColour.g *= alpha;
		effect.transColour.b *= alpha;
		effect.transColour.a *= alpha;

		if ((effect.transColour.a >= 0.999 && opaque.equals("A_ONE")) ||
		    (effect.transColour.a <= 0.001 && opaque.equals("RGB_ZERO")) ) {
			effect.transType = MeshData.NO_TRANS; // Some meshes are effectively not transparent despite having the information
		}
	} else {
		effect.transType = MeshData.NO_TRANS;
	}

	_effects.put(id, effect);
}
/**
 * Parses a colour-or-texture element into a ColorTex. A &lt;color&gt; child
 * yields a flat colour; a &lt;texture&gt; child is resolved through the
 * sampler -&gt; surface -&gt; image indirection chain to a texture URL.
 *
 * @param node     element expected to contain exactly one color/texture child
 * @param paramMap sid -&gt; newparam lookup built from the effect's profile
 * @return the parsed value, or null after a failed parse assert
 */
private ColorTex getColorTex(XmlNode node, HashMap<String, XmlNode> paramMap) {
	if (node.getNumChildren() != 1) {
		parseAssert(false);
		return null;
	}

	XmlNode valNode = node.getChild(0);

	String tag = valNode.getTag();
	ColorTex ret = new ColorTex();
	if (tag.equals("color")) {
		double[] colVals = (double[])valNode.getContent();
		parseAssert(colVals != null && colVals.length >= 4);
		Color4d col = new Color4d(colVals[0], colVals[1], colVals[2], colVals[3]);
		ret.color = col;
		return ret;
	}

	if (!tag.equals("texture")) {
		parseAssert(false);
		return null;
	}

	// Now we have the fun dealing with COLLADA's incredible indirectness
	String texName = valNode.getAttrib("texture");

	// Find this sampler in the map
	XmlNode sampler = paramMap.get(texName);
	parseAssert(sampler != null);

	XmlNode sampler2D = sampler.findChildTag("sampler2D", false);
	parseAssert(sampler2D != null);
	XmlNode source = sampler2D.findChildTag("source", false);
	parseAssert(source != null);

	String surfaceName = (String)source.getContent();

	XmlNode surfaceParam = paramMap.get(surfaceName);
	XmlNode surface = surfaceParam.findChildTag("surface", false);
	parseAssert(surface != null);
	parseAssert(surface.getAttrib("type").equals("2D"));

	XmlNode initFrom = surface.findChildTag("init_from", false);
	parseAssert(initFrom != null);

	String imageName = (String)initFrom.getContent();
	String img = _images.get(imageName);
	parseAssert(img != null);
	try {
		// Resolve the image path relative to the document's URL
		ret.texture = new URL(_contextURL, img);
	} catch (MalformedURLException ex) {
		ex.printStackTrace();
		parseAssert(false);
	}
	return ret;
}
/** Processes every &lt;geometry&gt; in the library, if any. */
private void processGeos() {
	XmlNode libGeo = _colladaNode.findChildTag("library_geometries", false);
	if (libGeo == null)
		return; // No geometries

	for (XmlNode child : libGeo.children()) {
		if (!child.getTag().equals("geometry"))
			continue;
		processGeo(child);
	}
}
/**
 * Parses one &lt;geometry&gt; element: every &lt;mesh&gt; child is converted
 * into sub geometries and the result is stored by fragment ID.
 */
private void processGeo(XmlNode geoNode) {
	String geoID = geoNode.getFragID();
	if (geoID == null) {
		// This geometry can not be referenced, don't bother
		return;
	}

	Geometry geoData = new Geometry();

	for (XmlNode meshNode : geoNode.children()) {
		// Bug fix: the tag was compared with '==' (reference identity),
		// which only works if the parser happens to intern tag strings.
		if (meshNode.getTag().equals("mesh")) {
			parseMesh(meshNode, geoData);
		}
	}

	_geos.put(geoID, geoData);
}
/** Processes every &lt;node&gt; in the nodes library, if any. */
private void processNodes() {
	XmlNode libNodes = _colladaNode.findChildTag("library_nodes", false);
	if (libNodes == null)
		return; // No library nodes

	for (XmlNode child : libNodes.children()) {
		if (!child.getTag().equals("node"))
			continue;
		processNode(child, null);
	}
}
/**
 * Builds a SceneNode from a &lt;node&gt; element: accumulates its transform
 * children in document order, then collects geometry instances,
 * instance_node references and nested nodes.
 *
 * @param node   the XML node element
 * @param parent parent scene node to attach to, or null for a root node
 * @return the constructed scene node
 */
private SceneNode processNode(XmlNode node, SceneNode parent) {
	SceneNode sn = new SceneNode();
	sn.id = node.getFragID();
	if (sn.id != null) _namedNodes.put(sn.id, sn);

	if (parent != null) {
		parent.subNodes.add(sn);
	}

	// Build up the transformation matrix for this node.
	// Order matters: transforms compose in the order they appear.
	for (XmlNode child : node.children()) {
		String childTag = child.getTag();
		Mat4d mat = null;
		if (childTag.equals("translate")) {
			mat = transToMat(child);
		}
		if (childTag.equals("rotate")) {
			mat = rotToMat(child);
		}
		if (childTag.equals("scale")) {
			mat = scaleToMat(child);
		}
		if (childTag.equals("matrix")) {
			mat = matToMat(child);
		}
		if (mat != null) {
			sn.trans.mult4(mat);
		}
	}

	// Now handle sub geometry, sub nodes and instance nodes
	for (XmlNode child : node.children()) {
		String childTag = child.getTag();
		if (childTag.equals("instance_geometry")) {
			GeoInstInfo geoInfo = processInstGeo(child);
			sn.subGeo.add(geoInfo);
		}
		if (childTag.equals("instance_node")) {
			// Stored as an unresolved "#id" reference; resolved in visitNode()
			String nodeID = child.getAttrib("url");
			parseAssert(nodeID != null);
			sn.subInstanceNames.add(nodeID);
		}
		if (childTag.equals("node")) {
			processNode(child, sn);
		}
	}
	return sn;
}
/**
 * Parses an &lt;instance_geometry&gt;: records the geometry URL and the
 * material-symbol to material-target bindings from bind_material.
 */
private GeoInstInfo processInstGeo(XmlNode instGeo) {
	GeoInstInfo instInfo = new GeoInstInfo();
	instInfo.geoName = instGeo.getAttrib("url");

	XmlNode bindMat = instGeo.findChildTag("bind_material", false);
	if (bindMat == null)
		return instInfo; // No material bindings for this instance

	XmlNode techCommon = bindMat.findChildTag("technique_common", false);
	parseAssert(techCommon != null);
	for (XmlNode instMat : techCommon.children()) {
		if (!instMat.getTag().equals("instance_material"))
			continue;

		String symbol = instMat.getAttrib("symbol");
		String target = instMat.getAttrib("target");
		parseAssert(symbol != null && target != null);
		instInfo.materialMap.put(symbol, target);

		// TODO, properly handle rebinding vertex inputs
	}
	return instInfo;
}
/** Converts a &lt;translate&gt; element (x y z) to a translation matrix. */
private Mat4d transToMat(XmlNode transNode) {
	double[] vals = (double[])transNode.getContent();
	parseAssert(vals != null && vals.length >= 3);

	Mat4d mat = new Mat4d();
	mat.setTranslate3(new Vec3d(vals[0], vals[1], vals[2]));
	return mat;
}
/**
 * Converts a &lt;rotate&gt; element (axis-x axis-y axis-z angle-degrees)
 * to a rotation matrix.
 */
private Mat4d rotToMat(XmlNode rotNode) {
	double[] vals = (double[])rotNode.getContent();
	parseAssert(vals != null && vals.length >= 4);

	Vec3d axis = new Vec3d(vals[0], vals[1], vals[2]);
	// Bug fix: the angle was previously cast through float, needlessly
	// truncating the rotation to single precision
	double rads = Math.toRadians(vals[3]);

	Quaternion rot = new Quaternion();
	rot.setAxisAngle(axis, rads);

	Mat4d ret = new Mat4d();
	ret.setRot3(rot);
	return ret;
}
/** Converts a &lt;scale&gt; element (sx sy sz) to a scaling matrix. */
private Mat4d scaleToMat(XmlNode scaleNode) {
	double[] vals = (double[])scaleNode.getContent();
	parseAssert(vals != null && vals.length >= 3);

	Mat4d mat = new Mat4d();
	mat.scaleCols3(new Vec3d(vals[0], vals[1], vals[2]));
	return mat;
}
/** Converts a &lt;matrix&gt; element (16 row-major values) to a matrix. */
private Mat4d matToMat(XmlNode matNode) {
	double[] vals = (double[])matNode.getContent();
	parseAssert(vals != null && vals.length >= 16);
	return new Mat4d(vals);
}
/**
 * Dispatches each primitive element of a &lt;mesh&gt; to the triangle or
 * line geometry generator based on its tag.
 */
private void parseMesh(XmlNode mesh, Geometry geoData) {
	// Now try to parse geometry type
	for (XmlNode subGeo : mesh.children()) {
		String geoTag = subGeo.getTag();
		boolean isTriType = geoTag.equals("polylist")
		                 || geoTag.equals("polygons")
		                 || geoTag.equals("triangles");
		boolean isLineType = geoTag.equals("lines")
		                  || geoTag.equals("linestrip");

		if (isTriType) {
			generateTriangleGeo(subGeo, geoData);
		} else if (isLineType) {
			generateLineGeo(subGeo, geoData);
		}
	}
}
/**
 * Builds a LineSubGeo from a &lt;lines&gt; or &lt;linestrip&gt; element:
 * reads the inputs, fills the position index list, then dereferences the
 * positions into vertex pairs.
 */
private void generateLineGeo(XmlNode subGeo, Geometry geoData) {
	String geoTag = subGeo.getTag();

	SubMeshDesc smd = readGeometryInputs(subGeo);

	if (geoTag.equals("lines")) {
		parseLines(smd, subGeo);
	}
	if (geoTag.equals("linestrip")) {
		parseLinestrip(smd, subGeo);
	}

	int numVerts = smd.posDesc.indices.length;
	parseAssert(numVerts % 2 == 0); // two vertices per line segment

	// Now the SubMeshDesc should be fully populated, and we can actually produce the final triangle arrays
	LineSubGeo lsg = new LineSubGeo(numVerts);

	Vec4d[] posData = getDataArrayFromSource(smd.posDesc.source);

	lsg.materialSymbol = subGeo.getAttrib("material");
	parseAssert(lsg.materialSymbol != null);

	for (int i = 0; i < numVerts; ++i) {
		lsg.verts[i] = posData[smd.posDesc.indices[i]];
		lsg.verts[i].w = 1; // positions are points, w must be 1
	}
	geoData.lineSubGeos.add(lsg);
}
/**
 * Builds a FaceSubGeo from a triangles/polylist/polygons element: reads the
 * inputs, triangulates into flat index lists, generates per-face normals
 * when none are supplied, and deduplicates vertices through the vertex map.
 */
private void generateTriangleGeo(XmlNode subGeo, Geometry geoData) {
	String geoTag = subGeo.getTag();

	SubMeshDesc smd = readGeometryInputs(subGeo);

	boolean hasNormal = smd.normDesc != null;

	if (geoTag.equals("triangles")) {
		parseTriangles(smd, subGeo);
	}
	if (geoTag.equals("polylist")) {
		parsePolylist(smd, subGeo);
	}
	if (geoTag.equals("polygons")) {
		parsePolygons(smd, subGeo);
	}

	int numVerts = smd.posDesc.indices.length;
	parseAssert(numVerts % 3 == 0); // three corners per triangle
	if (numVerts == 0) {
		return; // degenerate element with no faces
	}

	// Now the SubMeshDesc should be fully populated, and we can actually produce the final triangle arrays
	boolean hasTexCoords = (smd.texCoordDesc != null);

	FaceSubGeo fsg = new FaceSubGeo(numVerts);

	Vec4d[] posData = getDataArrayFromSource(smd.posDesc.source);

	Vec4d[] normData = null;
	if (hasNormal) {
		normData = getDataArrayFromSource(smd.normDesc.source);
	}
	Vec4d[] texCoordData = null;
	if (hasTexCoords)
		texCoordData = getDataArrayFromSource(smd.texCoordDesc.source);

	fsg.materialSymbol = subGeo.getAttrib("material");
	parseAssert(fsg.materialSymbol != null);

	Vec4d[] generatedNormals = null;
	if (!hasNormal) {
		Vec4d t0 = new Vec4d();
		Vec4d t1 = new Vec4d();
		// Generate one normal per face via the cross product of two edges
		generatedNormals = new Vec4d[numVerts/3];
		for (int i = 0; i < numVerts / 3; ++i) {
			Vec4d p0 = posData[smd.posDesc.indices[i*3 + 0]];
			Vec4d p1 = posData[smd.posDesc.indices[i*3 + 1]];
			Vec4d p2 = posData[smd.posDesc.indices[i*3 + 2]];
			t0.sub3(p1, p0);
			t1.sub3(p2, p0);
			Vec4d norm = new Vec4d();
			norm.cross3(t0, t1);
			norm.normalize3();
			norm.w = 0; // normals are directions, w must be 0
			generatedNormals[i] = norm;
		}
	}

	for (int i = 0; i < numVerts; ++i) {
		Vec4d pos = new Vec4d(posData[smd.posDesc.indices[i]]);
		pos.w = 1;
		Vec4d normal = null;
		if (hasNormal) {
			normal = new Vec4d(normData[smd.normDesc.indices[i]]);
			normal.w = 0;
		} else {
			// Flat shading: all three corners share the face normal
			normal = generatedNormals[i/3];
		}
		Vec4d texCoord = null;
		if (hasTexCoords) {
			texCoord = texCoordData[smd.texCoordDesc.indices[i]];
		}
		fsg.indices[i] = fsg.vMap.getVertIndex(pos, normal, texCoord);
	}
	geoData.faceSubGeos.add(fsg);
}
/**
 * Reads the &lt;input&gt; children of a &lt;vertices&gt; element, filling
 * the position (and optionally normal) descriptors of the sub mesh.
 *
 * @param smd      descriptor being populated
 * @param offset   index-array offset inherited from the VERTEX input
 * @param vertices the &lt;vertices&gt; element
 */
private void readVertices(SubMeshDesc smd, int offset, XmlNode vertices) {
	// Check vertices for inputs
	for (XmlNode input : vertices.children()) {
		// Bug fix: the tag was compared with '!=' (reference identity)
		if (!input.getTag().equals("input")) {
			continue;
		}
		String semantic = input.getAttrib("semantic");
		String source = input.getAttrib("source");
		if (source == null || semantic == null)
			throw new ColException("Bad Vertex Input tag: " + input.getFragID() + " in mesh.");

		if (semantic.equals("POSITION")) {
			smd.posDesc = new DataDesc();
			smd.posDesc.source = source;
			smd.posDesc.offset = offset;
		}
		if (semantic.equals("NORMAL")) {
			smd.normDesc = new DataDesc();
			smd.normDesc.source = source;
			smd.normDesc.offset = offset;
		}
	}
}
/**
 * Fills the position indices of 'smd' from a &lt;lines&gt; element
 * (two vertices per line).
 */
private void parseLines(SubMeshDesc smd, XmlNode subGeo) {
	int count = Integer.parseInt(subGeo.getAttrib("count"));
	XmlNode pNode = subGeo.findChildTag("p", false);
	// Consistency fix: an empty element may legitimately omit <p>,
	// mirroring the count == 0 handling in parseTriangles()
	if (count == 0) {
		smd.posDesc.indices = new int[0];
		return;
	}
	if (pNode == null)
		throw new ColException("No 'p' child in 'lines' in mesh.");
	int[] ps = (int[])pNode.getContent();
	parseAssert(ps.length >= count * 2 * smd.stride);

	smd.posDesc.indices = new int[count*2];
	for (int i = 0; i < count * 2; ++i) {
		int offset = i * smd.stride;
		smd.posDesc.indices[i] = ps[offset + smd.posDesc.offset];
	}
}
/**
 * Fills the position indices of 'smd' from a &lt;linestrip&gt; element.
 * Each &lt;p&gt; child is a strip of N vertices yielding N-1 line segments,
 * which are expanded into explicit vertex pairs.
 */
private void parseLinestrip(SubMeshDesc smd, XmlNode subGeo) {
	int count = Integer.parseInt(subGeo.getAttrib("count"));
	// There should be 'count' number of 'p' tags in this element
	int[][] stripIndices = new int[count][];
	int numLines = 0;
	int nextIndex = 0;
	for (XmlNode child : subGeo.children()) {
		if (!child.getTag().equals("p")) continue;

		int[] ps = (int[])child.getContent();
		parseAssert(ps != null);
		parseAssert(nextIndex < count);

		stripIndices[nextIndex++] = ps;
		numLines += ps.length - 1; // N vertices -> N-1 segments
	}

	// We now have a list of list of all indices, split this into lines
	int nextWriteIndex = 0;
	smd.posDesc.indices = new int[numLines * 2];
	for (int[] strip : stripIndices) {
		parseAssert(strip.length >= 2);

		for (int i = 1; i < strip.length; ++i) {
			// Each segment joins consecutive strip vertices
			smd.posDesc.indices[nextWriteIndex++] = strip[i-1];
			smd.posDesc.indices[nextWriteIndex++] = strip[i];
		}
	}
}
// Fill in the indices for 'smd'
// Fill in the indices for 'smd'
/**
 * Fills position/normal/texcoord indices from a &lt;triangles&gt; element.
 * The interleaved &lt;p&gt; array is de-interleaved using each input's offset
 * and the overall stride.
 */
private void parseTriangles(SubMeshDesc smd, XmlNode subGeo) {
	int count = Integer.parseInt(subGeo.getAttrib("count"));
	XmlNode pNode = subGeo.findChildTag("p", false);
	if (count == 0) {
		// An empty element may legitimately omit <p>
		smd.posDesc.indices = new int[0];
		return;
	}
	if (pNode == null)
		throw new ColException("No 'p' child in 'triangles' in mesh.");
	int[] ps = (int[])pNode.getContent();
	parseAssert(ps.length >= count * 3 * smd.stride);

	smd.posDesc.indices = new int[count*3];
	if (smd.normDesc != null) {
		smd.normDesc.indices = new int[count*3];
	}
	if (smd.texCoordDesc != null) {
		smd.texCoordDesc.indices = new int[count*3];
	}
	for (int i = 0; i < count * 3; ++i) {
		int offset = i * smd.stride;
		smd.posDesc.indices[i] = ps[offset + smd.posDesc.offset];
		if (smd.normDesc != null) {
			smd.normDesc.indices[i] = ps[offset + smd.normDesc.offset];
		}
		if (smd.texCoordDesc != null) {
			smd.texCoordDesc.indices[i] = ps[offset + smd.texCoordDesc.offset];
		}
	}
}
// Note, this is definitely not correct, but for now assume all polygons are convex
/**
 * Fills indices from a &lt;polylist&gt; element by fan-triangulating each
 * polygon. A polygon of V vertices yields V-2 triangles, each formed from
 * consecutive vertex pairs and the last vertex.
 * Note: only valid for convex polygons (see caller comment).
 */
private void parsePolylist(SubMeshDesc smd, XmlNode subGeo) {
	int count = Integer.parseInt(subGeo.getAttrib("count"));
	XmlNode pNode = subGeo.findChildTag("p", false);
	if (pNode == null)
		throw new ColException("No 'p' child in 'polygons' in mesh.");
	int[] ps = (int[])pNode.getContent();

	XmlNode vcountNode = subGeo.findChildTag("vcount", false);
	int[] vcounts;
	if (vcountNode != null)
		vcounts = (int[])vcountNode.getContent();
	else
		vcounts = new int[0];

	parseAssert(vcounts.length == count);
	int totalVerts = 0;
	int numTriangles = 0;
	for (int i : vcounts) {
		if (i == 0) {
			continue; // degenerate polygon, contributes nothing
		}
		totalVerts += i;
		numTriangles += (i-2);
	}
	parseAssert(ps.length >= totalVerts * smd.stride);

	smd.posDesc.indices = new int[numTriangles * 3];
	if (smd.normDesc != null) {
		smd.normDesc.indices = new int[numTriangles * 3];
	}
	if (smd.texCoordDesc != null) {
		smd.texCoordDesc.indices = new int[numTriangles * 3];
	}

	int nextWriteVert = 0;
	int readVertOffset = 0;
	for (int v : vcounts) {
		if (v == 0) {
			continue;
		}
		// v is the number of vertices in this polygon
		parseAssert(v >= 3);
		// Fan triangulation: triangle i = (i, i+1, last)
		for (int i = 0; i < (v-2); ++i) {
			int vert0 = readVertOffset + i;
			int vert1 = readVertOffset + i + 1;
			int vert2 = readVertOffset + v - 1;

			smd.posDesc.indices[nextWriteVert] = ps[(vert0*smd.stride) + smd.posDesc.offset];
			if (smd.normDesc != null) {
				smd.normDesc.indices[nextWriteVert] = ps[(vert0*smd.stride) + smd.normDesc.offset];
			}
			if (smd.texCoordDesc != null) {
				smd.texCoordDesc.indices[nextWriteVert] = ps[(vert0*smd.stride) + smd.texCoordDesc.offset];
			}
			nextWriteVert++;

			smd.posDesc.indices[nextWriteVert] = ps[(vert1*smd.stride) + smd.posDesc.offset];
			if (smd.normDesc != null) {
				smd.normDesc.indices[nextWriteVert] = ps[(vert1*smd.stride) + smd.normDesc.offset];
			}
			if (smd.texCoordDesc != null) {
				smd.texCoordDesc.indices[nextWriteVert] = ps[(vert1*smd.stride) + smd.texCoordDesc.offset];
			}
			nextWriteVert++;

			smd.posDesc.indices[nextWriteVert] = ps[(vert2*smd.stride) + smd.posDesc.offset];
			if (smd.normDesc != null) {
				smd.normDesc.indices[nextWriteVert] = ps[(vert2*smd.stride) + smd.normDesc.offset];
			}
			if (smd.texCoordDesc != null) {
				smd.texCoordDesc.indices[nextWriteVert] = ps[(vert2*smd.stride) + smd.texCoordDesc.offset];
			}
			nextWriteVert++;
		}
		readVertOffset += v;
	}
}
// Note, this is definitely not correct, but for now assume all polygons are convex
/**
 * Fills indices from a &lt;polygons&gt; element by fan-triangulating each
 * &lt;p&gt; child around its first vertex. Only valid for convex polygons;
 * 'ph' (polygons with holes) are not supported.
 */
private void parsePolygons(SubMeshDesc smd, XmlNode subGeo) {

	int numTriangles = 0;

	// Find the number of triangles, for this we will need to iterate over all the polygons
	for (XmlNode n : subGeo.children()) {
		// Bug fix: tags were compared with '!=' (reference identity)
		if (!n.getTag().equals("p")) {
			continue;
		}
		int[] ps = (int[])n.getContent();
		int numVerts = ps.length / smd.stride;
		parseAssert( (ps.length % smd.stride) == 0);
		parseAssert(numVerts >= 3);
		numTriangles += numVerts - 2;
	}

	smd.posDesc.indices = new int[numTriangles * 3];
	if (smd.normDesc != null) {
		smd.normDesc.indices = new int[numTriangles * 3];
	}
	if (smd.texCoordDesc != null) {
		smd.texCoordDesc.indices = new int[numTriangles * 3];
	}

	int nextWriteVert = 0;

	for (XmlNode n : subGeo.children()) {
		if (!n.getTag().equals("p")) {
			continue;
		}
		int[] ps = (int[])n.getContent();
		// Fan triangulation anchored at vertex 0: triangle i = (0, i+1, i+2)
		for(int i = 0; i < (ps.length / smd.stride) - 2; ++i) {
			int vert0 = 0;
			int vert1 = i + 1;
			int vert2 = i + 2;

			smd.posDesc.indices[nextWriteVert] = ps[(vert0*smd.stride) + smd.posDesc.offset];
			if (smd.normDesc != null) {
				smd.normDesc.indices[nextWriteVert] = ps[(vert0*smd.stride) + smd.normDesc.offset];
			}
			if (smd.texCoordDesc != null) {
				smd.texCoordDesc.indices[nextWriteVert] = ps[(vert0*smd.stride) + smd.texCoordDesc.offset];
			}
			nextWriteVert++;

			smd.posDesc.indices[nextWriteVert] = ps[(vert1*smd.stride) + smd.posDesc.offset];
			if (smd.normDesc != null) {
				smd.normDesc.indices[nextWriteVert] = ps[(vert1*smd.stride) + smd.normDesc.offset];
			}
			if (smd.texCoordDesc != null) {
				smd.texCoordDesc.indices[nextWriteVert] = ps[(vert1*smd.stride) + smd.texCoordDesc.offset];
			}
			nextWriteVert++;

			smd.posDesc.indices[nextWriteVert] = ps[(vert2*smd.stride) + smd.posDesc.offset];
			if (smd.normDesc != null) {
				smd.normDesc.indices[nextWriteVert] = ps[(vert2*smd.stride) + smd.normDesc.offset];
			}
			if (smd.texCoordDesc != null) {
				smd.texCoordDesc.indices[nextWriteVert] = ps[(vert2*smd.stride) + smd.texCoordDesc.offset];
			}
			nextWriteVert++;
		}
	}
}
/**
 * Reads all &lt;input&gt; children of a primitive element into a
 * SubMeshDesc, resolving VERTEX inputs through their &lt;vertices&gt;
 * element. The stride is derived from the largest offset seen.
 *
 * @throws ColException if an input is malformed or no positions are found
 */
private SubMeshDesc readGeometryInputs(XmlNode subGeo) {
	SubMeshDesc smd = new SubMeshDesc();

	int maxOffset = 0;
	for (XmlNode input : subGeo.children()) {
		if (!input.getTag().equals("input")) {
			continue;
		}
		String semantic = input.getAttrib("semantic");
		String source = input.getAttrib("source");
		if (source == null || semantic == null)
			throw new ColException("Bad Geometry Input tag: " + input.getFragID());
		// NOTE(review): the offset attribute is assumed present;
		// parseInt(null) would throw here — confirm against input files
		int offset = Integer.parseInt(input.getAttrib("offset"));

		if (offset > maxOffset) { maxOffset = offset; }

		if (semantic.equals("VERTEX")) {
			// VERTEX is indirect: resolve the referenced <vertices> element
			XmlNode vertices = getNodeFromID(source);
			readVertices(smd, offset, vertices);
		}
		if (semantic.equals("NORMAL")) {
			smd.normDesc = new DataDesc();
			smd.normDesc.source = source;
			smd.normDesc.offset = offset;
		}
		if (semantic.equals("TEXCOORD") ||
		    semantic.equals("TEXCOORD0")) {
			smd.texCoordDesc = new DataDesc();
			smd.texCoordDesc.source = source;
			smd.texCoordDesc.offset = offset;
		}
	}

	if (smd.posDesc == null) {
		throw new ColException("Could not find positions for mesh.");
	}

	// Stride is one past the largest per-vertex offset
	smd.stride = maxOffset+1;
	return smd;
}
/**
* Return a meaningful list of Vectors from data source 'id'
* @param id
* @return
*/
/**
 * Return a meaningful list of Vectors from data source 'id'.
 * Results are cached by fragment ID. Missing components are padded with
 * zeros and w defaults to 1.
 *
 * @param id fragment reference of the &lt;source&gt; element
 * @return one Vec4d per accessor entry
 * @throws ColException if the source or its required children are missing
 */
Vec4d[] getDataArrayFromSource(String id) {
	// First check the cache
	Vec4d[] cached = _dataSources.get(id);
	if (cached != null) {
		return cached;
	}
	// Okay, this source hasn't be accessed yet
	XmlNode sourceNode = getNodeFromID(id);
	if (sourceNode == null) { throw new ColException("Could not find node with id: " + id); }

	XmlNode floatNode = sourceNode.findChildTag("float_array", false);
	if (floatNode == null) { throw new ColException("No float array in source: " + id); }

	int floatCount = Integer.parseInt(floatNode.getAttrib("count"));

	double[] values = (double[])floatNode.getContent();

	XmlNode techCommon = sourceNode.findChildTag("technique_common", false);
	if (techCommon == null) { throw new ColException("No technique_common in source: " + id); }

	XmlNode accessor = techCommon.findChildTag("accessor", false);
	if (accessor == null) { throw new ColException("No accessor in source: " + id); }

	int stride = Integer.parseInt(accessor.getAttrib("stride"));
	int count = Integer.parseInt(accessor.getAttrib("count"));

	parseAssert(floatCount >= count * stride);

	Vec4d[] ret = new Vec4d[count];

	int valueOffset = 0;
	for (int i = 0; i < count; ++i) {
		switch(stride) {
		case 2:
			ret[i] = new Vec4d(values[valueOffset], values[valueOffset+1], 0, 1);
			break;
		case 3:
			ret[i] = new Vec4d(values[valueOffset], values[valueOffset+1], values[valueOffset+2], 1);
			break;
		case 4:
			ret[i] = new Vec4d(values[valueOffset], values[valueOffset+1], values[valueOffset+2], values[valueOffset+3]);
			break;
		default:
			// Bug fix: an unexpected stride previously left null entries in
			// the array, causing NPEs far from the cause — fail fast instead
			parseAssert(false);
			break;
		}
		valueOffset += stride;
	}

	_dataSources.put(id, ret);

	return ret;
}
/**
 * Returns the fully-assembled mesh data produced by this parser.
 */
public MeshData getData() {
	return _finalData;
}
/**
 * This data structure is useful for turning COLLADA data arrays into flat arrays to be processed.
 * @author matt.chudleigh
 */
private static class DataDesc {
	// The fragID of the source
	public String source;
	// The offset in the index array
	public int offset;
	// Per-vertex indices into the source's data array
	int[] indices;
}
/**
 * Describes where each vertex attribute of a sub-mesh is stored.
 * Positions are required; normals and texture coordinates are optional.
 */
private static class SubMeshDesc {
	// Required vertex positions
	public DataDesc posDesc;
	// Optional normals (null when absent)
	public DataDesc normDesc;
	// Optional texture coordinates (null when absent)
	public DataDesc texCoordDesc;
	// Number of interleaved index slots per vertex (max input offset + 1)
	public int stride;
}
/**
 * Information for a geometry instance (as opposed to a geometry):
 * for now this is simply a map from material binding symbols to actual
 * material IDs, plus the name of the geometry being instanced.
 */
private static class GeoInstInfo {
	// Name (ID) of the instanced geometry
	public String geoName;
	// Material binding symbol -> actual material ID
	public final Map<String, String> materialMap = new HashMap<String, String>();
}
/**
 * SceneNode is basically a container for Collada "node" tags, this is needed to allow the system to walk the
 * node tree and properly honour the instance nodes.
 * @author matt.chudleigh
 */
private static class SceneNode {
	// The node's ID attribute
	public String id;
	// Local transform of this node relative to its parent
	public final Mat4d trans = new Mat4d();
	// Directly nested child nodes
	public final ArrayList<SceneNode> subNodes = new ArrayList<SceneNode>();
	// IDs of nodes referenced via instance_node tags (resolved later)
	public final ArrayList<String> subInstanceNames = new ArrayList<String>();
	// Geometry instances attached directly to this node
	public final ArrayList<GeoInstInfo> subGeo = new ArrayList<GeoInstInfo>();
}
/**
 * A union like data structure, this is either a color value or texture (it should always be one or the other, but never both)
 * @author matt.chudleigh
 */
private static class ColorTex {
	// Solid colour, or null when a texture is used
	public Color4d color;
	// Texture location, or null when a solid colour is used
	public URL texture;
}
/**
 * Subset of a COLLADA effect retained by this parser: the diffuse
 * colour/texture plus transparency information.
 */
private static class Effect {
	// Only hold diffuse colour for now
	public ColorTex diffuse;
	// Transparency mode selector -- exact value semantics defined elsewhere
	// in the parser; TODO confirm the accepted values
	public int transType;
	public Color4d transColour;
}
/**
 * Identity-based pairing of a line sub-geometry with the effect it is
 * rendered with, suitable for use as a hash key so each distinct
 * (geometry, effect) combination is handled once.
 */
private static class LineGeoEffectPair {
	public LineSubGeo geo;
	public Effect effect;

	public LineGeoEffectPair(LineSubGeo g, Effect e) {
		this.geo = g;
		this.effect = e;
	}

	@Override
	public int hashCode() {
		return geo.hashCode() ^ effect.hashCode();
	}

	@Override
	public boolean equals(Object o) {
		// instanceof also rejects null; equality is reference identity
		// on both members, matching the original intent.
		if (!(o instanceof LineGeoEffectPair))
			return false;
		LineGeoEffectPair that = (LineGeoEffectPair) o;
		return this.geo == that.geo && this.effect == that.effect;
	}
}
}
|
JS: Collada importer 'fix'. Generate normals for faces whose normal index is negative.
The COLLADA spec says nothing about negative indices, but if one is
encountered for a normal, treat that normal as missing and generate a new one.
Signed-off-by: Matt Chudleigh <0ca390ed93f516590d05286882efa52bb3b35616@ausenco.com>
Signed-off-by: Harvey Harrison <eadbd6b462bf3c97df0300a934c12bc2e5d1fe51@ausenco.com>
|
src/main/java/com/jaamsim/collada/ColParser.java
|
JS: Collada importer 'fix'. Generate normals that have a negative index
|
|
Java
|
apache-2.0
|
e872c8e2e9f57180e41455104dacfe1d63036153
| 0
|
LucasPickering/terrain-gen
|
package me.lucaspickering.groundwar.world.generate;
import java.util.HashSet;
import java.util.Map;
import java.util.Random;
import java.util.Set;
import me.lucaspickering.groundwar.util.Funcs;
import me.lucaspickering.groundwar.util.InclusiveRange;
import me.lucaspickering.groundwar.util.TilePoint;
import me.lucaspickering.groundwar.world.WorldHelper;
import me.lucaspickering.groundwar.world.tile.Tile;
/**
 * Places a random number of elevated "peak" tiles on the world, keeping
 * every pair of peaks at least {@link #MIN_PEAK_SEPARATION} tiles apart.
 */
public class PeakGenerator implements Generator {

    // Generation parameters
    private static final InclusiveRange PEAK_COUNT_RANGE = new InclusiveRange(4, 7);
    private static final InclusiveRange PEAK_ELEVATION_RANGE = new InclusiveRange(45, 60);
    private static final int MIN_PEAK_SEPARATION = 2; // Min distance between two peaks

    @Override
    public void generate(Map<TilePoint, Tile.Builder> world, Random random) {
        // Work on a copy of the key set, since candidates are removed as
        // they are ruled out
        final Set<TilePoint> candidates = new HashSet<>(world.keySet());
        final Set<TilePoint> chosenPeaks = new HashSet<>();
        final int targetPeakCount = PEAK_COUNT_RANGE.randomIn(random);

        // Keep picking random candidates until we have enough peaks or run out
        while (chosenPeaks.size() < targetPeakCount && !candidates.isEmpty()) {
            final TilePoint peak = Funcs.randomFromCollection(random, candidates);
            chosenPeaks.add(peak);
            // Rule out every tile too close to the new peak as a future
            // peak location
            final Set<TilePoint> excluded =
                WorldHelper.getTilesInRange(world.keySet(), peak, MIN_PEAK_SEPARATION);
            candidates.removeAll(excluded);
        }

        // Give each chosen peak a random elevation
        for (TilePoint peak : chosenPeaks) {
            world.get(peak).setElevation(PEAK_ELEVATION_RANGE.randomIn(random));
        }
    }
}
|
src/main/java/me/lucaspickering/groundwar/world/generate/PeakGenerator.java
|
package me.lucaspickering.groundwar.world.generate;
import java.util.HashSet;
import java.util.Map;
import java.util.Random;
import java.util.Set;
import me.lucaspickering.groundwar.util.Funcs;
import me.lucaspickering.groundwar.util.InclusiveRange;
import me.lucaspickering.groundwar.util.TilePoint;
import me.lucaspickering.groundwar.world.WorldHelper;
import me.lucaspickering.groundwar.world.tile.Tile;
/**
 * Places a random number of "peak" tiles on the world, keeping every pair
 * of peaks at least {@link #MIN_PEAK_SEPARATION} tiles apart, and raises
 * each chosen tile to {@link #PEAK_ELEVATION}.
 */
public class PeakGenerator implements Generator {

    // Generation parameters
    private static final InclusiveRange PEAKS_RANGE = new InclusiveRange(3, 5);
    private static final int MIN_PEAK_SEPARATION = 2; // Min distance between two peaks
    private static final int PEAK_ELEVATION = 5; // Elevation assigned to each peak tile

    @Override
    public void generate(Map<TilePoint, Tile.Builder> world, Random random) {
        // Generate peaks
        // Copy the key set because we're going to be modifying it
        final Set<TilePoint> potentialPeaks = new HashSet<>(world.keySet());
        final Set<TilePoint> peaks = new HashSet<>();
        final int peaksToGen = PEAKS_RANGE.randomIn(random);
        while (peaks.size() < peaksToGen && !potentialPeaks.isEmpty()) {
            // Pick a random peak from the set of potential peaks
            final TilePoint peak = Funcs.randomFromCollection(random, potentialPeaks);
            peaks.add(peak); // Add it to the set
            // Get all the tiles that are too close to this one to be peaks themselves,
            // and remove them from the set of potential peaks
            final Set<TilePoint> tooClose = WorldHelper.getTilesInRange(world.keySet(), peak,
                                                                        MIN_PEAK_SEPARATION);
            potentialPeaks.removeAll(tooClose);
        }
        for (TilePoint peak : peaks) {
            // Previously a magic number; named constant documents the intent
            world.get(peak).setElevation(PEAK_ELEVATION);
        }
    }
}
|
Change peak elevation handling
|
src/main/java/me/lucaspickering/groundwar/world/generate/PeakGenerator.java
|
Change peak elevation handling
|
|
Java
|
apache-2.0
|
4d88c7081a1d0631cb24df5ba5909de23e6f3ead
| 0
|
avafanasiev/groovy,aim-for-better/incubator-groovy,nobeans/incubator-groovy,nkhuyu/incubator-groovy,aim-for-better/incubator-groovy,samanalysis/incubator-groovy,armsargis/groovy,avafanasiev/groovy,adjohnson916/groovy-core,taoguan/incubator-groovy,apache/incubator-groovy,shils/groovy,mariogarcia/groovy-core,traneHead/groovy-core,graemerocher/incubator-groovy,ebourg/groovy-core,pickypg/incubator-groovy,gillius/incubator-groovy,guangying945/incubator-groovy,genqiang/incubator-groovy,taoguan/incubator-groovy,adjohnson916/incubator-groovy,bsideup/groovy-core,mariogarcia/groovy-core,paplorinc/incubator-groovy,taoguan/incubator-groovy,gillius/incubator-groovy,russel/incubator-groovy,EPadronU/incubator-groovy,groovy/groovy-core,shils/incubator-groovy,sagarsane/incubator-groovy,nkhuyu/incubator-groovy,apache/groovy,eginez/incubator-groovy,alien11689/groovy-core,groovy/groovy-core,dpolivaev/groovy,guangying945/incubator-groovy,shils/groovy,tkruse/incubator-groovy,groovy/groovy-core,russel/groovy,nkhuyu/incubator-groovy,paulk-asert/incubator-groovy,fpavageau/groovy,paulk-asert/incubator-groovy,jwagenleitner/groovy,yukangguo/incubator-groovy,ebourg/incubator-groovy,EPadronU/incubator-groovy,traneHead/groovy-core,i55ac/incubator-groovy,paplorinc/incubator-groovy,christoph-frick/groovy-core,eginez/incubator-groovy,ebourg/incubator-groovy,tkruse/incubator-groovy,alien11689/incubator-groovy,apache/groovy,shils/incubator-groovy,apache/groovy,jwagenleitner/incubator-groovy,PascalSchumacher/incubator-groovy,ebourg/groovy-core,PascalSchumacher/incubator-groovy,PascalSchumacher/incubator-groovy,adjohnson916/incubator-groovy,PascalSchumacher/incubator-groovy,jwagenleitner/incubator-groovy,kidaa/incubator-groovy,armsargis/groovy,dpolivaev/groovy,ebourg/incubator-groovy,antoaravinth/incubator-groovy,upadhyayap/incubator-groovy,nobeans/incubator-groovy,samanalysis/incubator-groovy,jwagenleitner/groovy,nobeans/incubator-groovy,adjohnson916/groovy-core,rabbitcount/incubator-groovy,aaronzirbes
/incubator-groovy,sagarsane/incubator-groovy,rabbitcount/incubator-groovy,paulk-asert/groovy,sagarsane/groovy-core,pickypg/incubator-groovy,paplorinc/incubator-groovy,aim-for-better/incubator-groovy,samanalysis/incubator-groovy,alien11689/groovy-core,fpavageau/groovy,adjohnson916/incubator-groovy,dpolivaev/groovy,rlovtangen/groovy-core,aaronzirbes/incubator-groovy,yukangguo/incubator-groovy,rlovtangen/groovy-core,christoph-frick/groovy-core,graemerocher/incubator-groovy,antoaravinth/incubator-groovy,russel/groovy,bsideup/incubator-groovy,jwagenleitner/groovy,paulk-asert/groovy,eginez/incubator-groovy,jwagenleitner/incubator-groovy,EPadronU/incubator-groovy,dpolivaev/groovy,paulk-asert/groovy,traneHead/groovy-core,kidaa/incubator-groovy,kenzanmedia/incubator-groovy,bsideup/groovy-core,kenzanmedia/incubator-groovy,bsideup/groovy-core,rlovtangen/groovy-core,avafanasiev/groovy,pledbrook/incubator-groovy,upadhyayap/incubator-groovy,ChanJLee/incubator-groovy,guangying945/incubator-groovy,i55ac/incubator-groovy,bsideup/groovy-core,alien11689/incubator-groovy,aaronzirbes/incubator-groovy,yukangguo/incubator-groovy,bsideup/incubator-groovy,pickypg/incubator-groovy,aaronzirbes/incubator-groovy,ChanJLee/incubator-groovy,antoaravinth/incubator-groovy,pledbrook/incubator-groovy,avafanasiev/groovy,shils/groovy,gillius/incubator-groovy,apache/incubator-groovy,russel/incubator-groovy,adjohnson916/groovy-core,paulk-asert/incubator-groovy,i55ac/incubator-groovy,christoph-frick/groovy-core,sagarsane/groovy-core,russel/incubator-groovy,tkruse/incubator-groovy,jwagenleitner/groovy,armsargis/groovy,graemerocher/incubator-groovy,russel/groovy,taoguan/incubator-groovy,genqiang/incubator-groovy,fpavageau/groovy,antoaravinth/incubator-groovy,ebourg/incubator-groovy,eginez/incubator-groovy,christoph-frick/groovy-core,adjohnson916/groovy-core,kidaa/incubator-groovy,jwagenleitner/incubator-groovy,sagarsane/groovy-core,pickypg/incubator-groovy,alien11689/incubator-groovy,kidaa/incubator-groovy,i
55ac/incubator-groovy,russel/groovy,fpavageau/groovy,upadhyayap/incubator-groovy,rabbitcount/incubator-groovy,russel/incubator-groovy,traneHead/groovy-core,adjohnson916/incubator-groovy,paulk-asert/incubator-groovy,guangying945/incubator-groovy,groovy/groovy-core,EPadronU/incubator-groovy,tkruse/incubator-groovy,nobeans/incubator-groovy,shils/groovy,bsideup/incubator-groovy,apache/incubator-groovy,ebourg/groovy-core,genqiang/incubator-groovy,paplorinc/incubator-groovy,groovy/groovy-core,sagarsane/groovy-core,ebourg/groovy-core,paulk-asert/groovy,gillius/incubator-groovy,bsideup/incubator-groovy,ebourg/groovy-core,nkhuyu/incubator-groovy,rabbitcount/incubator-groovy,upadhyayap/incubator-groovy,ChanJLee/incubator-groovy,christoph-frick/groovy-core,rlovtangen/groovy-core,kenzanmedia/incubator-groovy,rlovtangen/groovy-core,armsargis/groovy,samanalysis/incubator-groovy,ChanJLee/incubator-groovy,sagarsane/incubator-groovy,sagarsane/incubator-groovy,aim-for-better/incubator-groovy,mariogarcia/groovy-core,paulk-asert/incubator-groovy,adjohnson916/groovy-core,mariogarcia/groovy-core,alien11689/groovy-core,alien11689/incubator-groovy,mariogarcia/groovy-core,genqiang/incubator-groovy,apache/incubator-groovy,kenzanmedia/incubator-groovy,yukangguo/incubator-groovy,shils/incubator-groovy,alien11689/groovy-core,pledbrook/incubator-groovy,alien11689/groovy-core,graemerocher/incubator-groovy,apache/groovy,shils/incubator-groovy,sagarsane/groovy-core,PascalSchumacher/incubator-groovy,pledbrook/incubator-groovy
|
/*
$Id$
Copyright 2003 (C) James Strachan and Bob Mcwhirter. All Rights Reserved.
Redistribution and use of this software and associated documentation
("Software"), with or without modification, are permitted provided
that the following conditions are met:
1. Redistributions of source code must retain copyright
statements and notices. Redistributions must also contain a
copy of this document.
2. Redistributions in binary form must reproduce the
above copyright notice, this list of conditions and the
following disclaimer in the documentation and/or other
materials provided with the distribution.
3. The name "groovy" must not be used to endorse or promote
products derived from this Software without prior written
permission of The Codehaus. For written permission,
please contact info@codehaus.org.
4. Products derived from this Software may not be called "groovy"
nor may "groovy" appear in their names without prior written
permission of The Codehaus. "groovy" is a registered
trademark of The Codehaus.
5. Due credit should be given to The Codehaus -
http://groovy.codehaus.org/
THIS SOFTWARE IS PROVIDED BY THE CODEHAUS AND CONTRIBUTORS
``AS IS'' AND ANY EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT
NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
THE CODEHAUS OR ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package groovy.lang;
import java.beans.BeanInfo;
import java.beans.EventSetDescriptor;
import java.beans.IntrospectionException;
import java.beans.Introspector;
import java.beans.PropertyDescriptor;
import java.lang.reflect.Array;
import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.lang.reflect.Proxy;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.net.URL;
import java.security.AccessControlException;
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.security.PrivilegedActionException;
import java.security.PrivilegedExceptionAction;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.logging.Logger;
import org.codehaus.groovy.ast.ClassNode;
import org.codehaus.groovy.classgen.ReflectorGenerator;
import org.codehaus.groovy.control.CompilationUnit;
import org.codehaus.groovy.control.Phases;
import org.codehaus.groovy.runtime.ClosureListener;
import org.codehaus.groovy.runtime.DefaultGroovyMethods;
import org.codehaus.groovy.runtime.GroovyCategorySupport;
import org.codehaus.groovy.runtime.InvokerHelper;
import org.codehaus.groovy.runtime.InvokerInvocationException;
import org.codehaus.groovy.runtime.MethodClosure;
import org.codehaus.groovy.runtime.MethodHelper;
import org.codehaus.groovy.runtime.MethodKey;
import org.codehaus.groovy.runtime.NewInstanceMetaMethod;
import org.codehaus.groovy.runtime.NewStaticMetaMethod;
import org.codehaus.groovy.runtime.ReflectionMetaMethod;
import org.codehaus.groovy.runtime.Reflector;
import org.codehaus.groovy.runtime.TemporaryMethodKey;
import org.codehaus.groovy.runtime.TransformMetaMethod;
import org.objectweb.asm.ClassVisitor;
import org.objectweb.asm.ClassWriter;
/**
* Allows methods to be dynamically added to existing classes at runtime
*
* @author <a href="mailto:james@coredevelopers.net">James Strachan</a>
* @author Guillaume Laforge
* @version $Revision$
*/
public class MetaClass {
private static final Logger log = Logger.getLogger(MetaClass.class.getName());
public static final Object[] EMPTY_ARRAY = {
};
public static Class[] EMPTY_TYPE_ARRAY = {
};
protected static final Object[] ARRAY_WITH_NULL = { null };
private static boolean useReflection = false;
private MetaClassRegistry registry;
private Class theClass;
private ClassNode classNode;
private Map methodIndex = new HashMap();
private Map staticMethodIndex = new HashMap();
private List newGroovyMethodsList = new ArrayList();
//private Map propertyDescriptors = Collections.synchronizedMap(new HashMap());
private Map propertyMap = Collections.synchronizedMap(new HashMap());
private Map listeners = new HashMap();
private Map methodCache = Collections.synchronizedMap(new HashMap());
private Map staticMethodCache = Collections.synchronizedMap(new HashMap());
private MetaMethod genericGetMethod;
private MetaMethod genericSetMethod;
private List constructors;
private List allMethods = new ArrayList();
private List interfaceMethods;
private Reflector reflector;
private boolean initialised;
// we only need one of these that can be reused over and over.
private MetaProperty arrayLengthProperty = new MetaArrayLengthProperty();
/**
 * Builds the meta-level view of {@code theClass}: collects its declared
 * constructors and methods, introspects its JavaBean properties, and maps
 * each listener method name to a meta method that invokes the matching
 * add-listener method (so closures can later be attached as listeners).
 *
 * @param registry the registry this meta class belongs to
 * @param theClass the class being described
 * @throws IntrospectionException if JavaBeans introspection fails
 */
public MetaClass(MetaClassRegistry registry, final Class theClass) throws IntrospectionException {
    this.registry = registry;
    this.theClass = theClass;
    constructors = Arrays.asList(theClass.getDeclaredConstructors());
    addMethods(theClass);
    // introspect inside a privileged block so this works under a security manager
    BeanInfo info = null;
    try {
        info =(BeanInfo) AccessController.doPrivileged(new PrivilegedExceptionAction() {
            public Object run() throws IntrospectionException {
                return Introspector.getBeanInfo(theClass);
            }
        });
    } catch (PrivilegedActionException pae) {
        // Unwrap so callers see the original IntrospectionException
        if (pae.getException() instanceof IntrospectionException) {
            throw (IntrospectionException) pae.getException();
        } else {
            throw new RuntimeException(pae.getException());
        }
    }
    PropertyDescriptor[] descriptors = info.getPropertyDescriptors();
    // build up the metaproperties based on the public fields, property descriptors,
    // and the getters and setters
    setupProperties(descriptors);
    /* old code
    for (int i = 0; i < descriptors.length; i++) {
        PropertyDescriptor descriptor = descriptors[i];
        propertyDescriptors.put(descriptor.getName(), descriptor);
    }
    */
    // Register a meta method per listener method name, invoking the
    // event set's add-listener method
    EventSetDescriptor[] eventDescriptors = info.getEventSetDescriptors();
    for (int i = 0; i < eventDescriptors.length; i++) {
        EventSetDescriptor descriptor = eventDescriptors[i];
        Method[] listenerMethods = descriptor.getListenerMethods();
        for (int j = 0; j < listenerMethods.length; j++) {
            Method listenerMethod = listenerMethods[j];
            MetaMethod metaMethod = createMetaMethod(descriptor.getAddListenerMethod());
            listeners.put(listenerMethod.getName(), metaMethod);
        }
    }
}
/**
 * @return true if method invocation currently falls back to reflection
 *         instead of generated bytecode
 */
public static boolean isUseReflection() {
    return useReflection;
}
/**
 * Allows reflection to be enabled in situations where bytecode generation
 * of method invocations causes issues.
 *
 * @param useReflection true to invoke methods via reflection rather than
 *        generated bytecode (global setting for all MetaClass instances)
 */
public static void setUseReflection(boolean useReflection) {
    MetaClass.useReflection = useReflection;
}
/**
 * Registers methods inherited from superclasses (excluding Object), static
 * helper methods contributed by interfaces, and finally the methods of
 * Object itself, so that more specific methods take precedence.
 */
private void addInheritedMethods(Class theClass) {
    // Walk up the superclass chain, stopping before Object
    Class ancestor = theClass.getSuperclass();
    while (ancestor != null && ancestor != Object.class) {
        addMethods(ancestor);
        addNewStaticMethodsFrom(ancestor);
        ancestor = ancestor.getSuperclass();
    }
    // Pick up static helpers declared against any of our interfaces
    Class[] interfaces = theClass.getInterfaces();
    for (int idx = 0; idx < interfaces.length; idx++) {
        addNewStaticMethodsFrom(interfaces[idx]);
    }
    // Object's methods go last (all interfaces derive from Object), which
    // ensures List and Collection methods come before Object's
    if (theClass != Object.class) {
        addMethods(Object.class);
        addNewStaticMethodsFrom(Object.class);
    }
    // Object arrays additionally inherit the Object[] helper methods
    if (theClass.isArray() && !theClass.equals(Object[].class)) {
        addNewStaticMethodsFrom(Object[].class);
    }
}
/**
 * @return all the normal instance methods available on this class for the
 *         given name, including any category methods currently in scope;
 *         never null (an empty list is returned when nothing matches)
 */
public List getMethods(String name) {
    List answer = (List) methodIndex.get(name);
    List used = GroovyCategorySupport.getCategoryMethods(theClass, name);
    if (used != null) {
        if (answer != null) {
            // Merge into a fresh list: calling answer.addAll(used) would
            // mutate the shared list stored in methodIndex, permanently
            // leaking the (temporary) category methods into the index.
            List merged = new ArrayList(answer);
            merged.addAll(used);
            answer = merged;
        } else {
            answer = used;
        }
    }
    if (answer == null) {
        answer = Collections.EMPTY_LIST;
    }
    return answer;
}
/**
 * @return all the normal static methods avaiable on this class for the
 *         given name
 */
public List getStaticMethods(String name) {
    List answer = (List) staticMethodIndex.get(name);
    // Never return null; callers iterate the result directly
    if (answer == null) {
        return Collections.EMPTY_LIST;
    }
    return answer;
}
/**
 * Allows a static method definition to be added to this meta class so it
 * can be invoked as if it were an instance method of the target class.
 * May only be called before the meta class is initialised.
 *
 * @param method the static method to expose as an instance method
 */
protected void addNewInstanceMethod(Method method) {
    // Guard clause: the method tables are frozen once initialised
    if (initialised) {
        throw new RuntimeException("Already initialized, cannot add new method: " + method);
    }
    NewInstanceMetaMethod newMethod = new NewInstanceMetaMethod(createMetaMethod(method));
    addMethod(newMethod);
    addNewInstanceMethod(newMethod);
}
// Records a Groovy-added instance method in the list of new methods
protected void addNewInstanceMethod(MetaMethod method) {
    newGroovyMethodsList.add(method);
}
/**
 * Adds a static method definition to this meta class. May only be called
 * before the meta class is initialised.
 *
 * @param method the static method to add
 */
protected void addNewStaticMethod(Method method) {
    // Guard clause: the method tables are frozen once initialised
    if (initialised) {
        throw new RuntimeException("Already initialized, cannot add new method: " + method);
    }
    NewStaticMetaMethod newMethod = new NewStaticMetaMethod(createMetaMethod(method));
    addMethod(newMethod);
    addNewStaticMethod(newMethod);
}
// Records a Groovy-added static method in the list of new methods
protected void addNewStaticMethod(MetaMethod method) {
    newGroovyMethodsList.add(method);
}
// Normalizes the single 'arguments' value to an Object[] via asArray,
// then delegates to the array-based overload
public Object invokeMethod(Object object, String methodName, Object arguments) {
    return invokeMethod(object, methodName, asArray(arguments));
}
/**
 * Invokes the given method on the object.
 *
 * If no matching method is found, falls back to looking up a property of
 * the same name; when that property holds a Closure it is called with the
 * arguments instead.
 *
 * @param object the receiver; must not be null
 * @param methodName the name of the method to invoke
 * @param arguments the call arguments
 * @return the invocation result
 * @throws MissingMethodException when neither a method nor a callable
 *         closure property matches
 */
public Object invokeMethod(Object object, String methodName, Object[] arguments) {
    if (object == null) {
        throw new NullPointerException("Cannot invoke method: " + methodName + " on null object");
    }
    MetaMethod method = retrieveMethod(object, methodName, arguments);
    if (method != null) {
        return doMethodInvoke(object, method, arguments);
    } else {
        // if no method was found, try to find a closure defined as a field of the class and run it
        try {
            Object value = this.getProperty(object, methodName);
            if (value instanceof Closure && object!=this) {
                Closure closure = (Closure) value;
                closure.setDelegate(this);
                return closure.call(arguments);
            }
            else {
                throw new MissingMethodException(methodName, theClass, arguments);
            }
        }
        catch (Exception e) {
            // NOTE(review): any exception from the property lookup or from
            // the closure call itself is swallowed and reported as a missing
            // method, hiding the original cause -- consider preserving 'e'.
            throw new MissingMethodException(methodName, theClass, arguments);
        }
    }
}
/**
 * Looks up the meta method for the given call, consulting the method
 * cache first; a cacheable pick is stored for subsequent calls.
 */
protected MetaMethod retrieveMethod(Object owner, String methodName, Object[] arguments) {
    MethodKey lookupKey = new TemporaryMethodKey(methodName, arguments);
    MetaMethod found = (MetaMethod) methodCache.get(lookupKey);
    if (found != null) {
        return found;
    }
    found = pickMethod(owner, methodName, arguments);
    if (found != null && found.isCacheable()) {
        // Cache under a permanent copy of the (temporary) key
        methodCache.put(lookupKey.createCopy(), found);
    }
    return found;
}
/**
 * Looks up the meta method for the given name and parameter types,
 * consulting the method cache first; a cacheable pick is stored for
 * subsequent calls.
 */
public MetaMethod retrieveMethod(String methodName, Class[] arguments) {
    MethodKey lookupKey = new TemporaryMethodKey(methodName, arguments);
    MetaMethod found = (MetaMethod) methodCache.get(lookupKey);
    if (found != null) {
        return found;
    }
    found = pickMethod(methodName, arguments); // todo: should this also consult pickStaticMethod?
    if (found != null && found.isCacheable()) {
        // Cache under a permanent copy of the (temporary) key
        methodCache.put(lookupKey.createCopy(), found);
    }
    return found;
}
/**
 * Finds a constructor matching the given argument types: first by strict
 * matching, then allowing type coercion. Returns null when none matches.
 */
public Constructor retrieveConstructor(Class[] arguments) {
    Constructor found = (Constructor) chooseMethod("<init>", constructors, arguments, false);
    if (found == null) {
        // No exact match; retry allowing coercion
        found = (Constructor) chooseMethod("<init>", constructors, arguments, true);
    }
    return found;
}
/**
 * Looks up a static meta method by name and parameter types, caching any
 * result in staticMethodCache for subsequent calls.
 */
public MetaMethod retrieveStaticMethod(String methodName, Class[] arguments) {
    MethodKey lookupKey = new TemporaryMethodKey(methodName, arguments);
    MetaMethod found = (MetaMethod) staticMethodCache.get(lookupKey);
    if (found == null) {
        found = pickStaticMethod(methodName, arguments);
        if (found != null) {
            // Cache under a permanent copy of the (temporary) key
            staticMethodCache.put(lookupKey.createCopy(), found);
        }
    }
    return found;
}
/**
 * Picks which method to invoke for the given object, method name and arguments.
 *
 * Tries a strict match first, then a coercing match; as a last resort, a
 * single List argument is unpacked into individual arguments (so e.g.
 * JFrame.setLocation([100, 100]) works) and the resulting method is wrapped
 * so the unpacking is repeated on every invocation.
 *
 * @return the chosen meta method, or null when nothing matches
 */
protected MetaMethod pickMethod(Object object, String methodName, Object[] arguments) {
    MetaMethod method = null;
    List methods = getMethods(methodName);
    if (!methods.isEmpty()) {
        Class[] argClasses = convertToTypeArray(arguments);
        // First pass: strict matching, no coercion
        method = (MetaMethod) chooseMethod(methodName, methods, argClasses, false);
        if (method == null) {
            // Second pass: allow type coercion
            method = (MetaMethod) chooseMethod(methodName, methods, argClasses, true);
            if (method == null) {
                int size = (arguments != null) ? arguments.length : 0;
                if (size == 1) {
                    Object firstArgument = arguments[0];
                    if (firstArgument instanceof List) {
                        // lets coerce the list arguments into an array of
                        // arguments
                        // e.g. calling JFrame.setLocation( [100, 100] )
                        List list = (List) firstArgument;
                        arguments = list.toArray();
                        argClasses = convertToTypeArray(arguments);
                        method = (MetaMethod) chooseMethod(methodName, methods, argClasses, true);
                        if (method==null) return null;
                        // Wrap so the List is unpacked on every future call too
                        return new TransformMetaMethod(method) {
                            public Object invoke(Object object, Object[] arguments) throws Exception {
                                Object firstArgument = arguments[0];
                                List list = (List) firstArgument;
                                arguments = list.toArray();
                                return super.invoke(object, arguments);
                            }
                        };
                    }
                }
            }
        }
    }
    return method;
}
/**
 * pick a method in a strict manner, i.e., without reinterpreting the first
 * List argument. This method is used only by ClassGenerator for static
 * binding; no type coercion is attempted at classgen time.
 *
 * @param methodName the method name
 * @param arguments the compile-time argument types
 * @return the strictly-matching method, or null when none matches
 */
protected MetaMethod pickMethod(String methodName, Class[] arguments) {
    List candidates = getMethods(methodName);
    if (candidates.isEmpty()) {
        return null;
    }
    // strict matching only -- no coercion at classgen time
    return (MetaMethod) chooseMethod(methodName, candidates, arguments, false);
}
/**
 * Invokes a static method on the class this meta class describes,
 * consulting (and filling) the static method cache.
 *
 * @param object the receiver passed through to the invocation
 * @param methodName the name of the static method
 * @param arguments the call arguments
 * @return the invocation result
 * @throws MissingMethodException when no matching static method exists
 */
public Object invokeStaticMethod(Object object, String methodName, Object[] arguments) {
    // lets try use the cache to find the method
    MethodKey methodKey = new TemporaryMethodKey(methodName, arguments);
    MetaMethod method = (MetaMethod) staticMethodCache.get(methodKey);
    if (method == null) {
        method = pickStaticMethod(object, methodName, arguments);
        if (method != null) {
            staticMethodCache.put(methodKey.createCopy(), method);
        }
    }
    if (method != null) {
        return doMethodInvoke(object, method, arguments);
    }
    /* superseded implementation, kept for reference
    List methods = getStaticMethods(methodName);
    if (!methods.isEmpty()) {
        MetaMethod method = (MetaMethod) chooseMethod(methodName, methods, arguments, false);
        if (method != null) {
            return doMethodInvoke(theClass, method, arguments);
        }
    }
    if (theClass != Class.class) {
        try {
            return registry.getMetaClass(Class.class).invokeMethod(object, methodName, arguments);
        }
        catch (GroovyRuntimeException e) {
            // throw our own exception
        }
    }
    */
    throw new MissingMethodException(methodName, theClass, arguments);
}
/**
 * Chooses a static method for the given call; when nothing matches and
 * this is not the meta class of Class itself, falls back to the meta
 * class of Class (so methods of java.lang.Class are still found).
 */
protected MetaMethod pickStaticMethod(Object object, String methodName, Object[] arguments) {
    MetaMethod found = null;
    List candidates = getStaticMethods(methodName);
    if (!candidates.isEmpty()) {
        found = (MetaMethod) chooseMethod(methodName, candidates, convertToTypeArray(arguments), false);
    }
    if (found == null && theClass != Class.class) {
        // Fall back to the meta class of java.lang.Class
        MetaClass classMetaClass = registry.getMetaClass(Class.class);
        found = classMetaClass.pickMethod(object, methodName, arguments);
    }
    return found;
}
/**
 * Chooses a static method by name and parameter types, strictly (no
 * coercion, kept consistent with the object-based pickStaticMethod);
 * falls back to the meta class of Class when nothing matches and this is
 * not the meta class of Class itself.
 */
protected MetaMethod pickStaticMethod(String methodName, Class[] arguments) {
    MetaMethod found = null;
    List candidates = getStaticMethods(methodName);
    if (!candidates.isEmpty()) {
        // strict matching only, to stay consistent with the other overload
        found = (MetaMethod) chooseMethod(methodName, candidates, arguments, false);
    }
    if (found == null && theClass != Class.class) {
        // Fall back to the meta class of java.lang.Class
        MetaClass classMetaClass = registry.getMetaClass(Class.class);
        found = classMetaClass.pickMethod(methodName, arguments);
    }
    return found;
}
/**
 * Constructs a new instance of the described class for the given
 * arguments. Tries a strict constructor match, then a coercing match;
 * finally, a single Map argument invokes the no-arg constructor and
 * applies the map entries as bean properties.
 *
 * @param arguments the constructor arguments
 * @return the new instance
 * @throws GroovyRuntimeException when no matching constructor is found
 */
public Object invokeConstructor(Object[] arguments) {
    Class[] argClasses = convertToTypeArray(arguments);
    // First pass: strict matching
    Constructor constructor = (Constructor) chooseMethod("<init>", constructors, argClasses, false);
    if (constructor != null) {
        return doConstructorInvoke(constructor, arguments);
    }
    else {
        // Second pass: allow type coercion
        constructor = (Constructor) chooseMethod("<init>", constructors, argClasses, true);
        if (constructor != null) {
            return doConstructorInvoke(constructor, arguments);
        }
    }
    // Map-based construction: new Foo(name: 'x', size: 1)
    if (arguments.length == 1) {
        Object firstArgument = arguments[0];
        if (firstArgument instanceof Map) {
            constructor = (Constructor) chooseMethod("<init>", constructors, EMPTY_TYPE_ARRAY, false);
            if (constructor != null) {
                Object bean = doConstructorInvoke(constructor, EMPTY_ARRAY);
                setProperties(bean, ((Map) firstArgument));
                return bean;
            }
        }
    }
    throw new GroovyRuntimeException(
                "Could not find matching constructor for: "
                    + theClass.getName()
                    + "("+InvokerHelper.toTypeString(arguments)+")");
}
/**
 * Sets a number of bean properties from the given Map where the keys are
 * the String names of properties and the values are the values of the
 * properties to set. Map keys with no matching property are skipped.
 *
 * @param bean the object whose properties are set
 * @param map property name to value
 */
public void setProperties(Object bean, Map map) {
    for (Iterator iter = map.entrySet().iterator(); iter.hasNext();) {
        Map.Entry entry = (Map.Entry) iter.next();
        String key = entry.getKey().toString();
        // do we have this property?
        if(propertyMap.get(key) == null)
            continue;
        Object value = entry.getValue();
        try {
            setProperty(bean, key, value);
        }
        catch (GroovyRuntimeException e) {
            // lets ignore missing properties: map-based construction is
            // deliberately best-effort
            /** todo should replace this code with a getMetaProperty(key) != null check
                i.e. don't try and set a non-existent property
            */
        }
    }
}
    /**
     * Looks up the named property on the given object. Resolution falls
     * through several strategies in order: a MetaProperty from the property
     * map, a generic get(String) method, treating the name as a method name
     * (returning a MethodClosure), a reflective getter, and finally special
     * cases for Class statics, Collections, Object arrays, public fields and
     * listener properties.
     *
     * @return the given property's value on the object
     * @throws MissingPropertyException when no strategy can resolve the property
     */
    public Object getProperty(final Object object, final String property) {
        // (historical note: an older implementation based on
        // PropertyDescriptor lookup was replaced by the metaProperty map.)
        // look for the property in our map
        MetaProperty mp = (MetaProperty) propertyMap.get(property);
        if(mp != null) {
            try {
                // delegate the get operation to the metaproperty
                return mp.getProperty(object);
            }
            catch(Exception e) {
                // NOTE(review): the original cause is dropped here — only the
                // property name survives in the thrown exception
                throw new GroovyRuntimeException("Cannot read property: " + property);
            }
        }
        if (genericGetMethod == null) {
            // Make sure there isn't a generic get(String) method in the "use" cases
            List possibleGenericMethods = getMethods("get");
            if (possibleGenericMethods != null) {
                for (Iterator i = possibleGenericMethods.iterator(); i.hasNext(); ) {
                    MetaMethod mmethod = (MetaMethod) i.next();
                    Class[] paramTypes = mmethod.getParameterTypes();
                    if (paramTypes.length == 1 && paramTypes[0] == String.class) {
                        Object[] arguments = {property};
                        Object answer = doMethodInvoke(object, mmethod, arguments);
                        return answer;
                    }
                }
            }
        }
        else {
            Object[] arguments = { property };
            Object answer = doMethodInvoke(object, genericGetMethod, arguments);
            // jes bug? a property retrieved via a generic get() can't have a null value?
            // (a null answer falls through to the remaining strategies below)
            if (answer != null) {
                return answer;
            }
        }
        // is the property the name of a method - in which case return a
        // closure bound to that method
        List methods = getMethods(property);
        if (!methods.isEmpty()) {
            return new MethodClosure(object, property);
        }
        // lets try invoke a static getter method
        // this case is for protected fields. I wish there was a better way...
        Exception lastException = null;
        try {
            MetaMethod method = findGetter(object, "get" + capitalize(property));
            if (method != null) {
                return doMethodInvoke(object, method, EMPTY_ARRAY);
            }
        }
        catch (GroovyRuntimeException e) {
            // remembered so it can be reported as the cause of the final failure
            lastException = e;
        }
        /** todo or are we an extensible groovy class? */
        if (genericGetMethod != null) {
            // a generic get() existed but returned null: treat as "property absent"
            return null;
        }
        else {
            /** todo these special cases should be special MetaClasses maybe */
            if (object instanceof Class) {
                // lets try a static field
                return getStaticProperty((Class) object, property);
            }
            if (object instanceof Collection) {
                return DefaultGroovyMethods.getAt((Collection) object, property);
            }
            if (object instanceof Object[]) {
                return DefaultGroovyMethods.getAt(Arrays.asList((Object[]) object), property);
            }
            if (object instanceof Object) {
                try {
                    // lets try a public (declared) field as a last direct access
                    Field field = object.getClass().getDeclaredField(property);
                    return field.get(object);
                }
                catch (Exception e1) {
                    // fall through
                }
            }
            MetaMethod addListenerMethod = (MetaMethod) listeners.get(property);
            if (addListenerMethod != null) {
                /* @todo one day we could try return the previously registered Closure listener for easy removal */
                return null;
            }
            if (lastException == null)
                throw new MissingPropertyException(property, theClass);
            else
                throw new MissingPropertyException(property, theClass, lastException);
        }
    }
/**
* Get all the properties defined for this type
* @return a list of MetaProperty objects
*/
public List getProperties() {
// simply return the values of the metaproperty map as a List
return new ArrayList(propertyMap.values());
}
    /**
     * Builds the property map (MetaProperty objects keyed on property name)
     * in three passes: public fields up the superclass chain become
     * MetaFieldProperty entries, then JavaBean property descriptors become
     * MetaBeanProperty entries, then stray get/set methods not covered by a
     * descriptor are folded in. Earlier passes win: a name already present
     * in the map is never replaced.
     *
     * @param propertyDescriptors the bean property descriptors for theClass
     */
    protected void setupProperties(PropertyDescriptor[] propertyDescriptors) {
        MetaProperty mp;
        Method method;
        MetaMethod getter = null;
        MetaMethod setter = null;
        Class klass;
        // first get the public fields and create MetaFieldProperty objects
        klass = theClass;
        while(klass != null) {
            Field[] fields = klass.getDeclaredFields();
            for(int i = 0; i < fields.length; i++) {
                // we're only interested in publics
                if((fields[i].getModifiers() & java.lang.reflect.Modifier.PUBLIC) == 0)
                    continue;
                // see if we already got this
                if(propertyMap.get(fields[i].getName()) != null)
                    continue;
                // stick it in there!
                propertyMap.put(fields[i].getName(), new MetaFieldProperty(fields[i]));
            }
            // now get the super class
            klass = klass.getSuperclass();
        }
        // if this an Array, then add the special read-only "length" property
        if(theClass.isArray()) {
            propertyMap.put("length", arrayLengthProperty);
        }
        // now iterate over the map of property descriptors and generate
        // MetaBeanProperty objects
        for(int i=0; i<propertyDescriptors.length; i++) {
            PropertyDescriptor pd = propertyDescriptors[i];
            // skip if the field already exists in the map
            if(propertyMap.get(pd.getName()) != null)
                continue;
            // skip if the property type is unknown (this seems to be the case if the
            // property descriptor is based on a setX() method that has two parameters,
            // which is not a valid property)
            if(pd.getPropertyType() == null)
                continue;
            // get the getter method
            method = pd.getReadMethod();
            if(method != null)
                getter = findMethod(method);
            else
                getter = null;
            // get the setter method
            method = pd.getWriteMethod();
            if(method != null)
                setter = findMethod(method);
            else
                setter = null;
            // now create the MetaProperty object
            mp = new MetaBeanProperty(pd.getName(), pd.getPropertyType(), getter, setter);
            // put it in the list
            propertyMap.put(pd.getName(), mp);
        }
        // now look for any stray getters that may be used to define a property
        klass = theClass;
        while(klass != null) {
            Method[] methods = klass.getDeclaredMethods();
            for (int i = 0; i < methods.length; i++) {
                // filter out the privates
                if(Modifier.isPublic(methods[i].getModifiers()) == false)
                    continue;
                method = methods[i];
                String methodName = method.getName();
                // is this a getter? (getXxx with no parameters)
                if(methodName.startsWith("get") &&
                    methodName.length() > 3 &&
                    method.getParameterTypes().length == 0) {
                    // get the name of the property (decapitalize the first letter)
                    String propName = methodName.substring(3,4).toLowerCase() + methodName.substring(4);
                    // is this property already accounted for?
                    mp = (MetaProperty) propertyMap.get(propName);
                    if(mp != null) {
                        // we may have already found the setter for this
                        if(mp instanceof MetaBeanProperty && ((MetaBeanProperty) mp).getGetter() == null) {
                            // update the getter method to this one
                            ((MetaBeanProperty) mp).setGetter(findMethod(method));
                        }
                    }
                    else {
                        // we need to create a new property object;
                        // type of the property is what the get method returns
                        MetaBeanProperty mbp = new MetaBeanProperty(propName,
                                                                    method.getReturnType(),
                                                                    findMethod(method), null);
                        // add it to the map
                        propertyMap.put(propName, mbp);
                    }
                }
                else if(methodName.startsWith("set") &&
                    methodName.length() > 3 &&
                    method.getParameterTypes().length == 1) {
                    // get the name of the property (decapitalize the first letter)
                    String propName = methodName.substring(3,4).toLowerCase() + methodName.substring(4);
                    // did we already find the getter of this?
                    mp = (MetaProperty) propertyMap.get(propName);
                    if(mp != null) {
                        if(mp instanceof MetaBeanProperty && ((MetaBeanProperty) mp).getSetter() == null) {
                            // update the setter method to this one
                            ((MetaBeanProperty) mp).setSetter(findMethod(method));
                        }
                    }
                    else {
                        // this is a new property to add;
                        // type of the property is the setter's single parameter
                        MetaBeanProperty mbp = new MetaBeanProperty(propName,
                                                                    method.getParameterTypes()[0],
                                                                    null,
                                                                    findMethod(method));
                        // add it to the map
                        propertyMap.put(propName, mbp);
                    }
                }
            }
            // now get the super class
            klass = klass.getSuperclass();
        }
    }
    /**
     * Sets the named property on an object to the given value. Resolution
     * order: a MetaProperty from the property map (with List/array-to-
     * constructor coercion attempted on failure), a listener-style
     * add*Listener method when the value is a Closure, a generic
     * set(String, Object) method, and finally a reflective setX(...) call
     * falling back to direct field access.
     *
     * @throws MissingPropertyException when the property cannot be set
     */
    public void setProperty(Object object, String property, Object newValue) {
        // (historical note: an older implementation based on
        // PropertyDescriptor lookup was replaced by the MetaProperty map.)
        MetaProperty mp = (MetaProperty) propertyMap.get(property);
        if(mp != null) {
            try {
                mp.setProperty(object, newValue);
                return;
            }
            catch(ReadOnlyPropertyException e) {
                // just rethrow it; there's nothing left to do here
                throw e;
            }
            catch (Exception e) {
                // if the value is a List see if we can construct the value
                // from a constructor
                if (newValue == null)
                    return;
                if (newValue instanceof List) {
                    List list = (List) newValue;
                    int params = list.size();
                    // try any constructor of the property type whose arity matches
                    Constructor[] constructors = mp.getType().getConstructors();
                    for (int i = 0; i < constructors.length; i++) {
                        Constructor constructor = constructors[i];
                        if (constructor.getParameterTypes().length == params) {
                            Object value = doConstructorInvoke(constructor, list.toArray());
                            mp.setProperty(object, value);
                            return;
                        }
                    }
                    // if value is an array, convert the list into one
                    Class parameterType = mp.getType();
                    if (parameterType.isArray()) {
                        Object objArray = asPrimitiveArray(list, parameterType);
                        mp.setProperty(object, objArray);
                        return;
                    }
                }
                // if value is an multidimensional array
                // jes currently this logic only supports metabeansproperties and
                // not metafieldproperties. It shouldn't be too hard to support
                // the latter...
                if (newValue.getClass().isArray() && mp instanceof MetaBeanProperty) {
                    MetaBeanProperty mbp = (MetaBeanProperty) mp;
                    List list = Arrays.asList((Object[])newValue);
                    MetaMethod setter = mbp.getSetter();
                    Class parameterType = setter.getParameterTypes()[0];
                    Class arrayType = parameterType.getComponentType();
                    Object objArray = Array.newInstance(arrayType, list.size());
                    // convert each row of the outer array via asPrimitiveArray
                    for (int i = 0; i < list.size(); i++) {
                        List list2 =Arrays.asList((Object[]) list.get(i));
                        Object objArray2 = asPrimitiveArray(list2, arrayType);
                        Array.set(objArray, i, objArray2);
                    }
                    doMethodInvoke(object, setter, new Object[]{
                        objArray
                    });
                    return;
                }
                throw new MissingPropertyException(property, theClass, e);
            }
        }
        try {
            // listener-style property: setting a Closure on it registers a listener
            MetaMethod addListenerMethod = (MetaMethod) listeners.get(property);
            if (addListenerMethod != null && newValue instanceof Closure) {
                // lets create a dynamic proxy invoking the closure
                Object proxy =
                    createListenerProxy(addListenerMethod.getParameterTypes()[0], property, (Closure) newValue);
                doMethodInvoke(object, addListenerMethod, new Object[] { proxy });
                return;
            }
            if (genericSetMethod == null) {
                // Make sure there isn't a generic set(String, Object) method in the "use" cases
                List possibleGenericMethods = getMethods("set");
                if (possibleGenericMethods != null) {
                    for (Iterator i = possibleGenericMethods.iterator(); i.hasNext(); ) {
                        MetaMethod mmethod = (MetaMethod) i.next();
                        Class[] paramTypes = mmethod.getParameterTypes();
                        if (paramTypes.length == 2 && paramTypes[0] == String.class) {
                            Object[] arguments = {property, newValue};
                            Object answer = doMethodInvoke(object, mmethod, arguments);
                            return;
                        }
                    }
                }
            }
            else {
                Object[] arguments = { property, newValue };
                doMethodInvoke(object, genericSetMethod, arguments);
                return;
            }
            /** todo or are we an extensible class? */
            // lets try invoke the set method
            // this is kind of ugly: if it is a protected field, we fall
            // all the way down to this klunky code. Need a better
            // way to handle this situation...
            String method = "set" + capitalize(property);
            try {
                invokeMethod(object, method, new Object[] { newValue });
            }
            catch (MissingMethodException e1) {
                try {
                    // direct (declared) field assignment as the final fallback
                    Field field = object.getClass().getDeclaredField(property);
                    field.set(object, newValue);
                }
                catch (Exception e2) {
                    throw new MissingPropertyException(property, theClass, e2);
                }
            }
        }
        catch (GroovyRuntimeException e) {
            throw new MissingPropertyException(property, theClass, e);
        }
        // NOTE(review): control reaches here even after a *successful*
        // invokeMethod or field.set above — this looks like a missing
        // "return" before the throw; confirm whether the unconditional
        // MissingPropertyException is intended.
        // if we got here, the damn thing just aint there...
        throw new MissingPropertyException(property, theClass);
    }
/**
* @param list
* @param parameterType
* @return
*/
private Object asPrimitiveArray(List list, Class parameterType) {
Class arrayType = parameterType.getComponentType();
Object objArray = Array.newInstance(arrayType, list.size());
for (int i = 0; i < list.size(); i++) {
Object obj = list.get(i);
if (arrayType.isPrimitive()) {
if (obj instanceof Integer) {
Array.setInt(objArray, i, ((Integer) obj).intValue());
}
else if (obj instanceof Double) {
Array.setDouble(objArray, i, ((Double) obj).doubleValue());
}
else if (obj instanceof Boolean) {
Array.setBoolean(objArray, i, ((Boolean) obj).booleanValue());
}
else if (obj instanceof Long) {
Array.setLong(objArray, i, ((Long) obj).longValue());
}
else if (obj instanceof Float) {
Array.setFloat(objArray, i, ((Float) obj).floatValue());
}
else if (obj instanceof Character) {
Array.setChar(objArray, i, ((Character) obj).charValue());
}
else if (obj instanceof Byte) {
Array.setByte(objArray, i, ((Byte) obj).byteValue());
}
else if (obj instanceof Short) {
Array.setShort(objArray, i, ((Short) obj).shortValue());
}
}
else {
Array.set(objArray, i, obj);
}
}
return objArray;
}
    /**
     * Lazily resolves the AST ClassNode for this metaclass by locating the
     * corresponding .groovy source on the classpath and compiling it up to
     * the class-generation phase. Returns null when the class is not a
     * GroovyObject or its source cannot be found.
     */
    public ClassNode getClassNode() {
        if (classNode == null && GroovyObject.class.isAssignableFrom(theClass)) {
            // lets try load it from the classpath
            String className = theClass.getName();
            String groovyFile = className;
            // inner classes ($-suffixed) share the source file of their outer class
            int idx = groovyFile.indexOf('$');
            if (idx > 0) {
                groovyFile = groovyFile.substring(0, idx);
            }
            groovyFile = groovyFile.replace('.', '/') + ".groovy";
            URL url = theClass.getClassLoader().getResource(groovyFile);
            if (url == null) {
                // fall back to the thread context class loader
                url = Thread.currentThread().getContextClassLoader().getResource(groovyFile);
            }
            if (url != null) {
                try {
                    /**
                     * todo there is no CompileUnit in scope so class name
                     * checking won't work but that mostly affects the bytecode
                     * generation rather than viewing the AST
                     */
                    // capture the ClassNode matching our class as it is generated
                    CompilationUnit.ClassgenCallback search = new CompilationUnit.ClassgenCallback() {
                        public void call( ClassVisitor writer, ClassNode node ) {
                            if( node.getName().equals(theClass.getName()) ) {
                                MetaClass.this.classNode = node;
                            }
                        }
                    };
                    CompilationUnit unit = new CompilationUnit( getClass().getClassLoader() );
                    unit.setClassgenCallback( search );
                    unit.addSource( url );
                    unit.compile( Phases.CLASS_GENERATION );
                }
                catch (Exception e) {
                    throw new GroovyRuntimeException("Exception thrown parsing: " + groovyFile + ". Reason: " + e, e);
                }
            }
        }
        return classNode;
    }
public String toString() {
return super.toString() + "[" + theClass + "]";
}
// Implementation methods
//-------------------------------------------------------------------------
/**
* Converts the given object into an array; if its an array then just cast
* otherwise wrap it in an array
*/
protected Object[] asArray(Object arguments) {
if (arguments == null) {
return EMPTY_ARRAY;
}
if (arguments instanceof Tuple) {
Tuple tuple = (Tuple) arguments;
return tuple.toArray();
}
if (arguments instanceof Object[]) {
return (Object[]) arguments;
}
else {
return new Object[] { arguments };
}
}
/**
* @param listenerType
* the interface of the listener to proxy
* @param listenerMethodName
* the name of the method in the listener API to call the
* closure on
* @param closure
* the closure to invoke on the listenerMethodName method
* invocation
* @return a dynamic proxy which calls the given closure on the given
* method name
*/
protected Object createListenerProxy(Class listenerType, final String listenerMethodName, final Closure closure) {
InvocationHandler handler = new ClosureListener(listenerMethodName, closure);
return Proxy.newProxyInstance(listenerType.getClassLoader(), new Class[] { listenerType }, handler);
}
/**
* Adds all the methods declared in the given class to the metaclass
* ignoring any matching methods already defined by a derived class
*
* @param theClass
*/
protected void addMethods(Class theClass) {
Method[] methodArray = theClass.getDeclaredMethods();
for (int i = 0; i < methodArray.length; i++) {
Method reflectionMethod = methodArray[i];
if ( reflectionMethod.getName().indexOf('+') >= 0 ) {
continue;
}
MetaMethod method = createMetaMethod(reflectionMethod);
addMethod(method);
}
}
protected void addMethod(MetaMethod method) {
String name = method.getName();
//System.out.println(theClass.getName() + " == " + name + Arrays.asList(method.getParameterTypes()));
if (isGenericGetMethod(method) && genericGetMethod == null) {
genericGetMethod = method;
}
else if (isGenericSetMethod(method) && genericSetMethod == null) {
genericSetMethod = method;
}
if (method.isStatic()) {
List list = (List) staticMethodIndex.get(name);
if (list == null) {
list = new ArrayList();
staticMethodIndex.put(name, list);
list.add(method);
}
else {
if (!containsMatchingMethod(list, method)) {
list.add(method);
}
}
}
List list = (List) methodIndex.get(name);
if (list == null) {
list = new ArrayList();
methodIndex.put(name, list);
list.add(method);
}
else {
if (!containsMatchingMethod(list, method)) {
list.add(method);
}
}
}
/**
* @return true if a method of the same matching prototype was found in the
* list
*/
protected boolean containsMatchingMethod(List list, MetaMethod method) {
for (Iterator iter = list.iterator(); iter.hasNext();) {
MetaMethod aMethod = (MetaMethod) iter.next();
Class[] params1 = aMethod.getParameterTypes();
Class[] params2 = method.getParameterTypes();
if (params1.length == params2.length) {
boolean matches = true;
for (int i = 0; i < params1.length; i++) {
if (params1[i] != params2[i]) {
matches = false;
break;
}
}
if (matches) {
return true;
}
}
}
return false;
}
/**
* Adds all of the newly defined methods from the given class to this
* metaclass
*
* @param theClass
*/
protected void addNewStaticMethodsFrom(Class theClass) {
MetaClass interfaceMetaClass = registry.getMetaClass(theClass);
Iterator iter = interfaceMetaClass.newGroovyMethodsList.iterator();
while (iter.hasNext()) {
MetaMethod method = (MetaMethod) iter.next();
addMethod(method);
newGroovyMethodsList.add(method);
}
}
/**
* @return the value of the static property of the given class
*/
protected Object getStaticProperty(Class aClass, String property) {
//System.out.println("Invoking property: " + property + " on class: "
// + aClass);
Exception lastException = null;
try {
Field field = aClass.getField(property);
if (field != null) {
if ((field.getModifiers() & Modifier.STATIC) != 0) {
return field.get(null);
}
}
}
catch (Exception e) {
lastException = e;
}
// lets try invoke a static getter method
try {
MetaMethod method = findStaticGetter(aClass, "get" + capitalize(property));
if (method != null) {
return doMethodInvoke(aClass, method, EMPTY_ARRAY);
}
}
catch (GroovyRuntimeException e) {
throw new MissingPropertyException(property, aClass, e);
}
if (lastException == null) {
throw new MissingPropertyException(property, aClass);
}
else {
throw new MissingPropertyException(property, aClass, lastException);
}
}
/**
* @return the matching method which should be found
*/
protected MetaMethod findMethod(Method aMethod) {
List methods = getMethods(aMethod.getName());
for (Iterator iter = methods.iterator(); iter.hasNext();) {
MetaMethod method = (MetaMethod) iter.next();
if (method.isMethod(aMethod)) {
return method;
}
}
//log.warning("Creating reflection based dispatcher for: " + aMethod);
return new ReflectionMetaMethod(aMethod);
}
/**
* @return the getter method for the given object
*/
protected MetaMethod findGetter(Object object, String name) {
List methods = getMethods(name);
for (Iterator iter = methods.iterator(); iter.hasNext();) {
MetaMethod method = (MetaMethod) iter.next();
if (method.getParameterTypes().length == 0) {
return method;
}
}
return null;
}
/**
* @return the Method of the given name with no parameters or null
*/
protected MetaMethod findStaticGetter(Class type, String name) {
List methods = getStaticMethods(name);
for (Iterator iter = methods.iterator(); iter.hasNext();) {
MetaMethod method = (MetaMethod) iter.next();
if (method.getParameterTypes().length == 0) {
return method;
}
}
/** todo dirty hack - don't understand why this code is necessary - all methods should be in the allMethods list! */
try {
Method method = type.getMethod(name, EMPTY_TYPE_ARRAY);
if ((method.getModifiers() & Modifier.STATIC) != 0) {
return findMethod(method);
}
else {
return null;
}
}
catch (Exception e) {
return null;
}
}
    /**
     * Invokes the given MetaMethod on the object, retrying with coerced
     * arguments when the initial call fails with ClassCastException or
     * IllegalArgumentException (GString coercion first, then number
     * coercion). Exceptions thrown by the target method are wrapped in
     * InvokerInvocationException; reflection-level failures become
     * GroovyRuntimeException.
     */
    protected Object doMethodInvoke(Object object, MetaMethod method, Object[] argumentArray) {
        try {
            if (argumentArray == null) {
                argumentArray = EMPTY_ARRAY;
            }
            else if (method.getParameterTypes().length == 1 && argumentArray.length == 0) {
                // a one-parameter method called with no arguments receives a single null
                argumentArray = ARRAY_WITH_NULL;
            }
            return method.invoke(object, argumentArray);
        }
        catch (ClassCastException e) {
            // NOTE(review): the retry reuses argumentArray, which implies
            // coerceGStrings converts entries in place — confirm against its definition
            if (coerceGStrings(argumentArray)) {
                try {
                    return doMethodInvoke(object, method, argumentArray);
                }
                catch (Exception e2) {
                    // allow fall through
                }
            }
            throw new GroovyRuntimeException(
                "failed to invoke method: "
                    + method
                    + " on: "
                    + object
                    + " with arguments: "
                    + InvokerHelper.toString(argumentArray)
                    + " reason: "
                    + e,
                e);
        }
        catch (InvocationTargetException e) {
            // exceptions raised inside the target method are wrapped, not translated
            throw new InvokerInvocationException(e);
        }
        catch (IllegalAccessException e) {
            throw new GroovyRuntimeException(
                "could not access method: "
                    + method
                    + " on: "
                    + object
                    + " with arguments: "
                    + InvokerHelper.toString(argumentArray)
                    + " reason: "
                    + e,
                e);
        }
        catch (IllegalArgumentException e) {
            if (coerceGStrings(argumentArray)) {
                try {
                    return doMethodInvoke(object, method, argumentArray);
                }
                catch (Exception e2) {
                    // allow fall through
                }
            }
            // second attempt: coerce numeric arguments to the declared parameter types
            Object[] args = coerceNumbers(method, argumentArray);
            if (args != null && !Arrays.equals(argumentArray,args)) {
                try {
                    return doMethodInvoke(object, method, args);
                }
                catch (Exception e3) {
                    // allow fall through
                }
            }
            throw new GroovyRuntimeException(
                "failed to invoke method: "
                    + method
                    + " on: "
                    + object
                    + " with arguments: "
                    + InvokerHelper.toString(argumentArray));
        }
        catch (RuntimeException e) {
            // other runtime failures propagate unchanged
            throw e;
        }
        catch (Exception e) {
            throw new GroovyRuntimeException(
                "failed to invoke method: "
                    + method
                    + " on: "
                    + object
                    + " with arguments: "
                    + InvokerHelper.toString(argumentArray)
                    + " reason: "
                    + e,
                e);
        }
    }
private static Object[] coerceNumbers(MetaMethod method, Object[] arguments) {
Object[] ans = null;
boolean coerced = false; // to indicate that at least one param is coerced
Class[] params = method.getParameterTypes();
if (params.length != arguments.length) {
return null;
}
ans = new Object[arguments.length];
for (int i = 0, size = arguments.length; i < size; i++) {
Object argument = arguments[i];
Class param = params[i];
if ((Number.class.isAssignableFrom(param) || param.isPrimitive()) && argument instanceof Number) { // Number types
if (param == Byte.class || param == Byte.TYPE ) {
ans[i] = new Byte(((Number)argument).byteValue());
coerced = true; continue;
}
if (param == Double.class || param == Double.TYPE) {
ans[i] = new Double(((Number)argument).doubleValue());
coerced = true; continue;
}
if (param == Float.class || param == Float.TYPE) {
ans[i] = new Float(((Number)argument).floatValue());
coerced = true; continue;
}
if (param == Integer.class || param == Integer.TYPE) {
ans[i] = new Integer(((Number)argument).intValue());
coerced = true; continue;
}
if (param == Long.class || param == Long.TYPE) {
ans[i] = new Long(((Number)argument).longValue());
coerced = true; continue;
}
if (param == Short.class || param == Short.TYPE) {
ans[i] = new Short(((Number)argument).shortValue());
coerced = true; continue;
}
if (param == BigDecimal.class ) {
ans[i] = new BigDecimal(((Number)argument).doubleValue());
coerced = true; continue;
}
if (param == BigInteger.class) {
ans[i] = new BigInteger(String.valueOf(((Number)argument).longValue()));
coerced = true; continue;
}
}
else if (param.isArray() && argument.getClass().isArray()) {
Class paramElem = param.getComponentType();
if (paramElem.isPrimitive()) {
if (paramElem == boolean.class && argument.getClass().getName().equals("[Ljava.lang.Boolean;")) {
ans[i] = InvokerHelper.convertToBooleanArray(argument);
coerced = true;
continue;
}
if (paramElem == byte.class && argument.getClass().getName().equals("[Ljava.lang.Byte;")) {
ans[i] = InvokerHelper.convertToByteArray(argument);
coerced = true;
continue;
}
if (paramElem == char.class && argument.getClass().getName().equals("[Ljava.lang.Character;")) {
ans[i] = InvokerHelper.convertToCharArray(argument);
coerced = true;
continue;
}
if (paramElem == short.class && argument.getClass().getName().equals("[Ljava.lang.Short;")) {
ans[i] = InvokerHelper.convertToShortArray(argument);
coerced = true;
continue;
}
if (paramElem == int.class && argument.getClass().getName().equals("[Ljava.lang.Integer;")) {
ans[i] = InvokerHelper.convertToIntArray(argument);
coerced = true;
continue;
}
if (paramElem == long.class
&& argument.getClass().getName().equals("[Ljava.lang.Long;")
&& argument.getClass().getName().equals("[Ljava.lang.Integer;")
) {
ans[i] = InvokerHelper.convertToLongArray(argument);
coerced = true;
continue;
}
if (paramElem == float.class
&& argument.getClass().getName().equals("[Ljava.lang.Float;")
&& argument.getClass().getName().equals("[Ljava.lang.Integer;")
) {
ans[i] = InvokerHelper.convertToFloatArray(argument);
coerced = true;
continue;
}
if (paramElem == double.class &&
argument.getClass().getName().equals("[Ljava.lang.Double;") &&
argument.getClass().getName().equals("[Ljava.lang.BigDecimal;") &&
argument.getClass().getName().equals("[Ljava.lang.Float;")) {
ans[i] = InvokerHelper.convertToDoubleArray(argument);
coerced = true;
continue;
}
}
}
}
return coerced ? ans : null;
}
    /**
     * Invokes the given constructor with the supplied arguments, retrying
     * once with GString-coerced arguments on IllegalArgumentException.
     * Exceptions thrown by the constructor body are wrapped in
     * InvokerInvocationException; other reflection failures become
     * GroovyRuntimeException.
     */
    protected Object doConstructorInvoke(Constructor constructor, Object[] argumentArray) {
        try {
            // (historical note: a patch for JIRA 435 that forced ctor
            // accessibility here was removed because it exposed private
            // constructors to all callers)
            return constructor.newInstance(argumentArray);
        }
        catch (InvocationTargetException e) {
            // exceptions raised inside the constructor are wrapped, not translated
            throw new InvokerInvocationException(e);
        }
        catch (IllegalArgumentException e) {
            // NOTE(review): retry reuses argumentArray, which implies
            // coerceGStrings converts entries in place — confirm against its definition
            if (coerceGStrings(argumentArray)) {
                try {
                    return constructor.newInstance(argumentArray);
                }
                catch (Exception e2) {
                    // allow fall through
                }
            }
            throw new GroovyRuntimeException(
                "failed to invoke constructor: "
                    + constructor
                    + " with arguments: "
                    + InvokerHelper.toString(argumentArray)
                    + " reason: "
                    + e,
                e);
        }
        catch (IllegalAccessException e) {
            throw new GroovyRuntimeException(
                "could not access constructor: "
                    + constructor
                    + " with arguments: "
                    + InvokerHelper.toString(argumentArray)
                    + " reason: "
                    + e,
                e);
        }
        catch (Exception e) {
            throw new GroovyRuntimeException(
                "failed to invoke constructor: "
                    + constructor
                    + " with arguments: "
                    + InvokerHelper.toString(argumentArray)
                    + " reason: "
                    + e,
                e);
        }
    }
/**
* Chooses the correct method to use from a list of methods which match by
* name.
*
* @param methods
* the possible methods to choose from
* @param arguments
* the original argument to the method
* @return
*/
protected Object chooseMethod(String methodName, List methods, Class[] arguments, boolean coerce) {
int methodCount = methods.size();
if (methodCount <= 0) {
return null;
}
else if (methodCount == 1) {
Object method = methods.get(0);
if (isValidMethod(method, arguments, coerce)) {
return method;
}
return null;
}
Object answer = null;
if (arguments == null || arguments.length == 0) {
answer = chooseEmptyMethodParams(methods);
}
else if (arguments.length == 1 && arguments[0] == null) {
answer = chooseMostGeneralMethodWith1NullParam(methods);
}
else {
List matchingMethods = new ArrayList();
for (Iterator iter = methods.iterator(); iter.hasNext();) {
Object method = iter.next();
Class[] paramTypes;
// making this false helps find matches
if (isValidMethod(method, arguments, coerce)) {
matchingMethods.add(method);
}
}
if (matchingMethods.isEmpty()) {
return null;
}
else if (matchingMethods.size() == 1) {
return matchingMethods.get(0);
}
return chooseMostSpecificParams(methodName, matchingMethods, arguments);
}
if (answer != null) {
return answer;
}
throw new GroovyRuntimeException(
"Could not find which method to invoke from this list: "
+ methods
+ " for arguments: "
+ InvokerHelper.toString(arguments));
}
protected boolean isValidMethod(Object method, Class[] arguments, boolean includeCoerce) {
Class[] paramTypes = getParameterTypes(method);
return isValidMethod(paramTypes, arguments, includeCoerce);
}
public static boolean isValidMethod(Class[] paramTypes, Class[] arguments, boolean includeCoerce) {
if (arguments == null) {
return true;
}
int size = arguments.length;
boolean validMethod = false;
if (paramTypes.length == size) {
// lets check the parameter types match
validMethod = true;
for (int i = 0; i < size; i++) {
if (!isCompatibleClass(paramTypes[i], arguments[i], includeCoerce)) {
validMethod = false;
}
}
}
else {
if (paramTypes.length == 1 && size == 0) {
return true;
}
}
return validMethod;
}
    /**
     * Out of several signature-compatible candidates, picks the one whose
     * parameter types are most specific for the given argument types. An
     * exact signature match wins immediately; otherwise candidates are
     * compared pairwise, switching to a candidate whenever it has a
     * parameter type not assignable from the current best's. For multi-arg
     * calls an ambiguity check is performed before switching.
     */
    protected Object chooseMostSpecificParams(String name, List matchingMethods, Class[] arguments) {
        // an exact match on all parameter types always wins
        for (Iterator iter = matchingMethods.iterator(); iter.hasNext();) {
            Object method = iter.next();
            Class[] paramTypes = getParameterTypes(method);
            if (Arrays.equals(arguments, paramTypes)) return method;
        }
        Object answer = null;
        int size = arguments.length;
        Class[] mostSpecificTypes = null;
        for (Iterator iter = matchingMethods.iterator(); iter.hasNext();) {
            Object method = iter.next();
            Class[] paramTypes = getParameterTypes(method);
            if (answer == null) {
                // first candidate becomes the provisional best
                answer = method;
                mostSpecificTypes = paramTypes;
            }
            else {
                boolean useThisMethod = false;
                for (int i = 0; i < size; i++) {
                    Class mostSpecificType = mostSpecificTypes[i];
                    Class type = paramTypes[i];
                    // prefer this candidate if any of its parameters is narrower
                    if (!isAssignableFrom(mostSpecificType, type)) {
                        useThisMethod = true;
                        break;
                    }
                }
                if (useThisMethod) {
                    if (size > 1) {
                        // reject conflicting prototypes such as (String,Object) vs (Object,String)
                        checkForInvalidOverloading(name, mostSpecificTypes, paramTypes);
                    }
                    answer = method;
                    mostSpecificTypes = paramTypes;
                }
            }
        }
        return answer;
    }
/**
 * Checks that one of the two parameter-type lists is a superset of the other
 * and that they do not conflict, e.g. foo(String, Object) and
 * foo(Object, String) would conflict when called as foo("a", "b").
 *
 * Only ever called with two possible signatures; invalid combinations have
 * already been filtered out upstream.
 *
 * @throws GroovyRuntimeException when the overloads are ambiguous
 */
protected void checkForInvalidOverloading(String name, Class[] baseTypes, Class[] derivedTypes) {
    int count = baseTypes.length;
    for (int idx = 0; idx < count; idx++) {
        Class base = baseTypes[idx];
        Class derived = derivedTypes[idx];
        if (isAssignableFrom(derived, base)) {
            continue;
        }
        throw new GroovyRuntimeException(
            "Ambiguous method overloading for method: "
                + name
                + ". Cannot resolve which method to invoke due to overlapping prototypes between: "
                + InvokerHelper.toString(baseTypes)
                + " and: "
                + InvokerHelper.toString(derivedTypes));
    }
}
/**
 * Extracts the formal parameter types from a MetaMethod, a reflective
 * Method, or a Constructor.
 *
 * @throws IllegalArgumentException when the argument is none of these
 */
protected Class[] getParameterTypes(Object methodOrConstructor) {
    if (methodOrConstructor instanceof MetaMethod) {
        return ((MetaMethod) methodOrConstructor).getParameterTypes();
    }
    if (methodOrConstructor instanceof Method) {
        return ((Method) methodOrConstructor).getParameterTypes();
    }
    if (methodOrConstructor instanceof Constructor) {
        return ((Constructor) methodOrConstructor).getParameterTypes();
    }
    throw new IllegalArgumentException("Must be a Method or Constructor");
}
/**
 * Searches the given methods for the one-parameter method whose parameter
 * type is the most general non-primitive type (e.g. Object); used when
 * dispatching a call whose single argument is null.
 *
 * @return the matching method, or null when no suitable method exists
 */
protected Object chooseMostGeneralMethodWith1NullParam(List methods) {
    Object best = null;
    Class bestType = null;
    for (Iterator it = methods.iterator(); it.hasNext();) {
        Object candidate = it.next();
        Class[] signature = getParameterTypes(candidate);
        if (signature.length != 1) {
            continue;
        }
        Class paramType = signature[0];
        // a primitive parameter can never accept null
        if (paramType.isPrimitive()) {
            continue;
        }
        // keep widening towards the most general reference type seen so far
        if (bestType == null || isAssignableFrom(bestType, paramType)) {
            bestType = paramType;
            best = candidate;
        }
    }
    return best;
}
/**
 * @return the first method in the list that takes no parameters, or null
 * if there is none.  (The previous javadoc incorrectly described this as
 * finding a one-parameter method.)
 */
protected Object chooseEmptyMethodParams(List methods) {
    for (Iterator it = methods.iterator(); it.hasNext();) {
        Object candidate = it.next();
        if (getParameterTypes(candidate).length == 0) {
            return candidate;
        }
    }
    return null;
}
/**
 * Returns true when the given runtime value may be passed where a parameter
 * of the given type is expected.  Handles null, primitive parameters
 * (matched against their wrapper values, including some widening such as
 * int->double), arrays (compared by component type) and, when includeCoerce
 * is set, GString->String and Number coercions.
 */
protected static boolean isCompatibleInstance(Class type, Object value, boolean includeCoerce) {
// null is compatible with any reference type; isInstance covers the normal case
boolean answer = value == null || type.isInstance(value);
if (!answer) {
if (type.isPrimitive()) {
// a primitive parameter accepts its wrapper, plus selected widenings
if (type == int.class) {
return value instanceof Integer;
}
else if (type == double.class) {
return value instanceof Double || value instanceof Float || value instanceof Integer || value instanceof BigDecimal;
}
else if (type == boolean.class) {
return value instanceof Boolean;
}
else if (type == long.class) {
return value instanceof Long || value instanceof Integer;
}
else if (type == float.class) {
return value instanceof Float || value instanceof Integer;
}
else if (type == char.class) {
return value instanceof Character;
}
else if (type == byte.class) {
return value instanceof Byte;
}
else if (type == short.class) {
return value instanceof Short;
}
}
else if(type.isArray() && value.getClass().isArray()) {
// arrays are compatible when their component types are (no coercion at this level)
return isCompatibleClass(type.getComponentType(), value.getClass().getComponentType(), false);
}
else if (includeCoerce) {
if (type == String.class && value instanceof GString) {
return true;
}
else if (value instanceof Number) {
// lets allow numbers to be coerced downwards?
return Number.class.isAssignableFrom(type);
}
}
}
return answer;
}
/**
 * Class-level counterpart of isCompatibleInstance: returns true when a value
 * of class {@code value} may be passed where {@code type} is expected.
 * Note the widenings differ slightly from the instance variant (e.g. here
 * int/long/float do not accept BigDecimal — see the inline remarks).
 */
protected static boolean isCompatibleClass(Class type, Class value, boolean includeCoerce) {
boolean answer = value == null || type.isAssignableFrom(value); // this might have taken care of primitive types, rendering part of the following code unnecessary
if (!answer) {
if (type.isPrimitive()) {
// primitive parameters match their wrapper classes plus selected widenings
if (type == int.class) {
return value == Integer.class;// || value == BigDecimal.class; //br added BigDecimal
}
else if (type == double.class) {
return value == Double.class || value == Float.class || value == Integer.class || value == BigDecimal.class;
}
else if (type == boolean.class) {
return value == Boolean.class;
}
else if (type == long.class) {
return value == Long.class || value == Integer.class; // || value == BigDecimal.class;//br added BigDecimal
}
else if (type == float.class) {
return value == Float.class || value == Integer.class; // || value == BigDecimal.class;//br added BigDecimal
}
else if (type == char.class) {
return value == Character.class;
}
else if (type == byte.class) {
return value == Byte.class;
}
else if (type == short.class) {
return value == Short.class;
}
}
else if(type.isArray() && value.isArray()) {
// arrays are compatible when their component types are (no coercion at this level)
return isCompatibleClass(type.getComponentType(), value.getComponentType(), false);
}
else if (includeCoerce) {
//if (type == String.class && value == GString.class) {
if (type == String.class && GString.class.isAssignableFrom(value)) {
return true;
}
else if (value == Number.class) {
// lets allow numbers to be coerced downwards?
return Number.class.isAssignableFrom(type);
}
}
}
return answer;
}
/**
 * Returns true when a value of {@code mostSpecificType} can be used where
 * {@code type} is expected.  For pairs of primitives this implements
 * widening by numeric range (boolean and char never convert); otherwise it
 * falls back to Class.isAssignableFrom, retrying with the autoboxed wrapper
 * types so that e.g. int and Integer compare as equivalent.
 */
protected boolean isAssignableFrom(Class mostSpecificType, Class type) {
// let's handle primitives
if (mostSpecificType.isPrimitive() && type.isPrimitive()) {
if (mostSpecificType == type) {
return true;
}
else { // note: there is not coercion for boolean and char. Range matters, precision doesn't
if (type == int.class) {
return
mostSpecificType == int.class
|| mostSpecificType == short.class
|| mostSpecificType == byte.class;
}
else if (type == double.class) {
return
mostSpecificType == double.class
|| mostSpecificType == int.class
|| mostSpecificType == long.class
|| mostSpecificType == short.class
|| mostSpecificType == byte.class
|| mostSpecificType == float.class;
}
else if (type == long.class) {
return
mostSpecificType == long.class
|| mostSpecificType == int.class
|| mostSpecificType == short.class
|| mostSpecificType == byte.class;
}
else if (type == float.class) {
return
mostSpecificType == float.class
|| mostSpecificType == int.class
|| mostSpecificType == long.class
|| mostSpecificType == short.class
|| mostSpecificType == byte.class;
}
else if (type == short.class) {
return
mostSpecificType == short.class
|| mostSpecificType == byte.class;
}
else {
// boolean and char: only the exact-match case above succeeds
return false;
}
}
}
boolean answer = type.isAssignableFrom(mostSpecificType);
if (!answer) {
// retry with wrappers so primitive/wrapper mixes compare correctly
answer = autoboxType(type).isAssignableFrom(autoboxType(mostSpecificType));
}
return answer;
}
/**
 * Maps a primitive class to its wrapper class; any non-primitive class
 * (and void, which has no mapping here) is returned unchanged.
 */
private Class autoboxType(Class type) {
    if (!type.isPrimitive()) {
        return type;
    }
    if (type == int.class) return Integer.class;
    if (type == double.class) return Double.class;
    if (type == long.class) return Long.class;
    if (type == boolean.class) return Boolean.class;
    if (type == float.class) return Float.class;
    if (type == char.class) return Character.class;
    if (type == byte.class) return Byte.class;
    if (type == short.class) return Short.class;
    return type;
}
/**
 * Coerces any GString instances in the argument array into Strings,
 * replacing them in place.
 *
 * @return true if some coercion was done.
 */
protected static boolean coerceGStrings(Object[] arguments) {
    boolean changed = false;
    int count = arguments.length;
    for (int i = 0; i < count; i++) {
        if (arguments[i] instanceof GString) {
            arguments[i] = arguments[i].toString();
            changed = true;
        }
    }
    return changed;
}
/**
 * @return true when the method is a generic property setter: named "set"
 * and taking exactly two parameters (name, value)
 */
protected boolean isGenericSetMethod(MetaMethod method) {
    if (!method.getName().equals("set")) {
        return false;
    }
    return method.getParameterTypes().length == 2;
}
/**
 * @return true when the method is a generic property getter: named "get"
 * and taking a single String parameter (the property name)
 */
protected boolean isGenericGetMethod(MetaMethod method) {
    if (!method.getName().equals("get")) {
        return false;
    }
    Class[] signature = method.getParameterTypes();
    return signature.length == 1 && signature[0] == String.class;
}
/**
 * Registers every public static method of theClass on the MetaClass of the
 * type of its first parameter — as a new instance method or a new static
 * method depending on the flag.  This appears to be the
 * DefaultGroovyMethods-style registration mechanism; TODO(review) confirm
 * against the registry callers.
 */
private void registerMethods(boolean instanceMethods) {
Method[] methods = theClass.getMethods();
for (int i = 0; i < methods.length; i++) {
Method method = methods[i];
if (MethodHelper.isStatic(method)) {
Class[] paramTypes = method.getParameterTypes();
if (paramTypes.length > 0) {
// the first parameter's type is the class the method is attached to
Class owner = paramTypes[0];
if (instanceMethods) {
registry.lookup(owner).addNewInstanceMethod(method);
} else {
registry.lookup(owner).addNewStaticMethod(method);
}
}
}
}
}
// Registers theClass's public static methods as static methods on the
// MetaClass of their first parameter's type (see registerMethods).
protected void registerStaticMethods() {
registerMethods(false);
}
// Registers theClass's public static methods as instance methods on the
// MetaClass of their first parameter's type (see registerMethods).
protected void registerInstanceMethods() {
registerMethods(true);
}
/**
 * Capitalizes the first character of the given property name, JavaBean
 * style ("name" -> "Name").
 *
 * @param property the property name; null or empty input is returned
 *                 unchanged (the original threw StringIndexOutOfBoundsException
 *                 on an empty string)
 */
protected String capitalize(String property) {
    if (property == null || property.length() == 0) {
        return property;
    }
    // the second substring's explicit end index was redundant
    return property.substring(0, 1).toUpperCase() + property.substring(1);
}
/**
 * Call this method when any mutation method is called, such as adding a new
 * method to this MetaClass so that any caching or bytecode generation can be
 * regenerated.
 */
protected synchronized void onMethodChange() {
reflector = null; // forces regeneration on the next checkInitialised()
}
/**
 * Lazily completes initialisation: inherited methods are merged in on the
 * first call, and the bytecode Reflector is (re)generated whenever it has
 * been cleared by onMethodChange().
 */
protected synchronized void checkInitialised() {
if (!initialised) {
initialised = true;
addInheritedMethods(theClass);
}
if (reflector == null) {
generateReflector();
}
}
/**
 * Wraps a reflective Method in a MetaMethod.  Uses a reflection-based
 * dispatcher when reflection is globally forced or when the method cannot
 * be dispatched through generated bytecode (see isValidReflectorMethod);
 * otherwise the method is indexed for the Reflector.
 */
protected MetaMethod createMetaMethod(final Method method) {
if (registry.useAccessible()) {
// suppress access checks up front so later invocations don't fail
AccessController.doPrivileged(new PrivilegedAction() {
public Object run() {
method.setAccessible(true);
return null;
}
});
}
if (useReflection) {
//log.warning("Creating reflection based dispatcher for: " + method);
return new ReflectionMetaMethod(method);
}
MetaMethod answer = new MetaMethod(method);
if (isValidReflectorMethod(answer)) {
// index the method so the generated Reflector can dispatch to it
allMethods.add(answer);
answer.setMethodIndex(allMethods.size());
}
else {
//log.warning("Creating reflection based dispatcher for: " + method);
answer = new ReflectionMetaMethod(method);
}
return answer;
}
/**
 * Determines whether the method can be dispatched through the generated
 * Reflector: it must be public, and if its declaring class is not public
 * it must also be visible through a public interface (in which case the
 * interface becomes the dispatch target).
 */
protected boolean isValidReflectorMethod(MetaMethod method) {
// We cannot use a reflector if the method is private, protected, or package accessible only.
if (!method.isPublic()) {
return false;
}
Class declaringClass = method.getDeclaringClass();
if (!Modifier.isPublic(declaringClass.getModifiers())) {
// lets see if this method is implemented on an interface
List list = getInterfaceMethods();
for (Iterator iter = list.iterator(); iter.hasNext();) {
MetaMethod aMethod = (MetaMethod) iter.next();
if (method.isSame(aMethod)) {
// dispatch through the public interface instead
method.setInterfaceClass(aMethod.getDeclaringClass());
return true;
}
}
/** todo */
//log.warning("Cannot invoke method on protected/private class which isn't visible on an interface so must use reflection instead: " + method);
return false;
}
return true;
}
/**
 * Generates (or loads) the Reflector for this class and wires it into every
 * indexed MetaMethod so invocations can dispatch through generated bytecode.
 */
protected void generateReflector() {
    reflector = loadReflector(allMethods);
    if (reflector == null) {
        throw new RuntimeException("Should have a reflector!");
    }
    // every indexed method shares the single reflector instance
    Iterator it = allMethods.iterator();
    while (it.hasNext()) {
        MetaMethod each = (MetaMethod) it.next();
        each.setReflector(reflector);
    }
}
/**
 * Loads (or generates and defines) the Reflector class for theClass.  The
 * reflector is named "gjdk." + class name + "_GroovyReflector" (arrays use
 * the component type plus "_GroovyReflectorArray").  An already-loaded
 * reflector is reused; otherwise bytecode is generated with ASM and defined
 * through the appropriate classloader.
 */
protected Reflector loadReflector(List methods) {
ReflectorGenerator generator = new ReflectorGenerator(methods);
String className = theClass.getName();
String packagePrefix = "gjdk.";
/*
if (className.startsWith("java.")) {
packagePrefix = "gjdk.";
}
*/
String name = packagePrefix + className + "_GroovyReflector";
if (theClass.isArray()) {
// arrays get a reflector named after their component type
String componentName = theClass.getComponentType().getName();
/*
if (componentName.startsWith("java.")) {
packagePrefix = "gjdk.";
}
*/
name = packagePrefix + componentName + "_GroovyReflectorArray";
}
// lets see if its already loaded
try {
Class type = loadReflectorClass(name);
return (Reflector) type.newInstance();
}
catch (AccessControlException ace) {
//Don't ignore this exception type
throw ace;
}
catch (Exception e) {
// lets ignore, lets generate it && load it
}
// not loaded yet: generate the reflector bytecode with ASM and define it
ClassWriter cw = new ClassWriter(true);
generator.generate(cw, name);
byte[] bytecode = cw.toByteArray();
try {
Class type = loadReflectorClass(name, bytecode);
return (Reflector) type.newInstance();
}
catch (Exception e) {
throw new GroovyRuntimeException("Could not load the reflector for class: " + name + ". Reason: " + e, e);
}
}
/**
 * Defines the generated reflector bytecode as a class: through the
 * GroovyClassLoader (under a privileged action) when theClass was loaded by
 * one, otherwise through the registry's loader.
 */
protected Class loadReflectorClass(final String name, final byte[] bytecode) throws ClassNotFoundException {
    ClassLoader loader = theClass.getClassLoader();
    if (!(loader instanceof GroovyClassLoader)) {
        return registry.loadClass(name, bytecode);
    }
    final GroovyClassLoader gloader = (GroovyClassLoader) loader;
    return (Class) AccessController.doPrivileged(new PrivilegedAction() {
        public Object run() {
            return gloader.defineClass(name, bytecode, getClass().getProtectionDomain());
        }
    });
}
/**
 * Loads an already-defined reflector class by name, preferring theClass's
 * own GroovyClassLoader when it has one.
 */
protected Class loadReflectorClass(String name) throws ClassNotFoundException {
    ClassLoader loader = theClass.getClassLoader();
    if (loader instanceof GroovyClassLoader) {
        return ((GroovyClassLoader) loader).loadClass(name);
    }
    return registry.loadClass(name);
}
/** @return the live list of all indexed MetaMethods (not a defensive copy) */
public List getMethods() {
return allMethods;
}
/**
 * @return a shallow snapshot of the Groovy methods added to this metaclass;
 * mutating the returned list does not affect internal state.
 */
public List getMetaMethods() {
    // copy constructor replaces the former clone()-and-cast idiom; the
    // result is the same shallow copy without the unchecked ArrayList cast
    return new ArrayList(newGroovyMethodsList);
}
/**
 * Lazily builds (synchronized) and returns the MetaMethods of every
 * interface implemented anywhere in theClass's hierarchy, walking from
 * theClass up through its superclasses.
 */
protected synchronized List getInterfaceMethods() {
if (interfaceMethods == null) {
interfaceMethods = new ArrayList();
Class type = theClass;
while (type != null) {
Class[] interfaces = type.getInterfaces();
for (int i = 0; i < interfaces.length; i++) {
Class iface = interfaces[i];
Method[] methods = iface.getMethods();
// wrap each interface method in a MetaMethod and collect it
addInterfaceMethods(interfaceMethods, methods);
}
type = type.getSuperclass();
}
}
return interfaceMethods;
}
/** Wraps each reflective method in a MetaMethod and appends it to the list. */
private void addInterfaceMethods(List list, Method[] methods) {
    int count = methods.length;
    for (int idx = 0; idx < count; idx++) {
        list.add(createMetaMethod(methods[idx]));
    }
}
/**
 * Converts an argument array into the corresponding array of runtime
 * classes; null arguments map to null entries.
 *
 * @param args the arguments, possibly null
 * @return the class of each argument, or null when args is null
 */
Class[] convertToTypeArray(Object[] args) {
    if (args == null) {
        return null;
    }
    int count = args.length;
    Class[] types = new Class[count];
    for (int i = 0; i < count; i++) {
        Object arg = args[i];
        types[i] = (arg == null) ? null : arg.getClass();
    }
    return types;
}
}
|
src/main/groovy/lang/MetaClass.java
|
/*
$Id$
Copyright 2003 (C) James Strachan and Bob Mcwhirter. All Rights Reserved.
Redistribution and use of this software and associated documentation
("Software"), with or without modification, are permitted provided
that the following conditions are met:
1. Redistributions of source code must retain copyright
statements and notices. Redistributions must also contain a
copy of this document.
2. Redistributions in binary form must reproduce the
above copyright notice, this list of conditions and the
following disclaimer in the documentation and/or other
materials provided with the distribution.
3. The name "groovy" must not be used to endorse or promote
products derived from this Software without prior written
permission of The Codehaus. For written permission,
please contact info@codehaus.org.
4. Products derived from this Software may not be called "groovy"
nor may "groovy" appear in their names without prior written
permission of The Codehaus. "groovy" is a registered
trademark of The Codehaus.
5. Due credit should be given to The Codehaus -
http://groovy.codehaus.org/
THIS SOFTWARE IS PROVIDED BY THE CODEHAUS AND CONTRIBUTORS
``AS IS'' AND ANY EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT
NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
THE CODEHAUS OR ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package groovy.lang;
import java.beans.BeanInfo;
import java.beans.EventSetDescriptor;
import java.beans.IntrospectionException;
import java.beans.Introspector;
import java.beans.PropertyDescriptor;
import java.lang.reflect.Array;
import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.lang.reflect.Proxy;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.net.URL;
import java.security.AccessControlException;
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.security.PrivilegedActionException;
import java.security.PrivilegedExceptionAction;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.logging.Logger;
import org.codehaus.groovy.ast.ClassNode;
import org.codehaus.groovy.classgen.ReflectorGenerator;
import org.codehaus.groovy.control.CompilationUnit;
import org.codehaus.groovy.control.Phases;
import org.codehaus.groovy.runtime.ClosureListener;
import org.codehaus.groovy.runtime.DefaultGroovyMethods;
import org.codehaus.groovy.runtime.GroovyCategorySupport;
import org.codehaus.groovy.runtime.InvokerHelper;
import org.codehaus.groovy.runtime.InvokerInvocationException;
import org.codehaus.groovy.runtime.MethodClosure;
import org.codehaus.groovy.runtime.MethodHelper;
import org.codehaus.groovy.runtime.MethodKey;
import org.codehaus.groovy.runtime.NewInstanceMetaMethod;
import org.codehaus.groovy.runtime.NewStaticMetaMethod;
import org.codehaus.groovy.runtime.ReflectionMetaMethod;
import org.codehaus.groovy.runtime.Reflector;
import org.codehaus.groovy.runtime.TemporaryMethodKey;
import org.codehaus.groovy.runtime.TransformMetaMethod;
import org.objectweb.asm.ClassVisitor;
import org.objectweb.asm.ClassWriter;
/**
* Allows methods to be dynamically added to existing classes at runtime
*
* @author <a href="mailto:james@coredevelopers.net">James Strachan</a>
* @author Guillaume Laforge
* @version $Revision$
*/
public class MetaClass {
private static final Logger log = Logger.getLogger(MetaClass.class.getName());
public static final Object[] EMPTY_ARRAY = {
};
public static Class[] EMPTY_TYPE_ARRAY = {
};
protected static final Object[] ARRAY_WITH_NULL = { null };
// when true, all dispatch goes through reflection instead of generated bytecode
private static boolean useReflection = false;
private MetaClassRegistry registry;
private Class theClass;
private ClassNode classNode;
// method name -> List of instance MetaMethods
private Map methodIndex = new HashMap();
// method name -> List of static MetaMethods
private Map staticMethodIndex = new HashMap();
// methods added dynamically to this metaclass (reported by getMetaMethods)
private List newGroovyMethodsList = new ArrayList();
//private Map propertyDescriptors = Collections.synchronizedMap(new HashMap());
// property name -> MetaProperty
private Map propertyMap = Collections.synchronizedMap(new HashMap());
// listener method name -> add-listener MetaMethod (built in the constructor)
private Map listeners = new HashMap();
// MethodKey -> resolved MetaMethod caches for instance and static dispatch
private Map methodCache = Collections.synchronizedMap(new HashMap());
private Map staticMethodCache = Collections.synchronizedMap(new HashMap());
private MetaMethod genericGetMethod;
private MetaMethod genericSetMethod;
private List constructors;
private List allMethods = new ArrayList();
private List interfaceMethods;
private Reflector reflector;
private boolean initialised;
// we only need one of these that can be reused over and over.
private MetaProperty arrayLengthProperty = new MetaArrayLengthProperty();
/**
 * Builds the metaclass for the given class: captures its declared
 * constructors and methods, introspects it as a JavaBean (under a
 * privileged action), sets up MetaProperties, and maps each listener-
 * interface method name to the corresponding add-listener method.
 */
public MetaClass(MetaClassRegistry registry, final Class theClass) throws IntrospectionException {
this.registry = registry;
this.theClass = theClass;
constructors = Arrays.asList(theClass.getDeclaredConstructors());
addMethods(theClass);
// introspect
BeanInfo info = null;
try {
info =(BeanInfo) AccessController.doPrivileged(new PrivilegedExceptionAction() {
public Object run() throws IntrospectionException {
return Introspector.getBeanInfo(theClass);
}
});
} catch (PrivilegedActionException pae) {
// unwrap the checked IntrospectionException; anything else is unexpected
if (pae.getException() instanceof IntrospectionException) {
throw (IntrospectionException) pae.getException();
} else {
throw new RuntimeException(pae.getException());
}
}
PropertyDescriptor[] descriptors = info.getPropertyDescriptors();
// build up the metaproperties based on the public fields, property descriptors,
// and the getters and setters
setupProperties(descriptors);
/* old code
for (int i = 0; i < descriptors.length; i++) {
PropertyDescriptor descriptor = descriptors[i];
propertyDescriptors.put(descriptor.getName(), descriptor);
}
*/
EventSetDescriptor[] eventDescriptors = info.getEventSetDescriptors();
for (int i = 0; i < eventDescriptors.length; i++) {
EventSetDescriptor descriptor = eventDescriptors[i];
Method[] listenerMethods = descriptor.getListenerMethods();
for (int j = 0; j < listenerMethods.length; j++) {
Method listenerMethod = listenerMethods[j];
// each listener method name maps to the event's add-listener method
MetaMethod metaMethod = createMetaMethod(descriptor.getAddListenerMethod());
listeners.put(listenerMethod.getName(), metaMethod);
}
}
}
/** @return whether reflection-based (rather than generated-bytecode) dispatch is globally forced */
public static boolean isUseReflection() {
return useReflection;
}
/**
 * Allows reflection to be enabled in situations where bytecode generation
 * of method invocations causes issues.  Note this is a global, unguarded
 * flag affecting all MetaClass instances.
 *
 * @param useReflection whether to force reflection-based dispatch
 */
public static void setUseReflection(boolean useReflection) {
MetaClass.useReflection = useReflection;
}
/**
 * Merges inherited methods into this metaclass's index.  Ordering matters:
 * superclass methods first, interface registrations next, then Object's
 * methods last so that e.g. List/Collection variants take precedence over
 * Object's.
 */
private void addInheritedMethods(Class theClass) {
// lets add all the base class methods
Class c = theClass;
if (c != Object.class) {
while (true) {
c = c.getSuperclass();
if (c == Object.class || c == null) {
break;
}
addMethods(c);
addNewStaticMethodsFrom(c);
}
}
// now lets see if there are any methods on one of my interfaces
Class[] interfaces = theClass.getInterfaces();
for (int i = 0; i < interfaces.length; i++) {
addNewStaticMethodsFrom(interfaces[i]);
}
// lets add Object methods after interfaces, as all interfaces derive from Object.
// this ensures List and Collection methods come before Object etc
if (theClass != Object.class) {
addMethods(Object.class);
addNewStaticMethodsFrom(Object.class);
}
// object arrays additionally pick up the Object[] registrations
if (theClass.isArray() && !theClass.equals(Object[].class)) {
addNewStaticMethodsFrom(Object[].class);
}
}
/**
 * @return all the normal instance methods available on this class for the
 * given name, including any category methods currently in scope; never
 * null (an empty list is returned when nothing matches)
 */
public List getMethods(String name) {
    List answer = (List) methodIndex.get(name);
    List used = GroovyCategorySupport.getCategoryMethods(theClass, name);
    if (used != null) {
        if (answer != null) {
            // merge into a fresh list: the original called answer.addAll(used)
            // directly on the list stored in methodIndex, permanently
            // polluting the cached method index with category methods
            List merged = new ArrayList(answer);
            merged.addAll(used);
            answer = merged;
        } else {
            answer = used;
        }
    }
    if (answer == null) {
        answer = Collections.EMPTY_LIST;
    }
    return answer;
}
/**
 * @return all the normal static methods available on this class for the
 * given name; never null (an empty list when nothing matches)
 */
public List getStaticMethods(String name) {
    List found = (List) staticMethodIndex.get(name);
    if (found != null) {
        return found;
    }
    return Collections.EMPTY_LIST;
}
/**
 * Allows static method definitions to be added to a meta class as if it
 * was an instance method
 *
 * @param method the static method to expose as an instance method
 * @throws RuntimeException if the metaclass has already been initialised
 */
protected void addNewInstanceMethod(Method method) {
if (initialised) {
throw new RuntimeException("Already initialized, cannot add new method: " + method);
}
else {
// wrap, index for dispatch, and record for getMetaMethods()
NewInstanceMetaMethod newMethod = new NewInstanceMetaMethod(createMetaMethod(method));
addMethod(newMethod);
addNewInstanceMethod(newMethod);
}
}
// Records the added method so getMetaMethods() can report it.
protected void addNewInstanceMethod(MetaMethod method) {
newGroovyMethodsList.add(method);
}
/**
 * Adds a new static method to this metaclass.
 *
 * @throws RuntimeException if the metaclass has already been initialised
 */
protected void addNewStaticMethod(Method method) {
if (initialised) {
throw new RuntimeException("Already initialized, cannot add new method: " + method);
}
else {
// wrap, index for dispatch, and record for getMetaMethods()
NewStaticMetaMethod newMethod = new NewStaticMetaMethod(createMetaMethod(method));
addMethod(newMethod);
addNewStaticMethod(newMethod);
}
}
// Records the added method so getMetaMethods() can report it.
protected void addNewStaticMethod(MetaMethod method) {
newGroovyMethodsList.add(method);
}
/**
 * Invokes the named method, first converting the single argument with
 * asArray (defined elsewhere in this class) and delegating to the
 * Object[] overload.
 */
public Object invokeMethod(Object object, String methodName, Object arguments) {
return invokeMethod(object, methodName, asArray(arguments));
}
/**
 * Invokes the given method on the object.  When no matching method is
 * found, falls back to looking up a Closure-valued property of the same
 * name and calling it; otherwise throws MissingMethodException.
 */
public Object invokeMethod(Object object, String methodName, Object[] arguments) {
if (object == null) {
throw new NullPointerException("Cannot invoke method: " + methodName + " on null object");
}
MetaMethod method = retrieveMethod(object, methodName, arguments);
if (method != null) {
return doMethodInvoke(object, method, arguments);
} else {
// if no method was found, try to find a closure defined as a field of the class and run it
try {
Object value = this.getProperty(object, methodName);
if (value instanceof Closure && object!=this) {
Closure closure = (Closure) value;
closure.setDelegate(this);
return closure.call(arguments);
}
else {
throw new MissingMethodException(methodName, theClass, arguments);
}
}
catch (Exception e) {
// NOTE(review): this broad catch converts ANY failure — including
// exceptions thrown by the closure body itself — into a
// MissingMethodException, discarding the original cause; confirm
// callers' expectations before narrowing it.
throw new MissingMethodException(methodName, theClass, arguments);
}
}
}
/**
 * Resolves (and caches) the instance method for the given name and runtime
 * arguments; consults the method cache before running full resolution.
 */
protected MetaMethod retrieveMethod(Object owner, String methodName, Object[] arguments) {
    MethodKey key = new TemporaryMethodKey(methodName, arguments);
    MetaMethod cached = (MetaMethod) methodCache.get(key);
    if (cached != null) {
        return cached;
    }
    MetaMethod resolved = pickMethod(owner, methodName, arguments);
    if (resolved != null && resolved.isCacheable()) {
        // createCopy presumably detaches the key from the live argument
        // array before it is stored — TODO confirm in MethodKey
        methodCache.put(key.createCopy(), resolved);
    }
    return resolved;
}
/**
 * Resolves (and caches) the instance method for the given name and static
 * argument types; used for compile-time binding.
 */
public MetaMethod retrieveMethod(String methodName, Class[] arguments) {
    MethodKey key = new TemporaryMethodKey(methodName, arguments);
    MetaMethod found = (MetaMethod) methodCache.get(key);
    if (found != null) {
        return found;
    }
    found = pickMethod(methodName, arguments); // todo shall call pickStaticMethod also?
    if (found != null && found.isCacheable()) {
        methodCache.put(key.createCopy(), found);
    }
    return found;
}
/**
 * Finds a constructor matching the given argument types: a strict match is
 * attempted first, then a second pass allowing Groovy coercions.
 *
 * @return the matching constructor, or null when none applies
 */
public Constructor retrieveConstructor(Class[] arguments) {
    Constructor ctor = (Constructor) chooseMethod("<init>", constructors, arguments, false);
    if (ctor == null) {
        ctor = (Constructor) chooseMethod("<init>", constructors, arguments, true);
    }
    return ctor;
}
/**
 * Resolves (and caches) the static method for the given name and argument
 * types; consults the static-method cache before running full resolution.
 */
public MetaMethod retrieveStaticMethod(String methodName, Class[] arguments) {
    MethodKey key = new TemporaryMethodKey(methodName, arguments);
    MetaMethod found = (MetaMethod) staticMethodCache.get(key);
    if (found != null) {
        return found;
    }
    found = pickStaticMethod(methodName, arguments);
    if (found != null) {
        staticMethodCache.put(key.createCopy(), found);
    }
    return found;
}
/**
 * Picks which method to invoke for the given object, method name and arguments.
 * Resolution is strict first, then with coercion; as a last resort a single
 * List argument is unpacked into individual arguments (e.g. calling
 * JFrame.setLocation([100, 100])) and the call is wrapped so the unpacking
 * happens again at invocation time.
 */
protected MetaMethod pickMethod(Object object, String methodName, Object[] arguments) {
MetaMethod method = null;
List methods = getMethods(methodName);
if (!methods.isEmpty()) {
Class[] argClasses = convertToTypeArray(arguments);
// pass 1: strict match; pass 2: allow coercions
method = (MetaMethod) chooseMethod(methodName, methods, argClasses, false);
if (method == null) {
method = (MetaMethod) chooseMethod(methodName, methods, argClasses, true);
if (method == null) {
int size = (arguments != null) ? arguments.length : 0;
if (size == 1) {
Object firstArgument = arguments[0];
if (firstArgument instanceof List) {
// lets coerce the list arguments into an array of
// arguments
// e.g. calling JFrame.setLocation( [100, 100] )
List list = (List) firstArgument;
arguments = list.toArray();
argClasses = convertToTypeArray(arguments);
method = (MetaMethod) chooseMethod(methodName, methods, argClasses, true);
if (method==null) return null;
// wrap so the List is unpacked on every future invocation too
return new TransformMetaMethod(method) {
public Object invoke(Object object, Object[] arguments) throws Exception {
Object firstArgument = arguments[0];
List list = (List) firstArgument;
arguments = list.toArray();
return super.invoke(object, arguments);
}
};
}
}
}
}
}
return method;
}
/**
 * pick a method in a strict manner, i.e., without reinterpreting the first
 * List argument and without coercion.
 * this method is used only by ClassGenerator for static binding
 * @param methodName the method name to resolve
 * @param arguments  the static argument types
 * @return the matching method, or null
 */
protected MetaMethod pickMethod(String methodName, Class[] arguments) {
MetaMethod method = null;
List methods = getMethods(methodName);
if (!methods.isEmpty()) {
method = (MetaMethod) chooseMethod(methodName, methods, arguments, false);
// no coersion at classgen time.
// if (method == null) {
// method = (MetaMethod) chooseMethod(methodName, methods, arguments, true);
// }
}
return method;
}
/**
 * Invokes a static method of the given name on the object, consulting the
 * static-method cache first.  Resolution may fall through to the metaclass
 * of java.lang.Class (see pickStaticMethod); when nothing matches a
 * MissingMethodException is thrown.
 */
public Object invokeStaticMethod(Object object, String methodName, Object[] arguments) {
// System.out.println("Calling static method: " + methodName + " on args: " + InvokerHelper.toString(arguments));
// Class type = arguments == null ? null : arguments.getClass();
// System.out.println("Argument type: " + type);
// System.out.println("Type of first arg: " + arguments[0] + " type: " + arguments[0].getClass());
// lets try use the cache to find the method
MethodKey methodKey = new TemporaryMethodKey(methodName, arguments);
MetaMethod method = (MetaMethod) staticMethodCache.get(methodKey);
if (method == null) {
method = pickStaticMethod(object, methodName, arguments);
if (method != null) {
staticMethodCache.put(methodKey.createCopy(), method);
}
}
if (method != null) {
return doMethodInvoke(object, method, arguments);
}
/*
List methods = getStaticMethods(methodName);
if (!methods.isEmpty()) {
MetaMethod method = (MetaMethod) chooseMethod(methodName, methods, arguments, false);
if (method != null) {
return doMethodInvoke(theClass, method, arguments);
}
}
if (theClass != Class.class) {
try {
return registry.getMetaClass(Class.class).invokeMethod(object, methodName, arguments);
}
catch (GroovyRuntimeException e) {
// throw our own exception
}
}
*/
throw new MissingMethodException(methodName, theClass, arguments);
}
/**
 * Resolves a static method for the given runtime arguments; when this class
 * has no match, retries against the metaclass of java.lang.Class so that
 * Class's own methods remain callable.
 */
protected MetaMethod pickStaticMethod(Object object, String methodName, Object[] arguments) {
    MetaMethod found = null;
    List candidates = getStaticMethods(methodName);
    if (!candidates.isEmpty()) {
        found = (MetaMethod) chooseMethod(methodName, candidates, convertToTypeArray(arguments), false);
    }
    if (found == null && theClass != Class.class) {
        MetaClass classMetaClass = registry.getMetaClass(Class.class);
        found = classMetaClass.pickMethod(object, methodName, arguments);
    }
    return found;
}
/**
 * Resolves a static method for the given static argument types (strict,
 * no coercion); falls back to the metaclass of java.lang.Class when this
 * class has no match.
 */
protected MetaMethod pickStaticMethod(String methodName, Class[] arguments) {
MetaMethod method = null;
List methods = getStaticMethods(methodName);
if (!methods.isEmpty()) {
method = (MetaMethod) chooseMethod(methodName, methods, arguments, false);
// disabled to keep consistent with the original version of pickStaticMethod
// if (method == null) {
// method = (MetaMethod) chooseMethod(methodName, methods, arguments, true);
// }
}
if (method == null && theClass != Class.class) {
MetaClass classMetaClass = registry.getMetaClass(Class.class);
method = classMetaClass.pickMethod(methodName, arguments);
}
return method;
}
/**
 * Invokes a constructor matching the given arguments: strict match first,
 * then with coercion.  As a convenience, a single Map argument falls back
 * to the no-arg constructor followed by setProperties(bean, map).
 *
 * @throws GroovyRuntimeException when no constructor matches
 */
public Object invokeConstructor(Object[] arguments) {
Class[] argClasses = convertToTypeArray(arguments);
Constructor constructor = (Constructor) chooseMethod("<init>", constructors, argClasses, false);
if (constructor != null) {
return doConstructorInvoke(constructor, arguments);
}
else {
// retry allowing Groovy coercions
constructor = (Constructor) chooseMethod("<init>", constructors, argClasses, true);
if (constructor != null) {
return doConstructorInvoke(constructor, arguments);
}
}
if (arguments.length == 1) {
Object firstArgument = arguments[0];
if (firstArgument instanceof Map) {
// Map-constructor convenience: new Foo([a:1]) -> new Foo() + setProperties
constructor = (Constructor) chooseMethod("<init>", constructors, EMPTY_TYPE_ARRAY, false);
if (constructor != null) {
Object bean = doConstructorInvoke(constructor, EMPTY_ARRAY);
setProperties(bean, ((Map) firstArgument));
return bean;
}
}
}
throw new GroovyRuntimeException(
"Could not find matching constructor for: "
+ theClass.getName()
+ "("+InvokerHelper.toTypeString(arguments)+")");
}
/**
 * Sets a number of bean properties from the given Map where the keys are
 * the String names of properties and the values are the values of the
 * properties to set.  Keys with no corresponding MetaProperty are skipped.
 */
public void setProperties(Object bean, Map map) {
for (Iterator iter = map.entrySet().iterator(); iter.hasNext();) {
Map.Entry entry = (Map.Entry) iter.next();
String key = entry.getKey().toString();
// do we have this property?
if(propertyMap.get(key) == null)
continue;
Object value = entry.getValue();
try {
setProperty(bean, key, value);
}
catch (GroovyRuntimeException e) {
// lets ignore missing properties
/** todo should replace this code with a getMetaProperty(key) != null check
i.e. don't try and set a non-existent property
*/
}
}
}
/**
* @return the given property's value on the object
*/
    public Object getProperty(final Object object, final String property) {
        // Resolution order:
        //  1. a registered MetaProperty (field or bean property)
        //  2. a generic get(String) method (registered or via "use")
        //  3. a method of the same name, returned as a MethodClosure
        //  4. a static-style getter, protected fields, Class/Collection/array
        //     special cases, declared fields, and finally listener names
        MetaProperty mp = (MetaProperty) propertyMap.get(property);
        if(mp != null) {
            try {
                // delegate the get operation to the metaproperty
                return mp.getProperty(object);
            }
            catch(Exception e) {
                throw new GroovyRuntimeException("Cannot read property: " + property);
            }
        }
        if (genericGetMethod == null) {
            // Make sure there isn't a generic method in the "use" cases
            List possibleGenericMethods = getMethods("get");
            if (possibleGenericMethods != null) {
                for (Iterator i = possibleGenericMethods.iterator(); i.hasNext(); ) {
                    MetaMethod mmethod = (MetaMethod) i.next();
                    Class[] paramTypes = mmethod.getParameterTypes();
                    // only a get(String) qualifies as a generic property getter
                    if (paramTypes.length == 1 && paramTypes[0] == String.class) {
                        Object[] arguments = {property};
                        Object answer = doMethodInvoke(object, mmethod, arguments);
                        return answer;
                    }
                }
            }
        }
        else {
            Object[] arguments = { property };
            Object answer = doMethodInvoke(object, genericGetMethod, arguments);
            // jes bug? a property retrieved via a generic get() can't have a null value?
            // NOTE(review): a null answer falls through to the remaining lookups
            if (answer != null) {
                return answer;
            }
        }
        // is the property the name of a method? then hand back a closure
        // bound to that method
        List methods = getMethods(property);
        if (!methods.isEmpty()) {
            return new MethodClosure(object, property);
        }
        // lets try invoke a static getter method
        // this case is for protected fields. I wish there was a better way...
        Exception lastException = null;
        try {
            MetaMethod method = findGetter(object, "get" + capitalize(property));
            if (method != null) {
                return doMethodInvoke(object, method, EMPTY_ARRAY);
            }
        }
        catch (GroovyRuntimeException e) {
            // remember the failure so it can be chained into the final error
            lastException = e;
        }
        /** todo or are we an extensible groovy class? */
        if (genericGetMethod != null) {
            // a generic getter already answered null above; report absence as null
            return null;
        }
        else {
            /** todo these special cases should be special MetaClasses maybe */
            if (object instanceof Class) {
                // lets try a static field
                return getStaticProperty((Class) object, property);
            }
            if (object instanceof Collection) {
                // collections support property-style spread access
                return DefaultGroovyMethods.getAt((Collection) object, property);
            }
            if (object instanceof Object[]) {
                return DefaultGroovyMethods.getAt(Arrays.asList((Object[]) object), property);
            }
            // NOTE(review): 'instanceof Object' is true for any non-null object,
            // so this block is effectively an unconditional catch-all
            if (object instanceof Object) {
                try {
                    // lets try a public field
                    Field field = object.getClass().getDeclaredField(property);
                    return field.get(object);
                }
                catch (Exception e1) {
                    // fall through
                }
            }
            // a listener-registration name reads as null (no stored value)
            MetaMethod addListenerMethod = (MetaMethod) listeners.get(property);
            if (addListenerMethod != null) {
                /* @todo one day we could try return the previously registered Closure listener for easy removal */
                return null;
            }
            if (lastException == null)
                throw new MissingPropertyException(property, theClass);
            else
                throw new MissingPropertyException(property, theClass, lastException);
        }
    }
/**
* Get all the properties defined for this type
* @return a list of MetaProperty objects
*/
public List getProperties() {
// simply return the values of the metaproperty map as a List
return new ArrayList(propertyMap.values());
}
/**
* This will build up the property map (Map of MetaProperty objects, keyed on
* property name).
*/
    protected void setupProperties(PropertyDescriptor[] propertyDescriptors) {
        // Populates propertyMap in four passes; earlier passes win because
        // each pass skips names that are already present:
        //  1. public fields (walking up the superclass chain)
        //  2. the synthetic "length" property for array types
        //  3. bean PropertyDescriptors
        //  4. stray getFoo()/setFoo(x) methods not covered by a descriptor
        MetaProperty mp;
        Method method;
        MetaMethod getter = null;
        MetaMethod setter = null;
        Class klass;
        // first get the public fields and create MetaFieldProperty objects
        klass = theClass;
        while(klass != null) {
            Field[] fields = klass.getDeclaredFields();
            for(int i = 0; i < fields.length; i++) {
                // we're only interested in publics
                if((fields[i].getModifiers() & java.lang.reflect.Modifier.PUBLIC) == 0)
                    continue;
                // see if we already got this (a subclass shadow wins)
                if(propertyMap.get(fields[i].getName()) != null)
                    continue;
                // stick it in there!
                propertyMap.put(fields[i].getName(), new MetaFieldProperty(fields[i]));
            }
            // now get the super class
            klass = klass.getSuperclass();
        }
        // if this an Array, then add the special read-only "length" property
        if(theClass.isArray()) {
            propertyMap.put("length", arrayLengthProperty);
        }
        // now iterate over the map of property descriptors and generate
        // MetaBeanProperty objects
        for(int i=0; i<propertyDescriptors.length; i++) {
            PropertyDescriptor pd = propertyDescriptors[i];
            // skip if the field already exists in the map
            if(propertyMap.get(pd.getName()) != null)
                continue;
            // skip if the property type is unknown (this seems to be the case if the
            // property descriptor is based on a setX() method that has two parameters,
            // which is not a valid property)
            if(pd.getPropertyType() == null)
                continue;
            // get the getter method
            method = pd.getReadMethod();
            if(method != null)
                getter = findMethod(method);
            else
                getter = null;
            // get the setter method
            method = pd.getWriteMethod();
            if(method != null)
                setter = findMethod(method);
            else
                setter = null;
            // now create the MetaProperty object
            mp = new MetaBeanProperty(pd.getName(), pd.getPropertyType(), getter, setter);
            // put it in the list
            propertyMap.put(pd.getName(), mp);
        }
        // now look for any stray getters that may be used to define a property
        klass = theClass;
        while(klass != null) {
            Method[] methods = klass.getDeclaredMethods();
            for (int i = 0; i < methods.length; i++) {
                // filter out the privates
                if(Modifier.isPublic(methods[i].getModifiers()) == false)
                    continue;
                method = methods[i];
                String methodName = method.getName();
                // is this a getter? (getXxx with no parameters)
                if(methodName.startsWith("get") &&
                    methodName.length() > 3 &&
                    method.getParameterTypes().length == 0) {
                    // get the name of the property: getFoo -> foo
                    String propName = methodName.substring(3,4).toLowerCase() + methodName.substring(4);
                    // is this property already accounted for?
                    mp = (MetaProperty) propertyMap.get(propName);
                    if(mp != null) {
                        // we may have already found the setter for this
                        if(mp instanceof MetaBeanProperty && ((MetaBeanProperty) mp).getGetter() == null) {
                            // update the getter method to this one
                            ((MetaBeanProperty) mp).setGetter(findMethod(method));
                        }
                    }
                    else {
                        // we need to create a new property object
                        // type of the property is what the get method returns
                        MetaBeanProperty mbp = new MetaBeanProperty(propName,
                            method.getReturnType(),
                            findMethod(method), null);
                        // add it to the map
                        propertyMap.put(propName, mbp);
                    }
                }
                else if(methodName.startsWith("set") &&
                    methodName.length() > 3 &&
                    method.getParameterTypes().length == 1) {
                    // a setter: setFoo(x) -> property "foo"
                    String propName = methodName.substring(3,4).toLowerCase() + methodName.substring(4);
                    // did we already find the getter of this?
                    mp = (MetaProperty) propertyMap.get(propName);
                    if(mp != null) {
                        if(mp instanceof MetaBeanProperty && ((MetaBeanProperty) mp).getSetter() == null) {
                            // update the setter method to this one
                            ((MetaBeanProperty) mp).setSetter(findMethod(method));
                        }
                    }
                    else {
                        // this is a new write-only property (so far)
                        MetaBeanProperty mbp = new MetaBeanProperty(propName,
                            method.getParameterTypes()[0],
                            null,
                            findMethod(method));
                        // add it to the map
                        propertyMap.put(propName, mbp);
                    }
                }
            }
            // now get the super class
            klass = klass.getSuperclass();
        }
    }
/**
* Sets the property value on an object
*/
    public void setProperty(Object object, String property, Object newValue) {
        // Resolution order:
        //  1. a registered MetaProperty, with List/array-to-constructor
        //     coercions attempted when the direct set fails
        //  2. a listener-registration method when the value is a Closure
        //  3. a generic set(String, Object) method
        //  4. an explicit setFoo() invocation, then a declared field
        MetaProperty mp = (MetaProperty) propertyMap.get(property);
        if(mp != null) {
            try {
                mp.setProperty(object, newValue);
                return;
            }
            catch(ReadOnlyPropertyException e) {
                // just rethrow it; there's nothing left to do here
                throw e;
            }
            catch (Exception e) {
                // if the value is a List see if we can construct the value
                // from a constructor
                if (newValue == null)
                    return;
                if (newValue instanceof List) {
                    List list = (List) newValue;
                    int params = list.size();
                    // try a constructor of the property type whose arity
                    // matches the list size, e.g. point = [1, 2]
                    Constructor[] constructors = mp.getType().getConstructors();
                    for (int i = 0; i < constructors.length; i++) {
                        Constructor constructor = constructors[i];
                        if (constructor.getParameterTypes().length == params) {
                            Object value = doConstructorInvoke(constructor, list.toArray());
                            mp.setProperty(object, value);
                            return;
                        }
                    }
                    // if value is an array
                    Class parameterType = mp.getType();
                    if (parameterType.isArray()) {
                        Object objArray = asPrimitiveArray(list, parameterType);
                        mp.setProperty(object, objArray);
                        return;
                    }
                }
                // if value is an multidimensional array
                // jes currently this logic only supports metabeansproperties and
                // not metafieldproperties. It shouldn't be too hard to support
                // the latter...
                if (newValue.getClass().isArray() && mp instanceof MetaBeanProperty) {
                    MetaBeanProperty mbp = (MetaBeanProperty) mp;
                    List list = Arrays.asList((Object[])newValue);
                    MetaMethod setter = mbp.getSetter();
                    Class parameterType = setter.getParameterTypes()[0];
                    Class arrayType = parameterType.getComponentType();
                    Object objArray = Array.newInstance(arrayType, list.size());
                    // convert each row to a primitive sub-array
                    for (int i = 0; i < list.size(); i++) {
                        List list2 =Arrays.asList((Object[]) list.get(i));
                        Object objArray2 = asPrimitiveArray(list2, arrayType);
                        Array.set(objArray, i, objArray2);
                    }
                    doMethodInvoke(object, setter, new Object[]{
                        objArray
                    });
                    return;
                }
                throw new MissingPropertyException(property, theClass, e);
            }
        }
        try {
            MetaMethod addListenerMethod = (MetaMethod) listeners.get(property);
            if (addListenerMethod != null && newValue instanceof Closure) {
                // lets create a dynamic proxy that forwards listener
                // callbacks to the closure
                Object proxy =
                    createListenerProxy(addListenerMethod.getParameterTypes()[0], property, (Closure) newValue);
                doMethodInvoke(object, addListenerMethod, new Object[] { proxy });
                return;
            }
            if (genericSetMethod == null) {
                // Make sure there isn't a generic method in the "use" cases
                List possibleGenericMethods = getMethods("set");
                if (possibleGenericMethods != null) {
                    for (Iterator i = possibleGenericMethods.iterator(); i.hasNext(); ) {
                        MetaMethod mmethod = (MetaMethod) i.next();
                        Class[] paramTypes = mmethod.getParameterTypes();
                        // only a set(String, value) qualifies
                        if (paramTypes.length == 2 && paramTypes[0] == String.class) {
                            Object[] arguments = {property, newValue};
                            Object answer = doMethodInvoke(object, mmethod, arguments);
                            return;
                        }
                    }
                }
            }
            else {
                Object[] arguments = { property, newValue };
                doMethodInvoke(object, genericSetMethod, arguments);
                return;
            }
            /** todo or are we an extensible class? */
            // lets try invoke the set method
            // this is kind of ugly: if it is a protected field, we fall
            // all the way down to this klunky code. Need a better
            // way to handle this situation...
            String method = "set" + capitalize(property);
            try {
                invokeMethod(object, method, new Object[] { newValue });
            }
            catch (MissingMethodException e1) {
                // no setter either; last resort is a declared field
                try {
                    Field field = object.getClass().getDeclaredField(property);
                    field.set(object, newValue);
                }
                catch (Exception e2) {
                    throw new MissingPropertyException(property, theClass, e2);
                }
            }
        }
        catch (GroovyRuntimeException e) {
            throw new MissingPropertyException(property, theClass, e);
        }
        // if we got here, the damn thing just aint there...
        throw new MissingPropertyException(property, theClass);
    }
/**
* @param list
* @param parameterType
* @return
*/
private Object asPrimitiveArray(List list, Class parameterType) {
Class arrayType = parameterType.getComponentType();
Object objArray = Array.newInstance(arrayType, list.size());
for (int i = 0; i < list.size(); i++) {
Object obj = list.get(i);
if (arrayType.isPrimitive()) {
if (obj instanceof Integer) {
Array.setInt(objArray, i, ((Integer) obj).intValue());
}
else if (obj instanceof Double) {
Array.setDouble(objArray, i, ((Double) obj).doubleValue());
}
else if (obj instanceof Boolean) {
Array.setBoolean(objArray, i, ((Boolean) obj).booleanValue());
}
else if (obj instanceof Long) {
Array.setLong(objArray, i, ((Long) obj).longValue());
}
else if (obj instanceof Float) {
Array.setFloat(objArray, i, ((Float) obj).floatValue());
}
else if (obj instanceof Character) {
Array.setChar(objArray, i, ((Character) obj).charValue());
}
else if (obj instanceof Byte) {
Array.setByte(objArray, i, ((Byte) obj).byteValue());
}
else if (obj instanceof Short) {
Array.setShort(objArray, i, ((Short) obj).shortValue());
}
}
else {
Array.set(objArray, i, obj);
}
}
return objArray;
}
    /**
     * Lazily resolves the AST ClassNode for this class by locating its
     * .groovy source on the classpath and compiling it through the class
     * generation phase. Returns null for non-GroovyObject classes or when
     * the source cannot be found.
     */
    public ClassNode getClassNode() {
        if (classNode == null && GroovyObject.class.isAssignableFrom(theClass)) {
            // lets try load it from the classpath
            String className = theClass.getName();
            String groovyFile = className;
            // inner/closure classes map back to the enclosing source file
            int idx = groovyFile.indexOf('$');
            if (idx > 0) {
                groovyFile = groovyFile.substring(0, idx);
            }
            groovyFile = groovyFile.replace('.', '/') + ".groovy";
            URL url = theClass.getClassLoader().getResource(groovyFile);
            if (url == null) {
                // fall back to the thread context classloader
                url = Thread.currentThread().getContextClassLoader().getResource(groovyFile);
            }
            if (url != null) {
                try {
                    /**
                     * todo there is no CompileUnit in scope so class name
                     * checking won't work but that mostly affects the bytecode
                     * generation rather than viewing the AST
                     */
                    // capture the matching ClassNode as a side effect of
                    // compilation via this callback
                    CompilationUnit.ClassgenCallback search = new CompilationUnit.ClassgenCallback() {
                        public void call( ClassVisitor writer, ClassNode node ) {
                            if( node.getName().equals(theClass.getName()) ) {
                                MetaClass.this.classNode = node;
                            }
                        }
                    };
                    CompilationUnit unit = new CompilationUnit( getClass().getClassLoader() );
                    unit.setClassgenCallback( search );
                    unit.addSource( url );
                    unit.compile( Phases.CLASS_GENERATION );
                }
                catch (Exception e) {
                    throw new GroovyRuntimeException("Exception thrown parsing: " + groovyFile + ". Reason: " + e, e);
                }
            }
        }
        return classNode;
    }
public String toString() {
return super.toString() + "[" + theClass + "]";
}
// Implementation methods
//-------------------------------------------------------------------------
/**
* Converts the given object into an array; if its an array then just cast
* otherwise wrap it in an array
*/
protected Object[] asArray(Object arguments) {
if (arguments == null) {
return EMPTY_ARRAY;
}
if (arguments instanceof Tuple) {
Tuple tuple = (Tuple) arguments;
return tuple.toArray();
}
if (arguments instanceof Object[]) {
return (Object[]) arguments;
}
else {
return new Object[] { arguments };
}
}
/**
* @param listenerType
* the interface of the listener to proxy
* @param listenerMethodName
* the name of the method in the listener API to call the
* closure on
* @param closure
* the closure to invoke on the listenerMethodName method
* invocation
* @return a dynamic proxy which calls the given closure on the given
* method name
*/
protected Object createListenerProxy(Class listenerType, final String listenerMethodName, final Closure closure) {
InvocationHandler handler = new ClosureListener(listenerMethodName, closure);
return Proxy.newProxyInstance(listenerType.getClassLoader(), new Class[] { listenerType }, handler);
}
/**
* Adds all the methods declared in the given class to the metaclass
* ignoring any matching methods already defined by a derived class
*
* @param theClass
*/
protected void addMethods(Class theClass) {
Method[] methodArray = theClass.getDeclaredMethods();
for (int i = 0; i < methodArray.length; i++) {
Method reflectionMethod = methodArray[i];
if ( reflectionMethod.getName().indexOf('+') >= 0 ) {
continue;
}
MetaMethod method = createMetaMethod(reflectionMethod);
addMethod(method);
}
}
protected void addMethod(MetaMethod method) {
String name = method.getName();
//System.out.println(theClass.getName() + " == " + name + Arrays.asList(method.getParameterTypes()));
if (isGenericGetMethod(method) && genericGetMethod == null) {
genericGetMethod = method;
}
else if (isGenericSetMethod(method) && genericSetMethod == null) {
genericSetMethod = method;
}
if (method.isStatic()) {
List list = (List) staticMethodIndex.get(name);
if (list == null) {
list = new ArrayList();
staticMethodIndex.put(name, list);
list.add(method);
}
else {
if (!containsMatchingMethod(list, method)) {
list.add(method);
}
}
}
List list = (List) methodIndex.get(name);
if (list == null) {
list = new ArrayList();
methodIndex.put(name, list);
list.add(method);
}
else {
if (!containsMatchingMethod(list, method)) {
list.add(method);
}
}
}
/**
* @return true if a method of the same matching prototype was found in the
* list
*/
protected boolean containsMatchingMethod(List list, MetaMethod method) {
for (Iterator iter = list.iterator(); iter.hasNext();) {
MetaMethod aMethod = (MetaMethod) iter.next();
Class[] params1 = aMethod.getParameterTypes();
Class[] params2 = method.getParameterTypes();
if (params1.length == params2.length) {
boolean matches = true;
for (int i = 0; i < params1.length; i++) {
if (params1[i] != params2[i]) {
matches = false;
break;
}
}
if (matches) {
return true;
}
}
}
return false;
}
/**
* Adds all of the newly defined methods from the given class to this
* metaclass
*
* @param theClass
*/
protected void addNewStaticMethodsFrom(Class theClass) {
MetaClass interfaceMetaClass = registry.getMetaClass(theClass);
Iterator iter = interfaceMetaClass.newGroovyMethodsList.iterator();
while (iter.hasNext()) {
MetaMethod method = (MetaMethod) iter.next();
addMethod(method);
newGroovyMethodsList.add(method);
}
}
/**
* @return the value of the static property of the given class
*/
protected Object getStaticProperty(Class aClass, String property) {
//System.out.println("Invoking property: " + property + " on class: "
// + aClass);
Exception lastException = null;
try {
Field field = aClass.getField(property);
if (field != null) {
if ((field.getModifiers() & Modifier.STATIC) != 0) {
return field.get(null);
}
}
}
catch (Exception e) {
lastException = e;
}
// lets try invoke a static getter method
try {
MetaMethod method = findStaticGetter(aClass, "get" + capitalize(property));
if (method != null) {
return doMethodInvoke(aClass, method, EMPTY_ARRAY);
}
}
catch (GroovyRuntimeException e) {
throw new MissingPropertyException(property, aClass, e);
}
if (lastException == null) {
throw new MissingPropertyException(property, aClass);
}
else {
throw new MissingPropertyException(property, aClass, lastException);
}
}
/**
* @return the matching method which should be found
*/
protected MetaMethod findMethod(Method aMethod) {
List methods = getMethods(aMethod.getName());
for (Iterator iter = methods.iterator(); iter.hasNext();) {
MetaMethod method = (MetaMethod) iter.next();
if (method.isMethod(aMethod)) {
return method;
}
}
//log.warning("Creating reflection based dispatcher for: " + aMethod);
return new ReflectionMetaMethod(aMethod);
}
/**
* @return the getter method for the given object
*/
protected MetaMethod findGetter(Object object, String name) {
List methods = getMethods(name);
for (Iterator iter = methods.iterator(); iter.hasNext();) {
MetaMethod method = (MetaMethod) iter.next();
if (method.getParameterTypes().length == 0) {
return method;
}
}
return null;
}
/**
* @return the Method of the given name with no parameters or null
*/
protected MetaMethod findStaticGetter(Class type, String name) {
List methods = getStaticMethods(name);
for (Iterator iter = methods.iterator(); iter.hasNext();) {
MetaMethod method = (MetaMethod) iter.next();
if (method.getParameterTypes().length == 0) {
return method;
}
}
/** todo dirty hack - don't understand why this code is necessary - all methods should be in the allMethods list! */
try {
Method method = type.getMethod(name, EMPTY_TYPE_ARRAY);
if ((method.getModifiers() & Modifier.STATIC) != 0) {
return findMethod(method);
}
else {
return null;
}
}
catch (Exception e) {
return null;
}
}
    /**
     * Invokes the MetaMethod on the object, normalising the argument array
     * and retrying with GString-to-String and number coercions when the
     * initial call fails on a type mismatch.
     */
    protected Object doMethodInvoke(Object object, MetaMethod method, Object[] argumentArray) {
        try {
            if (argumentArray == null) {
                argumentArray = EMPTY_ARRAY;
            }
            else if (method.getParameterTypes().length == 1 && argumentArray.length == 0) {
                // a one-parameter method called with no arguments receives null
                argumentArray = ARRAY_WITH_NULL;
            }
            return method.invoke(object, argumentArray);
        }
        catch (ClassCastException e) {
            // retry once after coercing any GString arguments to String
            if (coerceGStrings(argumentArray)) {
                try {
                    return doMethodInvoke(object, method, argumentArray);
                }
                catch (Exception e2) {
                    // allow fall through
                }
            }
            throw new GroovyRuntimeException(
                "failed to invoke method: "
                    + method
                    + " on: "
                    + object
                    + " with arguments: "
                    + InvokerHelper.toString(argumentArray)
                    + " reason: "
                    + e,
                e);
        }
        catch (InvocationTargetException e) {
            // surface the target exception wrapped for the invoker layer
            throw new InvokerInvocationException(e);
        }
        catch (IllegalAccessException e) {
            throw new GroovyRuntimeException(
                "could not access method: "
                    + method
                    + " on: "
                    + object
                    + " with arguments: "
                    + InvokerHelper.toString(argumentArray)
                    + " reason: "
                    + e,
                e);
        }
        catch (IllegalArgumentException e) {
            // retry 1: GString coercion
            if (coerceGStrings(argumentArray)) {
                try {
                    return doMethodInvoke(object, method, argumentArray);
                }
                catch (Exception e2) {
                    // allow fall through
                }
            }
            // retry 2: numeric / wrapper-array coercion
            Object[] args = coerceNumbers(method, argumentArray);
            if (args != null) {
                try {
                    return doMethodInvoke(object, method, args);
                }
                catch (Exception e3) {
                    // allow fall through
                }
            }
            throw new GroovyRuntimeException(
                "failed to invoke method: "
                    + method
                    + " on: "
                    + object
                    + " with arguments: "
                    + InvokerHelper.toString(argumentArray)
                    + " reason: "
                    + e,
                e);
        }
        catch (RuntimeException e) {
            throw e;
        }
        catch (Exception e) {
            throw new GroovyRuntimeException(
                "failed to invoke method: "
                    + method
                    + " on: "
                    + object
                    + " with arguments: "
                    + InvokerHelper.toString(argumentArray)
                    + " reason: "
                    + e,
                e);
        }
    }
private static Object[] coerceNumbers(MetaMethod method, Object[] arguments) {
Object[] ans = null;
boolean coerced = false; // to indicate that at least one param is coerced
Class[] params = method.getParameterTypes();
if (params.length != arguments.length) {
return null;
}
ans = new Object[arguments.length];
for (int i = 0, size = arguments.length; i < size; i++) {
Object argument = arguments[i];
Class param = params[i];
if ((Number.class.isAssignableFrom(param) || param.isPrimitive()) && argument instanceof Number) { // Number types
if (param == Byte.class || param == Byte.TYPE ) {
ans[i] = new Byte(((Number)argument).byteValue());
coerced = true; continue;
}
if (param == Double.class || param == Double.TYPE) {
ans[i] = new Double(((Number)argument).doubleValue());
coerced = true; continue;
}
if (param == Float.class || param == Float.TYPE) {
ans[i] = new Float(((Number)argument).floatValue());
coerced = true; continue;
}
if (param == Integer.class || param == Integer.TYPE) {
ans[i] = new Integer(((Number)argument).intValue());
coerced = true; continue;
}
if (param == Long.class || param == Long.TYPE) {
ans[i] = new Long(((Number)argument).longValue());
coerced = true; continue;
}
if (param == Short.class || param == Short.TYPE) {
ans[i] = new Short(((Number)argument).shortValue());
coerced = true; continue;
}
if (param == BigDecimal.class ) {
ans[i] = new BigDecimal(((Number)argument).doubleValue());
coerced = true; continue;
}
if (param == BigInteger.class) {
ans[i] = new BigInteger(String.valueOf(((Number)argument).longValue()));
coerced = true; continue;
}
}
else if (param.isArray() && argument.getClass().isArray()) {
Class paramElem = param.getComponentType();
if (paramElem.isPrimitive()) {
if (paramElem == boolean.class && argument.getClass().getName().equals("[Ljava.lang.Boolean;")) {
ans[i] = InvokerHelper.convertToBooleanArray(argument);
coerced = true;
continue;
}
if (paramElem == byte.class && argument.getClass().getName().equals("[Ljava.lang.Byte;")) {
ans[i] = InvokerHelper.convertToByteArray(argument);
coerced = true;
continue;
}
if (paramElem == char.class && argument.getClass().getName().equals("[Ljava.lang.Character;")) {
ans[i] = InvokerHelper.convertToCharArray(argument);
coerced = true;
continue;
}
if (paramElem == short.class && argument.getClass().getName().equals("[Ljava.lang.Short;")) {
ans[i] = InvokerHelper.convertToShortArray(argument);
coerced = true;
continue;
}
if (paramElem == int.class && argument.getClass().getName().equals("[Ljava.lang.Integer;")) {
ans[i] = InvokerHelper.convertToIntArray(argument);
coerced = true;
continue;
}
if (paramElem == long.class
&& argument.getClass().getName().equals("[Ljava.lang.Long;")
&& argument.getClass().getName().equals("[Ljava.lang.Integer;")
) {
ans[i] = InvokerHelper.convertToLongArray(argument);
coerced = true;
continue;
}
if (paramElem == float.class
&& argument.getClass().getName().equals("[Ljava.lang.Float;")
&& argument.getClass().getName().equals("[Ljava.lang.Integer;")
) {
ans[i] = InvokerHelper.convertToFloatArray(argument);
coerced = true;
continue;
}
if (paramElem == double.class &&
argument.getClass().getName().equals("[Ljava.lang.Double;") &&
argument.getClass().getName().equals("[Ljava.lang.BigDecimal;") &&
argument.getClass().getName().equals("[Ljava.lang.Float;")) {
ans[i] = InvokerHelper.convertToDoubleArray(argument);
coerced = true;
continue;
}
}
}
}
return coerced ? ans : null;
}
    /**
     * Invokes the constructor with the given arguments, retrying once with
     * GString-to-String coercion on an argument-type mismatch and wrapping
     * all failures in Groovy runtime exceptions.
     */
    protected Object doConstructorInvoke(Constructor constructor, Object[] argumentArray) {
        try {
            // the following patch was provided by Mori Kouhei to fix JIRA 435
            /* but it opens the ctor up to everyone, so it is no longer private!
            final Constructor ctor = constructor;
            AccessController.doPrivileged(new PrivilegedAction() {
                public Object run() {
                    ctor.setAccessible(ctor.getDeclaringClass().equals(theClass));
                    return null;
                }
            });
            */
            // end of patch
            return constructor.newInstance(argumentArray);
        }
        catch (InvocationTargetException e) {
            // surface the target exception wrapped for the invoker layer
            throw new InvokerInvocationException(e);
        }
        catch (IllegalArgumentException e) {
            // retry once after coercing any GString arguments to String
            if (coerceGStrings(argumentArray)) {
                try {
                    return constructor.newInstance(argumentArray);
                }
                catch (Exception e2) {
                    // allow fall through
                }
            }
            throw new GroovyRuntimeException(
                "failed to invoke constructor: "
                    + constructor
                    + " with arguments: "
                    + InvokerHelper.toString(argumentArray)
                    + " reason: "
                    + e,
                e);
        }
        catch (IllegalAccessException e) {
            throw new GroovyRuntimeException(
                "could not access constructor: "
                    + constructor
                    + " with arguments: "
                    + InvokerHelper.toString(argumentArray)
                    + " reason: "
                    + e,
                e);
        }
        catch (Exception e) {
            throw new GroovyRuntimeException(
                "failed to invoke constructor: "
                    + constructor
                    + " with arguments: "
                    + InvokerHelper.toString(argumentArray)
                    + " reason: "
                    + e,
                e);
        }
    }
/**
* Chooses the correct method to use from a list of methods which match by
* name.
*
* @param methods
* the possible methods to choose from
* @param arguments
* the original argument to the method
* @return
*/
protected Object chooseMethod(String methodName, List methods, Class[] arguments, boolean coerce) {
int methodCount = methods.size();
if (methodCount <= 0) {
return null;
}
else if (methodCount == 1) {
Object method = methods.get(0);
if (isValidMethod(method, arguments, coerce)) {
return method;
}
return null;
}
Object answer = null;
if (arguments == null || arguments.length == 0) {
answer = chooseEmptyMethodParams(methods);
}
else if (arguments.length == 1 && arguments[0] == null) {
answer = chooseMostGeneralMethodWith1NullParam(methods);
}
else {
List matchingMethods = new ArrayList();
for (Iterator iter = methods.iterator(); iter.hasNext();) {
Object method = iter.next();
Class[] paramTypes;
// making this false helps find matches
if (isValidMethod(method, arguments, coerce)) {
matchingMethods.add(method);
}
}
if (matchingMethods.isEmpty()) {
return null;
}
else if (matchingMethods.size() == 1) {
return matchingMethods.get(0);
}
return chooseMostSpecificParams(methodName, matchingMethods, arguments);
}
if (answer != null) {
return answer;
}
throw new GroovyRuntimeException(
"Could not find which method to invoke from this list: "
+ methods
+ " for arguments: "
+ InvokerHelper.toString(arguments));
}
protected boolean isValidMethod(Object method, Class[] arguments, boolean includeCoerce) {
Class[] paramTypes = getParameterTypes(method);
return isValidMethod(paramTypes, arguments, includeCoerce);
}
public static boolean isValidMethod(Class[] paramTypes, Class[] arguments, boolean includeCoerce) {
if (arguments == null) {
return true;
}
int size = arguments.length;
boolean validMethod = false;
if (paramTypes.length == size) {
// lets check the parameter types match
validMethod = true;
for (int i = 0; i < size; i++) {
if (!isCompatibleClass(paramTypes[i], arguments[i], includeCoerce)) {
validMethod = false;
}
}
}
else {
if (paramTypes.length == 1 && size == 0) {
return true;
}
}
return validMethod;
}
    /**
     * From several callable candidates, picks the one whose parameter types
     * are most specific for the given argument types. An exact signature
     * match wins outright; otherwise candidates are compared pairwise and a
     * candidate replaces the current best when some parameter of the best is
     * not assignable from the candidate's corresponding parameter.
     */
    protected Object chooseMostSpecificParams(String name, List matchingMethods, Class[] arguments) {
        // fast path: an exact parameter-type match
        for (Iterator iter = matchingMethods.iterator(); iter.hasNext();) {
            Object method = iter.next();
            Class[] paramTypes = getParameterTypes(method);
            if (Arrays.equals(arguments, paramTypes)) return method;
        }
        Object answer = null;
        int size = arguments.length;
        Class[] mostSpecificTypes = null;
        for (Iterator iter = matchingMethods.iterator(); iter.hasNext();) {
            Object method = iter.next();
            Class[] paramTypes = getParameterTypes(method);
            if (answer == null) {
                // first candidate becomes the provisional best
                answer = method;
                mostSpecificTypes = paramTypes;
            }
            else {
                boolean useThisMethod = false;
                for (int i = 0; i < size; i++) {
                    Class mostSpecificType = mostSpecificTypes[i];
                    Class type = paramTypes[i];
                    // prefer this candidate when it narrows some parameter
                    if (!isAssignableFrom(mostSpecificType, type)) {
                        useThisMethod = true;
                        break;
                    }
                }
                if (useThisMethod) {
                    if (size > 1) {
                        // guard against overlapping, non-comparable signatures
                        checkForInvalidOverloading(name, mostSpecificTypes, paramTypes);
                    }
                    answer = method;
                    mostSpecificTypes = paramTypes;
                }
            }
        }
        return answer;
    }
/**
 * Checks that one of the parameter-type lists is a superset of the other and
 * that the two lists don't conflict. e.g. foo(String, Object) and
 * foo(Object, String) would conflict if called with foo("a", "b").
 *
 * Note that this method is only called with 2 possible signatures, so
 * obviously invalid combinations have already been filtered out: if there were
 * methods foo(String, Object) and foo(Object, String) then one of these would
 * already be excluded for a call such as foo(12, "a").
 *
 * @throws GroovyRuntimeException when the two prototypes overlap ambiguously
 */
protected void checkForInvalidOverloading(String name, Class[] baseTypes, Class[] derivedTypes) {
    int count = baseTypes.length;
    for (int i = 0; i < count; i++) {
        // Every "derived" parameter must be assignable from its "base" counterpart.
        if (!isAssignableFrom(derivedTypes[i], baseTypes[i])) {
            throw new GroovyRuntimeException(
                "Ambiguous method overloading for method: "
                    + name
                    + ". Cannot resolve which method to invoke due to overlapping prototypes between: "
                    + InvokerHelper.toString(baseTypes)
                    + " and: "
                    + InvokerHelper.toString(derivedTypes));
        }
    }
}
/**
 * Returns the declared parameter types of a MetaMethod, reflective Method, or
 * Constructor.
 *
 * @throws IllegalArgumentException if the object is none of those three types
 */
protected Class[] getParameterTypes(Object methodOrConstructor) {
    if (methodOrConstructor instanceof MetaMethod) {
        return ((MetaMethod) methodOrConstructor).getParameterTypes();
    }
    if (methodOrConstructor instanceof Method) {
        return ((Method) methodOrConstructor).getParameterTypes();
    }
    if (methodOrConstructor instanceof Constructor) {
        return ((Constructor) methodOrConstructor).getParameterTypes();
    }
    throw new IllegalArgumentException("Must be a Method or Constructor");
}
/**
 * @return the method with exactly one (non-primitive) parameter whose type is
 *         the most general (e.g. Object), or null when no such method exists;
 *         primitive-typed parameters are skipped because they cannot take null
 */
protected Object chooseMostGeneralMethodWith1NullParam(List methods) {
    Class widest = null;
    Object chosen = null;
    for (Iterator it = methods.iterator(); it.hasNext();) {
        Object candidate = it.next();
        Class[] types = getParameterTypes(candidate);
        if (types.length != 1) {
            continue;
        }
        Class paramType = types[0];
        if (paramType.isPrimitive()) {
            continue; // a primitive parameter can never accept null
        }
        // Keep the candidate whose single parameter is at least as general as
        // the best seen so far.
        if (widest == null || isAssignableFrom(widest, paramType)) {
            widest = paramType;
            chosen = candidate;
        }
    }
    return chosen;
}
/**
 * @return the first method in the list that takes no parameters, or null if
 *         there is no zero-argument method
 *         (the previous javadoc incorrectly described a 1-parameter method)
 */
protected Object chooseEmptyMethodParams(List methods) {
for (Iterator iter = methods.iterator(); iter.hasNext();) {
Object method = iter.next();
Class[] paramTypes = getParameterTypes(method);
int paramLength = paramTypes.length;
if (paramLength == 0) {
return method;
}
}
return null;
}
/**
 * Tells whether the given value can be passed where the given type is
 * expected. A null value or a direct instance is always compatible; otherwise
 * primitive widening/boxing rules, array component compatibility, and
 * (optionally) Groovy coercions are consulted in that order.
 *
 * @param type          the declared parameter type
 * @param value         the actual argument value (may be null)
 * @param includeCoerce whether GString-to-String and Number coercions count
 */
protected static boolean isCompatibleInstance(Class type, Object value, boolean includeCoerce) {
// Fast path: null matches anything, and a direct instance is compatible.
boolean answer = value == null || type.isInstance(value);
if (!answer) {
if (type.isPrimitive()) {
// Boxed values acceptable for each primitive; the widening choices
// mirror isCompatibleClass (e.g. double also accepts Float/Integer/BigDecimal).
if (type == int.class) {
return value instanceof Integer;
}
else if (type == double.class) {
return value instanceof Double || value instanceof Float || value instanceof Integer || value instanceof BigDecimal;
}
else if (type == boolean.class) {
return value instanceof Boolean;
}
else if (type == long.class) {
return value instanceof Long || value instanceof Integer;
}
else if (type == float.class) {
return value instanceof Float || value instanceof Integer;
}
else if (type == char.class) {
return value instanceof Character;
}
else if (type == byte.class) {
return value instanceof Byte;
}
else if (type == short.class) {
return value instanceof Short;
}
}
else if(type.isArray() && value.getClass().isArray()) {
// Arrays are compatible when their component types are (no coercion here).
return isCompatibleClass(type.getComponentType(), value.getClass().getComponentType(), false);
}
else if (includeCoerce) {
if (type == String.class && value instanceof GString) {
// GStrings can always be coerced to Strings.
return true;
}
else if (value instanceof Number) {
// lets allow numbers to be coerced downwards?
return Number.class.isAssignableFrom(type);
}
}
}
return answer;
}
/**
 * Class-level counterpart of isCompatibleInstance: tells whether a value of
 * class {@code value} can be passed where {@code type} is expected, checking
 * direct assignability, primitive/wrapper pairs, array component types, and
 * (optionally) Groovy coercions, in that order.
 *
 * @param type          the declared parameter type
 * @param value         the argument's class (null matches anything)
 * @param includeCoerce whether GString-to-String and Number coercions count
 */
protected static boolean isCompatibleClass(Class type, Class value, boolean includeCoerce) {
boolean answer = value == null || type.isAssignableFrom(value); // this might have taken care of primitive types, rendering part of the following code unnecessary
if (!answer) {
if (type.isPrimitive()) {
// Accepted wrapper classes per primitive; note double also accepts
// Float/Integer/BigDecimal, mirroring isCompatibleInstance.
if (type == int.class) {
return value == Integer.class;// || value == BigDecimal.class; //br added BigDecimal
}
else if (type == double.class) {
return value == Double.class || value == Float.class || value == Integer.class || value == BigDecimal.class;
}
else if (type == boolean.class) {
return value == Boolean.class;
}
else if (type == long.class) {
return value == Long.class || value == Integer.class; // || value == BigDecimal.class;//br added BigDecimal
}
else if (type == float.class) {
return value == Float.class || value == Integer.class; // || value == BigDecimal.class;//br added BigDecimal
}
else if (type == char.class) {
return value == Character.class;
}
else if (type == byte.class) {
return value == Byte.class;
}
else if (type == short.class) {
return value == Short.class;
}
}
else if(type.isArray() && value.isArray()) {
// Arrays are compatible when their component classes are (no coercion here).
return isCompatibleClass(type.getComponentType(), value.getComponentType(), false);
}
else if (includeCoerce) {
//if (type == String.class && value == GString.class) {
if (type == String.class && GString.class.isAssignableFrom(value)) {
// Any GString subclass can be coerced to String.
return true;
}
else if (value == Number.class) {
// lets allow numbers to be coerced downwards?
return Number.class.isAssignableFrom(type);
}
}
}
return answer;
}
/**
 * Tells whether {@code mostSpecificType} can be assigned where {@code type}
 * is expected, handling primitive widening explicitly and falling back to
 * reflective assignability (with autoboxing) for reference types.
 *
 * @param mostSpecificType the candidate (narrower) type
 * @param type             the target (wider) type
 */
protected boolean isAssignableFrom(Class mostSpecificType, Class type) {
// let's handle primitives
if (mostSpecificType.isPrimitive() && type.isPrimitive()) {
if (mostSpecificType == type) {
return true;
}
else { // note: there is not coercion for boolean and char. Range matters, precision doesn't
// For each target primitive, list which narrower primitives widen into it.
if (type == int.class) {
return
mostSpecificType == int.class
|| mostSpecificType == short.class
|| mostSpecificType == byte.class;
}
else if (type == double.class) {
return
mostSpecificType == double.class
|| mostSpecificType == int.class
|| mostSpecificType == long.class
|| mostSpecificType == short.class
|| mostSpecificType == byte.class
|| mostSpecificType == float.class;
}
else if (type == long.class) {
return
mostSpecificType == long.class
|| mostSpecificType == int.class
|| mostSpecificType == short.class
|| mostSpecificType == byte.class;
}
else if (type == float.class) {
return
mostSpecificType == float.class
|| mostSpecificType == int.class
|| mostSpecificType == long.class
|| mostSpecificType == short.class
|| mostSpecificType == byte.class;
}
else if (type == short.class) {
return
mostSpecificType == short.class
|| mostSpecificType == byte.class;
}
else {
// boolean/char (and byte as a target) have no widening sources.
return false;
}
}
}
// Reference types: try plain assignability first, then retry with both
// sides autoboxed so primitive/wrapper mixes are accepted.
boolean answer = type.isAssignableFrom(mostSpecificType);
if (!answer) {
answer = autoboxType(type).isAssignableFrom(autoboxType(mostSpecificType));
}
return answer;
}
/**
 * Maps a primitive type to its wrapper class; any non-primitive type (and the
 * void pseudo-type) is returned unchanged.
 */
private Class autoboxType(Class type) {
    if (!type.isPrimitive()) {
        return type;
    }
    if (type == int.class) return Integer.class;
    if (type == long.class) return Long.class;
    if (type == double.class) return Double.class;
    if (type == float.class) return Float.class;
    if (type == boolean.class) return Boolean.class;
    if (type == char.class) return Character.class;
    if (type == byte.class) return Byte.class;
    if (type == short.class) return Short.class;
    return type; // void.class falls through unchanged
}
/**
 * Coerces any GString instances in the array into Strings, in place.
 *
 * @return true if at least one element was replaced
 */
protected static boolean coerceGStrings(Object[] arguments) {
    boolean changed = false;
    int count = arguments.length;
    for (int i = 0; i < count; i++) {
        if (arguments[i] instanceof GString) {
            arguments[i] = arguments[i].toString();
            changed = true;
        }
    }
    return changed;
}
/** True for the generic property mutator signature: set(name, value). */
protected boolean isGenericSetMethod(MetaMethod method) {
    boolean named = method.getName().equals("set");
    return named && method.getParameterTypes().length == 2;
}
/** True for the generic property accessor signature: get(String name). */
protected boolean isGenericGetMethod(MetaMethod method) {
    if (!method.getName().equals("get")) {
        return false;
    }
    Class[] types = method.getParameterTypes();
    return types.length == 1 && types[0] == String.class;
}
/**
 * Registers every public static method of theClass that has at least one
 * parameter; the first parameter type identifies the owner class under which
 * the method is registered, either as an instance or as a static method.
 */
private void registerMethods(boolean instanceMethods) {
    Method[] declared = theClass.getMethods();
    for (int i = 0; i < declared.length; i++) {
        Method candidate = declared[i];
        if (!MethodHelper.isStatic(candidate)) {
            continue;
        }
        Class[] types = candidate.getParameterTypes();
        if (types.length == 0) {
            continue; // no owner parameter to key the registration on
        }
        Class owner = types[0];
        if (instanceMethods) {
            registry.lookup(owner).addNewInstanceMethod(candidate);
        } else {
            registry.lookup(owner).addNewStaticMethod(candidate);
        }
    }
}
/** Registers theClass's eligible static methods as static extension methods (see registerMethods). */
protected void registerStaticMethods() {
registerMethods(false);
}
/** Registers theClass's eligible static methods as instance extension methods (see registerMethods). */
protected void registerInstanceMethods() {
registerMethods(true);
}
/**
 * Upper-cases the first character of the given property name, as used when
 * deriving accessor names from a property.
 *
 * NOTE(review): this uses the default-locale String.toUpperCase(), so the
 * result can differ under locales such as Turkish ("i" maps to dotted capital
 * I) — confirm whether a locale-independent conversion is wanted here.
 *
 * @param property a non-empty property name
 * @return the name with its first character upper-cased
 */
protected String capitalize(String property) {
    // substring(1) is equivalent to the former substring(1, property.length()).
    return property.substring(0, 1).toUpperCase() + property.substring(1);
}
/**
 * Call this method when any mutation method is called, such as adding a new
 * method to this MetaClass, so that any caching or bytecode generation can be
 * regenerated.
 */
protected synchronized void onMethodChange() {
// Clearing the reflector forces checkInitialised() to regenerate it.
reflector = null;
}
/**
 * Lazily completes initialisation: inherited methods are gathered on the
 * first call, and the bytecode reflector is (re)generated whenever it has
 * been cleared (see onMethodChange()).
 */
protected synchronized void checkInitialised() {
if (!initialised) {
initialised = true;
addInheritedMethods(theClass);
}
if (reflector == null) {
generateReflector();
}
}
/**
 * Wraps a reflective Method in a MetaMethod. When the method qualifies for
 * bytecode dispatch it is added to allMethods and assigned a reflector index;
 * otherwise (or when useReflection is set) a reflection-based MetaMethod is
 * returned instead.
 *
 * Side effect: may mutate allMethods and make the method accessible.
 */
protected MetaMethod createMetaMethod(final Method method) {
if (registry.useAccessible()) {
// Suppress Java access checks so non-public members can be invoked.
AccessController.doPrivileged(new PrivilegedAction() {
public Object run() {
method.setAccessible(true);
return null;
}
});
}
if (useReflection) {
//log.warning("Creating reflection based dispatcher for: " + method);
return new ReflectionMetaMethod(method);
}
MetaMethod answer = new MetaMethod(method);
if (isValidReflectorMethod(answer)) {
// Eligible for generated-bytecode dispatch: record it and remember its
// slot in the reflector (1-based: size() after the add).
allMethods.add(answer);
answer.setMethodIndex(allMethods.size());
}
else {
//log.warning("Creating reflection based dispatcher for: " + method);
answer = new ReflectionMetaMethod(method);
}
return answer;
}
/**
 * Tells whether the method can be dispatched through generated reflector
 * bytecode: it must be public, and if its declaring class is not public the
 * method must also appear on a public interface (in which case the interface
 * is recorded on the MetaMethod for dispatch).
 */
protected boolean isValidReflectorMethod(MetaMethod method) {
// We cannot use a reflector if the method is private, protected, or package accessible only.
if (!method.isPublic()) {
return false;
}
Class declaringClass = method.getDeclaringClass();
if (!Modifier.isPublic(declaringClass.getModifiers())) {
// lets see if this method is implemented on an interface
List list = getInterfaceMethods();
for (Iterator iter = list.iterator(); iter.hasNext();) {
MetaMethod aMethod = (MetaMethod) iter.next();
if (method.isSame(aMethod)) {
// Dispatch through the public interface instead of the hidden class.
method.setInterfaceClass(aMethod.getDeclaringClass());
return true;
}
}
/** todo */
//log.warning("Cannot invoke method on protected/private class which isn't visible on an interface so must use reflection instead: " + method);
return false;
}
return true;
}
/**
 * Builds (or loads) the Reflector for all known methods and attaches it to
 * each MetaMethod so they can dispatch through generated bytecode.
 *
 * @throws RuntimeException if no reflector could be obtained
 */
protected void generateReflector() {
    reflector = loadReflector(allMethods);
    if (reflector == null) {
        throw new RuntimeException("Should have a reflector!");
    }
    // lets set the reflector on all the methods
    Iterator it = allMethods.iterator();
    while (it.hasNext()) {
        MetaMethod metaMethod = (MetaMethod) it.next();
        metaMethod.setReflector(reflector);
    }
}
/**
 * Obtains the Reflector instance for theClass: first tries to load an
 * already-generated reflector class by its conventional name; if that fails,
 * generates the reflector bytecode and defines/loads it.
 *
 * @param methods the MetaMethods the generated reflector must dispatch
 * @throws GroovyRuntimeException when the generated reflector cannot be loaded
 */
protected Reflector loadReflector(List methods) {
ReflectorGenerator generator = new ReflectorGenerator(methods);
String className = theClass.getName();
// All generated reflectors live under the "gjdk." package prefix.
String packagePrefix = "gjdk.";
/*
if (className.startsWith("java.")) {
packagePrefix = "gjdk.";
}
*/
String name = packagePrefix + className + "_GroovyReflector";
if (theClass.isArray()) {
// Arrays get a reflector named after their component type.
String componentName = theClass.getComponentType().getName();
/*
if (componentName.startsWith("java.")) {
packagePrefix = "gjdk.";
}
*/
name = packagePrefix + componentName + "_GroovyReflectorArray";
}
// lets see if its already loaded
try {
Class type = loadReflectorClass(name);
return (Reflector) type.newInstance();
}
catch (AccessControlException ace) {
//Don't ignore this exception type
throw ace;
}
catch (Exception e) {
// lets ignore, lets generate it && load it
}
// Not found: generate the reflector bytecode and define it at runtime.
ClassWriter cw = new ClassWriter(true);
generator.generate(cw, name);
byte[] bytecode = cw.toByteArray();
try {
Class type = loadReflectorClass(name, bytecode);
return (Reflector) type.newInstance();
}
catch (Exception e) {
throw new GroovyRuntimeException("Could not load the reflector for class: " + name + ". Reason: " + e, e);
}
}
/**
 * Defines the generated reflector class from raw bytecode, preferring the
 * GroovyClassLoader of theClass (inside a privileged block because
 * defineClass requires the defineClassInPackage permission); otherwise the
 * registry's loader is used.
 */
protected Class loadReflectorClass(final String name, final byte[] bytecode) throws ClassNotFoundException {
ClassLoader loader = theClass.getClassLoader();
if (loader instanceof GroovyClassLoader) {
final GroovyClassLoader gloader = (GroovyClassLoader) loader;
return (Class) AccessController.doPrivileged(new PrivilegedAction() {
public Object run() {
return gloader.defineClass(name, bytecode, getClass().getProtectionDomain());
}
});
}
return registry.loadClass(name, bytecode);
}
/**
 * Loads a previously generated reflector class by name, preferring the
 * GroovyClassLoader of theClass over the registry's loader.
 */
protected Class loadReflectorClass(String name) throws ClassNotFoundException {
    ClassLoader loader = theClass.getClassLoader();
    if (loader instanceof GroovyClassLoader) {
        return ((GroovyClassLoader) loader).loadClass(name);
    }
    return registry.loadClass(name);
}
/**
 * @return the list of all MetaMethods eligible for reflector dispatch.
 * NOTE(review): this exposes the internal list directly, so callers could
 * mutate it — compare getMetaMethods(), which returns a copy; confirm
 * whether a defensive copy is wanted here too.
 */
public List getMethods() {
return allMethods;
}
/** @return a defensive shallow copy of the new Groovy methods list. */
public List getMetaMethods() {
    // clone() yields a shallow ArrayList copy, keeping the internal list safe.
    ArrayList copy = (ArrayList) ((ArrayList) newGroovyMethodsList).clone();
    return copy;
}
/**
 * Lazily builds and caches MetaMethods for every method declared on any
 * interface implemented by theClass or one of its superclasses.
 */
protected synchronized List getInterfaceMethods() {
    if (interfaceMethods != null) {
        return interfaceMethods;
    }
    interfaceMethods = new ArrayList();
    // Walk the superclass chain, harvesting each level's interfaces.
    for (Class type = theClass; type != null; type = type.getSuperclass()) {
        Class[] interfaces = type.getInterfaces();
        for (int i = 0; i < interfaces.length; i++) {
            addInterfaceMethods(interfaceMethods, interfaces[i].getMethods());
        }
    }
    return interfaceMethods;
}
/** Wraps each reflective Method in a MetaMethod and appends it to the list. */
private void addInterfaceMethods(List list, Method[] methods) {
    int count = methods.length;
    for (int i = 0; i < count; i++) {
        list.add(createMetaMethod(methods[i]));
    }
}
/**
 * Converts an instance array to the corresponding array of runtime classes.
 *
 * @param args the argument instances, may be null
 * @return an array of the arguments' classes (with null entries for null
 *         arguments), or null when args itself is null
 */
Class[] convertToTypeArray(Object[] args) {
    if (args == null) {
        return null;
    }
    int length = args.length;
    Class[] types = new Class[length];
    for (int i = 0; i < length; i++) {
        Object arg = args[i];
        // Array slots default to null, so only non-null arguments need a type.
        if (arg != null) {
            types[i] = arg.getClass();
        }
    }
    return types;
}
}
|
Added a check that the coercion was actually successful, to avoid endless loops.
git-svn-id: aa43ce4553b005588bb3cc6c16966320b011facb@1779 a5544e8c-8a19-0410-ba12-f9af4593a198
|
src/main/groovy/lang/MetaClass.java
|
Added a check that the coercion was actually successful, to avoid endless loops.
|
|
Java
|
apache-2.0
|
b93282dbfecf4959dc1753f59dd73da5e4046395
| 0
|
grro/http2
|
/*
* Copyright (c) 2015 Gregor Roth
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package eu.redzoo.article.javaworld.http2;
import java.io.IOException;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.eclipse.jetty.client.HttpClient;
import org.eclipse.jetty.client.api.ContentResponse;
import org.eclipse.jetty.http2.client.HTTP2Client;
import org.eclipse.jetty.http2.client.http.HttpClientTransportOverHTTP2;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
@SuppressWarnings("serial")
public class HighLevelHttp2ClientTest {

    private WebServer server;

    @Before
    public void before() {
        // Minimal servlet that always answers with a fixed body.
        class MyServlet extends HttpServlet {
            @Override
            protected void service(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
                resp.getOutputStream().write("...my body data...".getBytes());
            }
        };
        server = WebServer.servlet(new MyServlet())
                          .start();
    }

    @After
    public void after() {
        server.stop();
    }

    /**
     * Exercises the high-level Jetty HttpClient running over the HTTP/2
     * transport against the locally started test server.
     *
     * Fixes: the client is now stopped in a finally block so it is released
     * even when the assertion fails, and the redundant trailing server.stop()
     * was removed (the @After method already stops the server; stopping it
     * twice was the previous behavior).
     */
    @Test
    public void highLevelApiTest() throws Exception {
        HTTP2Client lowLevelClient = new HTTP2Client();
        lowLevelClient.start();

        HttpClient client = new HttpClient(new HttpClientTransportOverHTTP2(lowLevelClient), null);
        client.start();
        try {
            ContentResponse response = client.GET("http://localhost:" + server.getLocalport());
            System.out.println(response.getVersion() + " " + response.getStatus() + " ");
            Assert.assertEquals("...my body data...", new String(response.getContent()));
        } finally {
            client.stop();
        }
    }
}
|
src/test/java/eu/redzoo/article/javaworld/http2/HighLevelHttp2ClientTest.java
|
/*
* Copyright (c) 2015 Gregor Roth
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package eu.redzoo.article.javaworld.http2;
import java.io.IOException;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.eclipse.jetty.client.HttpClient;
import org.eclipse.jetty.client.api.ContentResponse;
import org.eclipse.jetty.http2.client.HTTP2Client;
import org.eclipse.jetty.http2.client.http.HttpClientTransportOverHTTP2;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
@SuppressWarnings("serial")
public class HighLevelHttp2ClientTest {

    private WebServer server;

    @Before
    public void before() {
        // Minimal servlet that always answers with a fixed body.
        class MyServlet extends HttpServlet {
            @Override
            protected void service(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
                resp.getOutputStream().write("...my body data...".getBytes());
            }
        };
        server = WebServer.servlet(new MyServlet())
                          .start();
    }

    @After
    public void after() {
        server.stop();
    }

    /**
     * Exercises the high-level Jetty HttpClient running over the HTTP/2
     * transport against the locally started test server.
     *
     * NOTE(review): the name "lowLevelApiTest" is misleading — this test uses
     * the high-level HttpClient API; the name is kept for compatibility but
     * should probably be renamed to highLevelApiTest.
     *
     * Fixes: the client is now stopped in a finally block so it is released
     * even when the assertion fails, and the redundant trailing server.stop()
     * was removed (the @After method already stops the server; stopping it
     * twice was the previous behavior).
     */
    @Test
    public void lowLevelApiTest() throws Exception {
        HTTP2Client lowLevelClient = new HTTP2Client();
        lowLevelClient.start();

        HttpClient client = new HttpClient(new HttpClientTransportOverHTTP2(lowLevelClient), null);
        client.start();
        try {
            ContentResponse response = client.GET("http://localhost:" + server.getLocalport());
            System.out.println(response.getVersion() + " " + response.getStatus() + " ");
            Assert.assertEquals("...my body data...", new String(response.getContent()));
        } finally {
            client.stop();
        }
    }
}
|
test method renamed
|
src/test/java/eu/redzoo/article/javaworld/http2/HighLevelHttp2ClientTest.java
|
test method renamed
|
|
Java
|
apache-2.0
|
eef2afe829a7e1f7955dc67b9d50be8caf87cb1d
| 0
|
dmvolod/camel,haku/camel,dpocock/camel,zregvart/camel,snadakuduru/camel,tlehoux/camel,arnaud-deprez/camel,NetNow/camel,mike-kukla/camel,koscejev/camel,veithen/camel,chirino/camel,ramonmaruko/camel,dkhanolkar/camel,dmvolod/camel,dmvolod/camel,nboukhed/camel,qst-jdc-labs/camel,mgyongyosi/camel,dvankleef/camel,hqstevenson/camel,chirino/camel,askannon/camel,anton-k11/camel,MohammedHammam/camel,sebi-hgdata/camel,bdecoste/camel,manuelh9r/camel,tdiesler/camel,mike-kukla/camel,johnpoth/camel,alvinkwekel/camel,nicolaferraro/camel,aaronwalker/camel,erwelch/camel,davidwilliams1978/camel,eformat/camel,arnaud-deprez/camel,MohammedHammam/camel,YMartsynkevych/camel,jollygeorge/camel,yogamaha/camel,veithen/camel,pmoerenhout/camel,tarilabs/camel,sverkera/camel,stalet/camel,davidwilliams1978/camel,chanakaudaya/camel,tkopczynski/camel,bgaudaen/camel,mnki/camel,jollygeorge/camel,borcsokj/camel,YoshikiHigo/camel,tarilabs/camel,yogamaha/camel,tdiesler/camel,isavin/camel,erwelch/camel,drsquidop/camel,arnaud-deprez/camel,engagepoint/camel,salikjan/camel,davidkarlsen/camel,MrCoder/camel,nboukhed/camel,anoordover/camel,acartapanis/camel,JYBESSON/camel,bfitzpat/camel,noelo/camel,onders86/camel,neoramon/camel,maschmid/camel,jarst/camel,iweiss/camel,objectiser/camel,gnodet/camel,duro1/camel,lburgazzoli/camel,ekprayas/camel,acartapanis/camel,rparree/camel,haku/camel,maschmid/camel,sebi-hgdata/camel,dsimansk/camel,jpav/camel,nikvaessen/camel,jpav/camel,royopa/camel,snadakuduru/camel,grange74/camel,duro1/camel,CandleCandle/camel,NickCis/camel,isururanawaka/camel,jkorab/camel,christophd/camel,veithen/camel,curso007/camel,noelo/camel,cunningt/camel,isururanawaka/camel,NickCis/camel,mohanaraosv/camel,tadayosi/camel,gilfernandes/camel,dmvolod/camel,anoordover/camel,tadayosi/camel,ge0ffrey/camel,neoramon/camel,drsquidop/camel,davidkarlsen/camel,skinzer/camel,kevinearls/camel,gautric/camel,stalet/camel,mgyongyosi/camel,jameszkw/camel,tdiesler/camel,jlpedrosa/camel,cunningt/camel,woj-i/camel,bdecoste/cam
el,FingolfinTEK/camel,lowwool/camel,lburgazzoli/camel,chanakaudaya/camel,scranton/camel,MohammedHammam/camel,johnpoth/camel,bdecoste/camel,gilfernandes/camel,chanakaudaya/camel,chirino/camel,cunningt/camel,mnki/camel,maschmid/camel,sebi-hgdata/camel,apache/camel,grgrzybek/camel,brreitme/camel,isururanawaka/camel,nikhilvibhav/camel,anton-k11/camel,jpav/camel,sirlatrom/camel,edigrid/camel,brreitme/camel,NetNow/camel,JYBESSON/camel,ullgren/camel,snurmine/camel,satishgummadelli/camel,stravag/camel,gnodet/camel,curso007/camel,mnki/camel,drsquidop/camel,iweiss/camel,sirlatrom/camel,engagepoint/camel,partis/camel,joakibj/camel,atoulme/camel,sverkera/camel,grgrzybek/camel,yuruki/camel,pax95/camel,sirlatrom/camel,zregvart/camel,mcollovati/camel,edigrid/camel,CandleCandle/camel,isavin/camel,gautric/camel,grange74/camel,sebi-hgdata/camel,mohanaraosv/camel,jameszkw/camel,logzio/camel,ullgren/camel,acartapanis/camel,yury-vashchyla/camel,apache/camel,josefkarasek/camel,stravag/camel,borcsokj/camel,koscejev/camel,scranton/camel,jamesnetherton/camel,davidwilliams1978/camel,mike-kukla/camel,ssharma/camel,jarst/camel,satishgummadelli/camel,koscejev/camel,manuelh9r/camel,apache/camel,jamesnetherton/camel,FingolfinTEK/camel,bfitzpat/camel,qst-jdc-labs/camel,objectiser/camel,dsimansk/camel,woj-i/camel,jkorab/camel,jamesnetherton/camel,pkletsko/camel,coderczp/camel,anoordover/camel,hqstevenson/camel,anton-k11/camel,jkorab/camel,engagepoint/camel,oalles/camel,pplatek/camel,eformat/camel,manuelh9r/camel,oscerd/camel,isururanawaka/camel,driseley/camel,lasombra/camel,mohanaraosv/camel,isavin/camel,tadayosi/camel,grgrzybek/camel,tkopczynski/camel,yuruki/camel,allancth/camel,ekprayas/camel,oscerd/camel,pplatek/camel,manuelh9r/camel,ge0ffrey/camel,rparree/camel,jkorab/camel,jkorab/camel,ullgren/camel,lowwool/camel,ssharma/camel,bfitzpat/camel,borcsokj/camel,akhettar/camel,FingolfinTEK/camel,tlehoux/camel,rmarting/camel,royopa/camel,rmarting/camel,gyc567/camel,gyc567/camel,lowwool/camel,bgaudaen
/camel,tlehoux/camel,acartapanis/camel,hqstevenson/camel,igarashitm/camel,NickCis/camel,FingolfinTEK/camel,sverkera/camel,prashant2402/camel,trohovsky/camel,johnpoth/camel,dmvolod/camel,trohovsky/camel,yuruki/camel,woj-i/camel,Thopap/camel,gilfernandes/camel,qst-jdc-labs/camel,gnodet/camel,chanakaudaya/camel,jmandawg/camel,stalet/camel,nikhilvibhav/camel,sirlatrom/camel,mzapletal/camel,allancth/camel,hqstevenson/camel,jarst/camel,lasombra/camel,YMartsynkevych/camel,oscerd/camel,kevinearls/camel,woj-i/camel,yogamaha/camel,mnki/camel,adessaigne/camel,yury-vashchyla/camel,oalles/camel,jarst/camel,chirino/camel,trohovsky/camel,veithen/camel,bhaveshdt/camel,jarst/camel,yogamaha/camel,pmoerenhout/camel,RohanHart/camel,davidwilliams1978/camel,apache/camel,alvinkwekel/camel,ekprayas/camel,DariusX/camel,sebi-hgdata/camel,punkhorn/camel-upstream,bhaveshdt/camel,satishgummadelli/camel,igarashitm/camel,NetNow/camel,ekprayas/camel,pax95/camel,rmarting/camel,tdiesler/camel,mcollovati/camel,joakibj/camel,aaronwalker/camel,snurmine/camel,johnpoth/camel,nikvaessen/camel,dvankleef/camel,coderczp/camel,christophd/camel,neoramon/camel,ssharma/camel,jonmcewen/camel,adessaigne/camel,onders86/camel,royopa/camel,grange74/camel,josefkarasek/camel,gautric/camel,pplatek/camel,ramonmaruko/camel,nikhilvibhav/camel,duro1/camel,lburgazzoli/camel,lasombra/camel,ekprayas/camel,dmvolod/camel,CandleCandle/camel,pplatek/camel,nikvaessen/camel,bhaveshdt/camel,snurmine/camel,jonmcewen/camel,edigrid/camel,jmandawg/camel,mike-kukla/camel,igarashitm/camel,rparree/camel,pkletsko/camel,jonmcewen/camel,jonmcewen/camel,skinzer/camel,lburgazzoli/camel,lburgazzoli/apache-camel,igarashitm/camel,allancth/camel,partis/camel,apache/camel,Fabryprog/camel,pax95/camel,stalet/camel,dvankleef/camel,mcollovati/camel,partis/camel,MrCoder/camel,jamesnetherton/camel,bhaveshdt/camel,eformat/camel,tarilabs/camel,bfitzpat/camel,jameszkw/camel,YMartsynkevych/camel,josefkarasek/camel,jameszkw/camel,punkhorn/camel-upstream,Mohamme
dHammam/camel,YoshikiHigo/camel,driseley/camel,yury-vashchyla/camel,JYBESSON/camel,igarashitm/camel,pkletsko/camel,MohammedHammam/camel,jollygeorge/camel,bdecoste/camel,pplatek/camel,joakibj/camel,jmandawg/camel,sabre1041/camel,sabre1041/camel,CodeSmell/camel,pax95/camel,driseley/camel,w4tson/camel,josefkarasek/camel,prashant2402/camel,driseley/camel,brreitme/camel,mzapletal/camel,josefkarasek/camel,davidwilliams1978/camel,erwelch/camel,grange74/camel,gyc567/camel,logzio/camel,tadayosi/camel,iweiss/camel,YMartsynkevych/camel,dkhanolkar/camel,grange74/camel,lburgazzoli/camel,snurmine/camel,dsimansk/camel,oscerd/camel,coderczp/camel,sirlatrom/camel,partis/camel,onders86/camel,sabre1041/camel,onders86/camel,drsquidop/camel,tarilabs/camel,ramonmaruko/camel,jameszkw/camel,jlpedrosa/camel,akhettar/camel,RohanHart/camel,pmoerenhout/camel,mohanaraosv/camel,gnodet/camel,lasombra/camel,punkhorn/camel-upstream,YMartsynkevych/camel,curso007/camel,lowwool/camel,anoordover/camel,dvankleef/camel,RohanHart/camel,akhettar/camel,Thopap/camel,logzio/camel,joakibj/camel,ullgren/camel,arnaud-deprez/camel,brreitme/camel,maschmid/camel,lowwool/camel,mohanaraosv/camel,aaronwalker/camel,stalet/camel,tdiesler/camel,noelo/camel,jonmcewen/camel,nikvaessen/camel,bgaudaen/camel,yuruki/camel,MohammedHammam/camel,mzapletal/camel,ge0ffrey/camel,anton-k11/camel,davidkarlsen/camel,snadakuduru/camel,mcollovati/camel,askannon/camel,christophd/camel,grgrzybek/camel,trohovsky/camel,snadakuduru/camel,DariusX/camel,NetNow/camel,mzapletal/camel,joakibj/camel,scranton/camel,ssharma/camel,oalles/camel,sabre1041/camel,kevinearls/camel,skinzer/camel,jollygeorge/camel,aaronwalker/camel,pkletsko/camel,pmoerenhout/camel,snurmine/camel,edigrid/camel,punkhorn/camel-upstream,bgaudaen/camel,cunningt/camel,skinzer/camel,dkhanolkar/camel,jlpedrosa/camel,pmoerenhout/camel,ssharma/camel,YoshikiHigo/camel,noelo/camel,brreitme/camel,satishgummadelli/camel,jpav/camel,atoulme/camel,acartapanis/camel,snurmine/camel,kevinearls/
camel,isavin/camel,logzio/camel,jkorab/camel,bfitzpat/camel,nicolaferraro/camel,atoulme/camel,trohovsky/camel,dsimansk/camel,atoulme/camel,christophd/camel,johnpoth/camel,FingolfinTEK/camel,anton-k11/camel,tadayosi/camel,acartapanis/camel,gautric/camel,prashant2402/camel,atoulme/camel,stravag/camel,duro1/camel,hqstevenson/camel,CandleCandle/camel,ekprayas/camel,yuruki/camel,oalles/camel,edigrid/camel,iweiss/camel,CodeSmell/camel,koscejev/camel,chirino/camel,NetNow/camel,chanakaudaya/camel,dsimansk/camel,logzio/camel,erwelch/camel,joakibj/camel,snadakuduru/camel,coderczp/camel,koscejev/camel,alvinkwekel/camel,sverkera/camel,isururanawaka/camel,cunningt/camel,arnaud-deprez/camel,snadakuduru/camel,ge0ffrey/camel,Thopap/camel,NickCis/camel,oalles/camel,jlpedrosa/camel,jameszkw/camel,jarst/camel,borcsokj/camel,drsquidop/camel,scranton/camel,anoordover/camel,rparree/camel,edigrid/camel,oalles/camel,pkletsko/camel,nboukhed/camel,pplatek/camel,bdecoste/camel,JYBESSON/camel,dsimansk/camel,dvankleef/camel,hqstevenson/camel,mzapletal/camel,skinzer/camel,engagepoint/camel,mnki/camel,dpocock/camel,nicolaferraro/camel,objectiser/camel,gyc567/camel,tarilabs/camel,borcsokj/camel,jpav/camel,scranton/camel,lburgazzoli/apache-camel,stravag/camel,adessaigne/camel,yury-vashchyla/camel,tdiesler/camel,curso007/camel,nboukhed/camel,maschmid/camel,askannon/camel,cunningt/camel,mike-kukla/camel,lburgazzoli/camel,MrCoder/camel,veithen/camel,mohanaraosv/camel,haku/camel,logzio/camel,YoshikiHigo/camel,apache/camel,MrCoder/camel,lburgazzoli/apache-camel,mgyongyosi/camel,mgyongyosi/camel,gyc567/camel,mnki/camel,gilfernandes/camel,lburgazzoli/apache-camel,satishgummadelli/camel,allancth/camel,haku/camel,YoshikiHigo/camel,davidkarlsen/camel,jollygeorge/camel,w4tson/camel,noelo/camel,bgaudaen/camel,akhettar/camel,borcsokj/camel,rmarting/camel,stravag/camel,duro1/camel,MrCoder/camel,askannon/camel,sverkera/camel,nboukhed/camel,pkletsko/camel,mgyongyosi/camel,erwelch/camel,gyc567/camel,NickCis/camel,s
cranton/camel,maschmid/camel,pmoerenhout/camel,tkopczynski/camel,Thopap/camel,dvankleef/camel,onders86/camel,adessaigne/camel,atoulme/camel,jmandawg/camel,CodeSmell/camel,yury-vashchyla/camel,jamesnetherton/camel,neoramon/camel,askannon/camel,dkhanolkar/camel,tkopczynski/camel,iweiss/camel,ge0ffrey/camel,johnpoth/camel,dpocock/camel,eformat/camel,salikjan/camel,logzio/camel,RohanHart/camel,allancth/camel,jmandawg/camel,yogamaha/camel,driseley/camel,nicolaferraro/camel,jlpedrosa/camel,w4tson/camel,engagepoint/camel,duro1/camel,kevinearls/camel,manuelh9r/camel,yuruki/camel,ramonmaruko/camel,neoramon/camel,gnodet/camel,DariusX/camel,JYBESSON/camel,RohanHart/camel,gautric/camel,Fabryprog/camel,grange74/camel,bfitzpat/camel,mgyongyosi/camel,tadayosi/camel,christophd/camel,zregvart/camel,Thopap/camel,kevinearls/camel,adessaigne/camel,Thopap/camel,yury-vashchyla/camel,objectiser/camel,JYBESSON/camel,w4tson/camel,noelo/camel,jamesnetherton/camel,akhettar/camel,isururanawaka/camel,veithen/camel,nikvaessen/camel,YoshikiHigo/camel,igarashitm/camel,koscejev/camel,lasombra/camel,bgaudaen/camel,adessaigne/camel,coderczp/camel,askannon/camel,stalet/camel,ramonmaruko/camel,dpocock/camel,curso007/camel,dpocock/camel,mike-kukla/camel,sverkera/camel,partis/camel,yogamaha/camel,bdecoste/camel,coderczp/camel,CandleCandle/camel,rparree/camel,dkhanolkar/camel,YMartsynkevych/camel,curso007/camel,pax95/camel,tlehoux/camel,ge0ffrey/camel,gautric/camel,alvinkwekel/camel,aaronwalker/camel,partis/camel,skinzer/camel,nikvaessen/camel,chanakaudaya/camel,stravag/camel,mzapletal/camel,tlehoux/camel,prashant2402/camel,grgrzybek/camel,jmandawg/camel,Fabryprog/camel,nikhilvibhav/camel,qst-jdc-labs/camel,NickCis/camel,josefkarasek/camel,tkopczynski/camel,anton-k11/camel,RohanHart/camel,sirlatrom/camel,sabre1041/camel,arnaud-deprez/camel,rparree/camel,rmarting/camel,isavin/camel,aaronwalker/camel,sabre1041/camel,eformat/camel,trohovsky/camel,pplatek/camel,christophd/camel,oscerd/camel,w4tson/camel,qst-j
dc-labs/camel,MrCoder/camel,lowwool/camel,woj-i/camel,Fabryprog/camel,driseley/camel,DariusX/camel,jpav/camel,bhaveshdt/camel,jonmcewen/camel,jollygeorge/camel,allancth/camel,w4tson/camel,lasombra/camel,royopa/camel,ssharma/camel,jlpedrosa/camel,lburgazzoli/apache-camel,dpocock/camel,royopa/camel,akhettar/camel,oscerd/camel,tarilabs/camel,gilfernandes/camel,FingolfinTEK/camel,brreitme/camel,iweiss/camel,rmarting/camel,sebi-hgdata/camel,lburgazzoli/apache-camel,manuelh9r/camel,CandleCandle/camel,davidwilliams1978/camel,pax95/camel,bhaveshdt/camel,royopa/camel,chirino/camel,tkopczynski/camel,qst-jdc-labs/camel,satishgummadelli/camel,anoordover/camel,eformat/camel,neoramon/camel,prashant2402/camel,dkhanolkar/camel,tlehoux/camel,CodeSmell/camel,haku/camel,ramonmaruko/camel,NetNow/camel,gilfernandes/camel,prashant2402/camel,grgrzybek/camel,onders86/camel,isavin/camel,zregvart/camel,woj-i/camel,drsquidop/camel,nboukhed/camel,haku/camel,erwelch/camel
|
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.builder;
import org.apache.camel.Expression;
import org.apache.camel.Processor;
import org.apache.camel.model.LoggingLevel;
import org.apache.camel.processor.DeadLetterChannel;
import org.apache.camel.processor.ErrorHandlerSupport;
import org.apache.camel.processor.Logger;
import org.apache.camel.processor.RecipientList;
import org.apache.camel.processor.RedeliveryPolicy;
import org.apache.camel.processor.exceptionpolicy.ExceptionPolicyStrategy;
import org.apache.camel.spi.RouteContext;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
/**
* A builder of a <a
* href="http://activemq.apache.org/camel/dead-letter-channel.html">Dead Letter
* Channel</a>
*
* @version $Revision$
*/
public class DeadLetterChannelBuilder extends ErrorHandlerBuilderSupport {
    private RedeliveryPolicy redeliveryPolicy = new RedeliveryPolicy();
    private ExceptionPolicyStrategy exceptionPolicyStrategy = ErrorHandlerSupport.createDefaultExceptionPolicyStrategy();
    private ProcessorFactory deadLetterFactory;
    private Processor defaultDeadLetterEndpoint;
    private Expression defaultDeadLetterEndpointExpression;
    private String defaultDeadLetterEndpointUri = "log:org.apache.camel.DeadLetterChannel?level=error";
    private Logger logger = DeadLetterChannel.createDefaultLogger();

    public DeadLetterChannelBuilder() {
    }

    /**
     * Creates a builder whose dead letter channel always routes failed
     * exchanges to the given processor.
     */
    public DeadLetterChannelBuilder(Processor processor) {
        this(new ConstantProcessorBuilder(processor));
    }

    /**
     * Creates a builder using the given factory to create the dead letter
     * destination processor.
     */
    public DeadLetterChannelBuilder(ProcessorFactory deadLetterFactory) {
        this.deadLetterFactory = deadLetterFactory;
    }

    /**
     * Returns a copy of this builder. Note that only the redelivery policy is
     * deep-copied; the dead letter factory (and, implicitly, the logger and
     * exception policy of the copy's defaults) is shared with this builder.
     */
    public ErrorHandlerBuilder copy() {
        DeadLetterChannelBuilder answer = new DeadLetterChannelBuilder(deadLetterFactory);
        answer.setRedeliveryPolicy(getRedeliveryPolicy().copy());
        return answer;
    }

    /**
     * Creates the {@link DeadLetterChannel} error handler wrapping the given
     * processor, using the configured dead letter destination, redelivery
     * policy, logger and exception policy strategy.
     */
    public Processor createErrorHandler(RouteContext routeContext, Processor processor) throws Exception {
        Processor deadLetter = getDeadLetterFactory().createProcessor();
        DeadLetterChannel answer = new DeadLetterChannel(processor, deadLetter, getRedeliveryPolicy(), getLogger(), getExceptionPolicyStrategy());
        configure(answer);
        return answer;
    }

    // Builder methods
    // -------------------------------------------------------------------------

    /**
     * Sets the multiplier used to increase the delay between redeliveries when
     * exponential back off is enabled.
     */
    public DeadLetterChannelBuilder backOffMultiplier(double backOffMultiplier) {
        getRedeliveryPolicy().backOffMultiplier(backOffMultiplier);
        return this;
    }

    /**
     * Sets the percentage used for collision avoidance when calculating the
     * redelivery delay.
     */
    public DeadLetterChannelBuilder collisionAvoidancePercent(short collisionAvoidancePercent) {
        getRedeliveryPolicy().collisionAvoidancePercent(collisionAvoidancePercent);
        return this;
    }

    /**
     * Sets the initial delay in milliseconds between redelivery attempts.
     */
    public DeadLetterChannelBuilder delay(long delay) {
        getRedeliveryPolicy().delay(delay);
        return this;
    }

    /**
     * Sets the maximum number of redelivery attempts before the exchange is
     * sent to the dead letter destination.
     */
    public DeadLetterChannelBuilder maximumRedeliveries(int maximumRedeliveries) {
        getRedeliveryPolicy().maximumRedeliveries(maximumRedeliveries);
        return this;
    }

    /**
     * Sets the upper bound in milliseconds for the computed redelivery delay.
     */
    public DeadLetterChannelBuilder maximumRedeliveryDelay(long maximumRedeliveryDelay) {
        getRedeliveryPolicy().maximumRedeliveryDelay(maximumRedeliveryDelay);
        return this;
    }

    /**
     * Enables collision avoidance, adding some randomness to redelivery delays.
     */
    public DeadLetterChannelBuilder useCollisionAvoidance() {
        getRedeliveryPolicy().useCollisionAvoidance();
        return this;
    }

    /**
     * Enables exponential back off between redelivery attempts.
     */
    public DeadLetterChannelBuilder useExponentialBackOff() {
        getRedeliveryPolicy().useExponentialBackOff();
        return this;
    }

    /**
     * Sets the logging level used when all redelivery attempts have been
     * exhausted.
     */
    public DeadLetterChannelBuilder retriesExhaustedLogLevel(LoggingLevel retriesExhaustedLogLevel) {
        getRedeliveryPolicy().setRetriesExhaustedLogLevel(retriesExhaustedLogLevel);
        return this;
    }

    /**
     * Sets the logging level used when a redelivery attempt is made.
     */
    public DeadLetterChannelBuilder retryAttemptedLogLevel(LoggingLevel retryAttemptedLogLevel) {
        getRedeliveryPolicy().setRetryAttemptedLogLevel(retryAttemptedLogLevel);
        return this;
    }

    /**
     * Sets the logger used for caught exceptions
     */
    public DeadLetterChannelBuilder logger(Logger logger) {
        setLogger(logger);
        return this;
    }

    /**
     * Sets the logging level of exceptions caught
     */
    public DeadLetterChannelBuilder loggingLevel(LoggingLevel level) {
        getLogger().setLevel(level);
        return this;
    }

    /**
     * Sets the log used for caught exceptions
     */
    public DeadLetterChannelBuilder log(Log log) {
        getLogger().setLog(log);
        return this;
    }

    /**
     * Sets the log used for caught exceptions
     */
    public DeadLetterChannelBuilder log(String log) {
        return log(LogFactory.getLog(log));
    }

    /**
     * Sets the log used for caught exceptions
     */
    public DeadLetterChannelBuilder log(Class<?> log) {
        // Class<?> instead of raw Class: erases to the same signature, so this
        // stays binary compatible while avoiding raw-type warnings
        return log(LogFactory.getLog(log));
    }

    /**
     * Sets the exception policy to use
     */
    public DeadLetterChannelBuilder exceptionPolicyStrategy(ExceptionPolicyStrategy exceptionPolicyStrategy) {
        setExceptionPolicyStrategy(exceptionPolicyStrategy);
        return this;
    }

    // Properties
    // -------------------------------------------------------------------------

    public RedeliveryPolicy getRedeliveryPolicy() {
        return redeliveryPolicy;
    }

    /**
     * Sets the redelivery policy
     */
    public void setRedeliveryPolicy(RedeliveryPolicy redeliveryPolicy) {
        this.redeliveryPolicy = redeliveryPolicy;
    }

    /**
     * Returns the factory used to create the dead letter destination, lazily
     * defaulting to one that returns {@link #getDefaultDeadLetterEndpoint()}.
     */
    public ProcessorFactory getDeadLetterFactory() {
        if (deadLetterFactory == null) {
            deadLetterFactory = new ProcessorFactory() {
                public Processor createProcessor() {
                    return getDefaultDeadLetterEndpoint();
                }
            };
        }
        return deadLetterFactory;
    }

    /**
     * Sets the default dead letter queue factory
     */
    public void setDeadLetterFactory(ProcessorFactory deadLetterFactory) {
        this.deadLetterFactory = deadLetterFactory;
    }

    /**
     * Returns the default dead letter destination, lazily created as a
     * {@link RecipientList} over the default endpoint expression.
     */
    public Processor getDefaultDeadLetterEndpoint() {
        if (defaultDeadLetterEndpoint == null) {
            defaultDeadLetterEndpoint = new RecipientList(getDefaultDeadLetterEndpointExpression());
        }
        return defaultDeadLetterEndpoint;
    }

    /**
     * Sets the default dead letter endpoint used
     */
    public void setDefaultDeadLetterEndpoint(Processor defaultDeadLetterEndpoint) {
        this.defaultDeadLetterEndpoint = defaultDeadLetterEndpoint;
    }

    /**
     * Returns the expression deciding the dead letter endpoint, lazily
     * defaulting to a constant expression of the default endpoint URI.
     */
    public Expression getDefaultDeadLetterEndpointExpression() {
        if (defaultDeadLetterEndpointExpression == null) {
            defaultDeadLetterEndpointExpression = ExpressionBuilder
                .constantExpression(getDefaultDeadLetterEndpointUri());
        }
        return defaultDeadLetterEndpointExpression;
    }

    /**
     * Sets the expression used to decide the dead letter channel endpoint for
     * an exchange if no factory is provided via
     * {@link #setDeadLetterFactory(ProcessorFactory)}
     */
    public void setDefaultDeadLetterEndpointExpression(Expression defaultDeadLetterEndpointExpression) {
        this.defaultDeadLetterEndpointExpression = defaultDeadLetterEndpointExpression;
    }

    public String getDefaultDeadLetterEndpointUri() {
        return defaultDeadLetterEndpointUri;
    }

    /**
     * Sets the default dead letter endpoint URI used if no factory is provided
     * via {@link #setDeadLetterFactory(ProcessorFactory)} and no expression is
     * provided via {@link #setDefaultDeadLetterEndpointExpression(Expression)}
     *
     * @param defaultDeadLetterEndpointUri the default URI if no deadletter
     *                factory or expression is provided
     */
    public void setDefaultDeadLetterEndpointUri(String defaultDeadLetterEndpointUri) {
        this.defaultDeadLetterEndpointUri = defaultDeadLetterEndpointUri;
    }

    public Logger getLogger() {
        return logger;
    }

    public void setLogger(Logger logger) {
        this.logger = logger;
    }

    /**
     * Sets the exception policy strategy to use for resolving the {@link org.apache.camel.model.ExceptionType}
     * to use for a given thrown exception
     */
    public ExceptionPolicyStrategy getExceptionPolicyStrategy() {
        return exceptionPolicyStrategy;
    }

    public void setExceptionPolicyStrategy(ExceptionPolicyStrategy exceptionPolicyStrategy) {
        this.exceptionPolicyStrategy = exceptionPolicyStrategy;
    }

    @Override
    public String toString() {
        return "DeadLetterChannelBuilder(" + (deadLetterFactory != null ? deadLetterFactory : defaultDeadLetterEndpoint) + ")";
    }
}
|
camel-core/src/main/java/org/apache/camel/builder/DeadLetterChannelBuilder.java
|
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.builder;
import org.apache.camel.Expression;
import org.apache.camel.Processor;
import org.apache.camel.model.LoggingLevel;
import org.apache.camel.processor.DeadLetterChannel;
import org.apache.camel.processor.ErrorHandlerSupport;
import org.apache.camel.processor.Logger;
import org.apache.camel.processor.RecipientList;
import org.apache.camel.processor.RedeliveryPolicy;
import org.apache.camel.processor.exceptionpolicy.ExceptionPolicyStrategy;
import org.apache.camel.spi.RouteContext;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
/**
* A builder of a <a
* href="http://activemq.apache.org/camel/dead-letter-channel.html">Dead Letter
* Channel</a>
*
* @version $Revision$
*/
public class DeadLetterChannelBuilder extends ErrorHandlerBuilderSupport {
    private RedeliveryPolicy redeliveryPolicy = new RedeliveryPolicy();
    private ExceptionPolicyStrategy exceptionPolicyStrategy = ErrorHandlerSupport.createDefaultExceptionPolicyStrategy();
    private ProcessorFactory deadLetterFactory;
    private Processor defaultDeadLetterEndpoint;
    private Expression defaultDeadLetterEndpointExpression;
    private String defaultDeadLetterEndpointUri = "log:org.apache.camel.DeadLetterChannel?level=error";
    private Logger logger = DeadLetterChannel.createDefaultLogger();

    public DeadLetterChannelBuilder() {
    }

    /**
     * Creates a builder whose dead letter channel always routes failed
     * exchanges to the given processor.
     */
    public DeadLetterChannelBuilder(Processor processor) {
        this(new ConstantProcessorBuilder(processor));
    }

    /**
     * Creates a builder using the given factory to create the dead letter
     * destination processor.
     */
    public DeadLetterChannelBuilder(ProcessorFactory deadLetterFactory) {
        this.deadLetterFactory = deadLetterFactory;
    }

    /**
     * Returns a copy of this builder. Note that only the redelivery policy is
     * deep-copied; the dead letter factory is shared with this builder.
     */
    public ErrorHandlerBuilder copy() {
        DeadLetterChannelBuilder answer = new DeadLetterChannelBuilder(deadLetterFactory);
        answer.setRedeliveryPolicy(getRedeliveryPolicy().copy());
        return answer;
    }

    /**
     * Creates the {@link DeadLetterChannel} error handler wrapping the given
     * processor, using the configured dead letter destination, redelivery
     * policy, logger and exception policy strategy.
     */
    public Processor createErrorHandler(RouteContext routeContext, Processor processor) throws Exception {
        Processor deadLetter = getDeadLetterFactory().createProcessor();
        DeadLetterChannel answer = new DeadLetterChannel(processor, deadLetter, getRedeliveryPolicy(), getLogger(), getExceptionPolicyStrategy());
        configure(answer);
        return answer;
    }

    // Builder methods
    // -------------------------------------------------------------------------

    /**
     * Sets the multiplier used to increase the delay between redeliveries when
     * exponential back off is enabled.
     */
    public DeadLetterChannelBuilder backOffMultiplier(double backOffMultiplier) {
        getRedeliveryPolicy().backOffMultiplier(backOffMultiplier);
        return this;
    }

    /**
     * Sets the percentage used for collision avoidance when calculating the
     * redelivery delay.
     */
    public DeadLetterChannelBuilder collisionAvoidancePercent(short collisionAvoidancePercent) {
        getRedeliveryPolicy().collisionAvoidancePercent(collisionAvoidancePercent);
        return this;
    }

    /**
     * Sets the initial delay in milliseconds between redelivery attempts.
     */
    public DeadLetterChannelBuilder delay(long delay) {
        getRedeliveryPolicy().delay(delay);
        return this;
    }

    /**
     * Sets the maximum number of redelivery attempts before the exchange is
     * sent to the dead letter destination.
     */
    public DeadLetterChannelBuilder maximumRedeliveries(int maximumRedeliveries) {
        getRedeliveryPolicy().maximumRedeliveries(maximumRedeliveries);
        return this;
    }

    /**
     * Sets the upper bound in milliseconds for the computed redelivery delay.
     */
    public DeadLetterChannelBuilder maximumRedeliveryDelay(long maximumRedeliveryDelay) {
        getRedeliveryPolicy().maximumRedeliveryDelay(maximumRedeliveryDelay);
        return this;
    }

    /**
     * Enables collision avoidance, adding some randomness to redelivery delays.
     */
    public DeadLetterChannelBuilder useCollisionAvoidance() {
        getRedeliveryPolicy().useCollisionAvoidance();
        return this;
    }

    /**
     * Enables exponential back off between redelivery attempts.
     */
    public DeadLetterChannelBuilder useExponentialBackOff() {
        getRedeliveryPolicy().useExponentialBackOff();
        return this;
    }

    /**
     * Sets the logger used for caught exceptions
     */
    public DeadLetterChannelBuilder logger(Logger logger) {
        setLogger(logger);
        return this;
    }

    /**
     * Sets the logging level of exceptions caught
     */
    public DeadLetterChannelBuilder loggingLevel(LoggingLevel level) {
        getLogger().setLevel(level);
        return this;
    }

    /**
     * Sets the log used for caught exceptions
     */
    public DeadLetterChannelBuilder log(Log log) {
        getLogger().setLog(log);
        return this;
    }

    /**
     * Sets the log used for caught exceptions
     */
    public DeadLetterChannelBuilder log(String log) {
        return log(LogFactory.getLog(log));
    }

    /**
     * Sets the log used for caught exceptions
     */
    public DeadLetterChannelBuilder log(Class<?> log) {
        // Class<?> instead of raw Class: erases to the same signature, so this
        // stays binary compatible while avoiding raw-type warnings
        return log(LogFactory.getLog(log));
    }

    /**
     * Sets the exception policy to use
     */
    public DeadLetterChannelBuilder exceptionPolicyStrategy(ExceptionPolicyStrategy exceptionPolicyStrategy) {
        setExceptionPolicyStrategy(exceptionPolicyStrategy);
        return this;
    }

    // Properties
    // -------------------------------------------------------------------------

    public RedeliveryPolicy getRedeliveryPolicy() {
        return redeliveryPolicy;
    }

    /**
     * Sets the redelivery policy
     */
    public void setRedeliveryPolicy(RedeliveryPolicy redeliveryPolicy) {
        this.redeliveryPolicy = redeliveryPolicy;
    }

    /**
     * Returns the factory used to create the dead letter destination, lazily
     * defaulting to one that returns {@link #getDefaultDeadLetterEndpoint()}.
     */
    public ProcessorFactory getDeadLetterFactory() {
        if (deadLetterFactory == null) {
            deadLetterFactory = new ProcessorFactory() {
                public Processor createProcessor() {
                    return getDefaultDeadLetterEndpoint();
                }
            };
        }
        return deadLetterFactory;
    }

    /**
     * Sets the default dead letter queue factory
     */
    public void setDeadLetterFactory(ProcessorFactory deadLetterFactory) {
        this.deadLetterFactory = deadLetterFactory;
    }

    /**
     * Returns the default dead letter destination, lazily created as a
     * {@link RecipientList} over the default endpoint expression.
     */
    public Processor getDefaultDeadLetterEndpoint() {
        if (defaultDeadLetterEndpoint == null) {
            defaultDeadLetterEndpoint = new RecipientList(getDefaultDeadLetterEndpointExpression());
        }
        return defaultDeadLetterEndpoint;
    }

    /**
     * Sets the default dead letter endpoint used
     */
    public void setDefaultDeadLetterEndpoint(Processor defaultDeadLetterEndpoint) {
        this.defaultDeadLetterEndpoint = defaultDeadLetterEndpoint;
    }

    /**
     * Returns the expression deciding the dead letter endpoint, lazily
     * defaulting to a constant expression of the default endpoint URI.
     */
    public Expression getDefaultDeadLetterEndpointExpression() {
        if (defaultDeadLetterEndpointExpression == null) {
            defaultDeadLetterEndpointExpression = ExpressionBuilder
                .constantExpression(getDefaultDeadLetterEndpointUri());
        }
        return defaultDeadLetterEndpointExpression;
    }

    /**
     * Sets the expression used to decide the dead letter channel endpoint for
     * an exchange if no factory is provided via
     * {@link #setDeadLetterFactory(ProcessorFactory)}
     */
    public void setDefaultDeadLetterEndpointExpression(Expression defaultDeadLetterEndpointExpression) {
        this.defaultDeadLetterEndpointExpression = defaultDeadLetterEndpointExpression;
    }

    public String getDefaultDeadLetterEndpointUri() {
        return defaultDeadLetterEndpointUri;
    }

    /**
     * Sets the default dead letter endpoint URI used if no factory is provided
     * via {@link #setDeadLetterFactory(ProcessorFactory)} and no expression is
     * provided via {@link #setDefaultDeadLetterEndpointExpression(Expression)}
     *
     * @param defaultDeadLetterEndpointUri the default URI if no deadletter
     *                factory or expression is provided
     */
    public void setDefaultDeadLetterEndpointUri(String defaultDeadLetterEndpointUri) {
        this.defaultDeadLetterEndpointUri = defaultDeadLetterEndpointUri;
    }

    public Logger getLogger() {
        return logger;
    }

    public void setLogger(Logger logger) {
        this.logger = logger;
    }

    /**
     * Sets the exception policy strategy to use for resolving the {@link org.apache.camel.model.ExceptionType}
     * to use for a given thrown exception
     */
    public ExceptionPolicyStrategy getExceptionPolicyStrategy() {
        return exceptionPolicyStrategy;
    }

    public void setExceptionPolicyStrategy(ExceptionPolicyStrategy exceptionPolicyStrategy) {
        this.exceptionPolicyStrategy = exceptionPolicyStrategy;
    }

    @Override
    public String toString() {
        return "DeadLetterChannelBuilder(" + (deadLetterFactory != null ? deadLetterFactory : defaultDeadLetterEndpoint) + ")";
    }
}
|
Added missing fluent builders to DLC for Java DSL
git-svn-id: e3ccc80b644512be24afa6caf639b2d1f1969354@731256 13f79535-47bb-0310-9956-ffa450edef68
|
camel-core/src/main/java/org/apache/camel/builder/DeadLetterChannelBuilder.java
|
Added missing fluent builders to DLC for Java DSL
|
|
Java
|
apache-2.0
|
ed7f64895736463e16d2e55d22b17e7978a1d105
| 0
|
nicolargo/intellij-community,signed/intellij-community,slisson/intellij-community,ibinti/intellij-community,petteyg/intellij-community,clumsy/intellij-community,xfournet/intellij-community,alphafoobar/intellij-community,dslomov/intellij-community,retomerz/intellij-community,petteyg/intellij-community,fnouama/intellij-community,michaelgallacher/intellij-community,Distrotech/intellij-community,salguarnieri/intellij-community,lucafavatella/intellij-community,FHannes/intellij-community,apixandru/intellij-community,tmpgit/intellij-community,allotria/intellij-community,da1z/intellij-community,ahb0327/intellij-community,idea4bsd/idea4bsd,vladmm/intellij-community,youdonghai/intellij-community,asedunov/intellij-community,da1z/intellij-community,retomerz/intellij-community,apixandru/intellij-community,fengbaicanhe/intellij-community,SerCeMan/intellij-community,orekyuu/intellij-community,gnuhub/intellij-community,Distrotech/intellij-community,kool79/intellij-community,asedunov/intellij-community,orekyuu/intellij-community,ibinti/intellij-community,semonte/intellij-community,retomerz/intellij-community,xfournet/intellij-community,fnouama/intellij-community,robovm/robovm-studio,mglukhikh/intellij-community,kool79/intellij-community,signed/intellij-community,fengbaicanhe/intellij-community,idea4bsd/idea4bsd,amith01994/intellij-community,semonte/intellij-community,diorcety/intellij-community,samthor/intellij-community,apixandru/intellij-community,allotria/intellij-community,fengbaicanhe/intellij-community,ryano144/intellij-community,salguarnieri/intellij-community,ryano144/intellij-community,holmes/intellij-community,samthor/intellij-community,fitermay/intellij-community,nicolargo/intellij-community,nicolargo/intellij-community,SerCeMan/intellij-community,MER-GROUP/intellij-community,salguarnieri/intellij-community,nicolargo/intellij-community,blademainer/intellij-community,retomerz/intellij-community,salguarnieri/intellij-community,signed/intellij-community,da1z/intellij-communi
ty,idea4bsd/idea4bsd,diorcety/intellij-community,diorcety/intellij-community,fengbaicanhe/intellij-community,supersven/intellij-community,youdonghai/intellij-community,jagguli/intellij-community,semonte/intellij-community,kool79/intellij-community,gnuhub/intellij-community,suncycheng/intellij-community,asedunov/intellij-community,slisson/intellij-community,vladmm/intellij-community,hurricup/intellij-community,retomerz/intellij-community,michaelgallacher/intellij-community,ernestp/consulo,vvv1559/intellij-community,wreckJ/intellij-community,kool79/intellij-community,apixandru/intellij-community,ThiagoGarciaAlves/intellij-community,salguarnieri/intellij-community,izonder/intellij-community,lucafavatella/intellij-community,muntasirsyed/intellij-community,signed/intellij-community,izonder/intellij-community,hurricup/intellij-community,jagguli/intellij-community,tmpgit/intellij-community,Distrotech/intellij-community,allotria/intellij-community,alphafoobar/intellij-community,ahb0327/intellij-community,consulo/consulo,TangHao1987/intellij-community,youdonghai/intellij-community,suncycheng/intellij-community,MER-GROUP/intellij-community,samthor/intellij-community,ThiagoGarciaAlves/intellij-community,asedunov/intellij-community,amith01994/intellij-community,dslomov/intellij-community,ivan-fedorov/intellij-community,youdonghai/intellij-community,ivan-fedorov/intellij-community,jagguli/intellij-community,wreckJ/intellij-community,mglukhikh/intellij-community,xfournet/intellij-community,da1z/intellij-community,muntasirsyed/intellij-community,pwoodworth/intellij-community,slisson/intellij-community,wreckJ/intellij-community,SerCeMan/intellij-community,gnuhub/intellij-community,vvv1559/intellij-community,allotria/intellij-community,retomerz/intellij-community,MER-GROUP/intellij-community,xfournet/intellij-community,tmpgit/intellij-community,tmpgit/intellij-community,apixandru/intellij-community,ThiagoGarciaAlves/intellij-community,MichaelNedzelsky/intellij-community,nicolargo/in
tellij-community,fengbaicanhe/intellij-community,diorcety/intellij-community,petteyg/intellij-community,izonder/intellij-community,lucafavatella/intellij-community,ibinti/intellij-community,ThiagoGarciaAlves/intellij-community,consulo/consulo,kdwink/intellij-community,fengbaicanhe/intellij-community,idea4bsd/idea4bsd,akosyakov/intellij-community,michaelgallacher/intellij-community,ivan-fedorov/intellij-community,akosyakov/intellij-community,xfournet/intellij-community,caot/intellij-community,MichaelNedzelsky/intellij-community,signed/intellij-community,idea4bsd/idea4bsd,tmpgit/intellij-community,pwoodworth/intellij-community,apixandru/intellij-community,mglukhikh/intellij-community,jagguli/intellij-community,gnuhub/intellij-community,allotria/intellij-community,retomerz/intellij-community,blademainer/intellij-community,TangHao1987/intellij-community,ryano144/intellij-community,ibinti/intellij-community,hurricup/intellij-community,wreckJ/intellij-community,ivan-fedorov/intellij-community,youdonghai/intellij-community,fitermay/intellij-community,petteyg/intellij-community,ibinti/intellij-community,allotria/intellij-community,robovm/robovm-studio,suncycheng/intellij-community,fnouama/intellij-community,orekyuu/intellij-community,fitermay/intellij-community,fitermay/intellij-community,muntasirsyed/intellij-community,fnouama/intellij-community,youdonghai/intellij-community,samthor/intellij-community,caot/intellij-community,samthor/intellij-community,MER-GROUP/intellij-community,semonte/intellij-community,Distrotech/intellij-community,holmes/intellij-community,asedunov/intellij-community,slisson/intellij-community,clumsy/intellij-community,ftomassetti/intellij-community,xfournet/intellij-community,mglukhikh/intellij-community,mglukhikh/intellij-community,suncycheng/intellij-community,holmes/intellij-community,ibinti/intellij-community,holmes/intellij-community,amith01994/intellij-community,petteyg/intellij-community,michaelgallacher/intellij-community,jagguli/intellij-com
munity,xfournet/intellij-community,caot/intellij-community,salguarnieri/intellij-community,caot/intellij-community,gnuhub/intellij-community,da1z/intellij-community,fengbaicanhe/intellij-community,tmpgit/intellij-community,allotria/intellij-community,youdonghai/intellij-community,jagguli/intellij-community,alphafoobar/intellij-community,vladmm/intellij-community,vvv1559/intellij-community,semonte/intellij-community,fengbaicanhe/intellij-community,adedayo/intellij-community,supersven/intellij-community,nicolargo/intellij-community,amith01994/intellij-community,ol-loginov/intellij-community,MER-GROUP/intellij-community,asedunov/intellij-community,dslomov/intellij-community,mglukhikh/intellij-community,ryano144/intellij-community,holmes/intellij-community,pwoodworth/intellij-community,MichaelNedzelsky/intellij-community,fnouama/intellij-community,fitermay/intellij-community,caot/intellij-community,fnouama/intellij-community,ThiagoGarciaAlves/intellij-community,kdwink/intellij-community,clumsy/intellij-community,ThiagoGarciaAlves/intellij-community,vvv1559/intellij-community,TangHao1987/intellij-community,kool79/intellij-community,vvv1559/intellij-community,ryano144/intellij-community,FHannes/intellij-community,idea4bsd/idea4bsd,hurricup/intellij-community,ahb0327/intellij-community,ftomassetti/intellij-community,ol-loginov/intellij-community,idea4bsd/idea4bsd,diorcety/intellij-community,FHannes/intellij-community,vladmm/intellij-community,jagguli/intellij-community,SerCeMan/intellij-community,amith01994/intellij-community,ryano144/intellij-community,vvv1559/intellij-community,hurricup/intellij-community,kdwink/intellij-community,fitermay/intellij-community,robovm/robovm-studio,akosyakov/intellij-community,allotria/intellij-community,pwoodworth/intellij-community,vvv1559/intellij-community,nicolargo/intellij-community,xfournet/intellij-community,semonte/intellij-community,michaelgallacher/intellij-community,dslomov/intellij-community,tmpgit/intellij-community,ahb0327/in
tellij-community,SerCeMan/intellij-community,fnouama/intellij-community,gnuhub/intellij-community,robovm/robovm-studio,ivan-fedorov/intellij-community,diorcety/intellij-community,nicolargo/intellij-community,vladmm/intellij-community,michaelgallacher/intellij-community,mglukhikh/intellij-community,diorcety/intellij-community,youdonghai/intellij-community,MichaelNedzelsky/intellij-community,Lekanich/intellij-community,lucafavatella/intellij-community,lucafavatella/intellij-community,akosyakov/intellij-community,fengbaicanhe/intellij-community,xfournet/intellij-community,mglukhikh/intellij-community,ahb0327/intellij-community,apixandru/intellij-community,Distrotech/intellij-community,dslomov/intellij-community,ivan-fedorov/intellij-community,ahb0327/intellij-community,ryano144/intellij-community,samthor/intellij-community,ibinti/intellij-community,gnuhub/intellij-community,ernestp/consulo,fnouama/intellij-community,orekyuu/intellij-community,lucafavatella/intellij-community,kdwink/intellij-community,akosyakov/intellij-community,youdonghai/intellij-community,robovm/robovm-studio,alphafoobar/intellij-community,fnouama/intellij-community,retomerz/intellij-community,Lekanich/intellij-community,dslomov/intellij-community,michaelgallacher/intellij-community,FHannes/intellij-community,slisson/intellij-community,allotria/intellij-community,nicolargo/intellij-community,consulo/consulo,petteyg/intellij-community,supersven/intellij-community,ftomassetti/intellij-community,supersven/intellij-community,allotria/intellij-community,wreckJ/intellij-community,ibinti/intellij-community,semonte/intellij-community,semonte/intellij-community,fnouama/intellij-community,retomerz/intellij-community,slisson/intellij-community,ernestp/consulo,orekyuu/intellij-community,dslomov/intellij-community,wreckJ/intellij-community,pwoodworth/intellij-community,suncycheng/intellij-community,fitermay/intellij-community,suncycheng/intellij-community,signed/intellij-community,orekyuu/intellij-community,robo
vm/robovm-studio,supersven/intellij-community,orekyuu/intellij-community,ernestp/consulo,MER-GROUP/intellij-community,ivan-fedorov/intellij-community,wreckJ/intellij-community,adedayo/intellij-community,akosyakov/intellij-community,semonte/intellij-community,kdwink/intellij-community,izonder/intellij-community,supersven/intellij-community,muntasirsyed/intellij-community,idea4bsd/idea4bsd,ol-loginov/intellij-community,asedunov/intellij-community,lucafavatella/intellij-community,blademainer/intellij-community,ol-loginov/intellij-community,Lekanich/intellij-community,orekyuu/intellij-community,tmpgit/intellij-community,orekyuu/intellij-community,TangHao1987/intellij-community,ftomassetti/intellij-community,fitermay/intellij-community,slisson/intellij-community,ahb0327/intellij-community,hurricup/intellij-community,vladmm/intellij-community,slisson/intellij-community,mglukhikh/intellij-community,SerCeMan/intellij-community,amith01994/intellij-community,jagguli/intellij-community,suncycheng/intellij-community,caot/intellij-community,TangHao1987/intellij-community,kdwink/intellij-community,vvv1559/intellij-community,petteyg/intellij-community,blademainer/intellij-community,ernestp/consulo,hurricup/intellij-community,salguarnieri/intellij-community,Distrotech/intellij-community,vladmm/intellij-community,consulo/consulo,xfournet/intellij-community,MichaelNedzelsky/intellij-community,michaelgallacher/intellij-community,supersven/intellij-community,clumsy/intellij-community,ivan-fedorov/intellij-community,samthor/intellij-community,amith01994/intellij-community,akosyakov/intellij-community,amith01994/intellij-community,suncycheng/intellij-community,izonder/intellij-community,izonder/intellij-community,clumsy/intellij-community,petteyg/intellij-community,supersven/intellij-community,samthor/intellij-community,slisson/intellij-community,alphafoobar/intellij-community,kool79/intellij-community,Lekanich/intellij-community,michaelgallacher/intellij-community,akosyakov/intellij-com
munity,pwoodworth/intellij-community,kdwink/intellij-community,idea4bsd/idea4bsd,SerCeMan/intellij-community,blademainer/intellij-community,michaelgallacher/intellij-community,diorcety/intellij-community,ThiagoGarciaAlves/intellij-community,adedayo/intellij-community,Lekanich/intellij-community,youdonghai/intellij-community,holmes/intellij-community,alphafoobar/intellij-community,alphafoobar/intellij-community,pwoodworth/intellij-community,robovm/robovm-studio,fnouama/intellij-community,youdonghai/intellij-community,adedayo/intellij-community,ahb0327/intellij-community,jagguli/intellij-community,MichaelNedzelsky/intellij-community,da1z/intellij-community,diorcety/intellij-community,vladmm/intellij-community,clumsy/intellij-community,blademainer/intellij-community,adedayo/intellij-community,idea4bsd/idea4bsd,amith01994/intellij-community,mglukhikh/intellij-community,FHannes/intellij-community,vvv1559/intellij-community,signed/intellij-community,muntasirsyed/intellij-community,ryano144/intellij-community,fitermay/intellij-community,izonder/intellij-community,ftomassetti/intellij-community,alphafoobar/intellij-community,ThiagoGarciaAlves/intellij-community,wreckJ/intellij-community,muntasirsyed/intellij-community,consulo/consulo,kool79/intellij-community,ahb0327/intellij-community,FHannes/intellij-community,idea4bsd/idea4bsd,diorcety/intellij-community,alphafoobar/intellij-community,ftomassetti/intellij-community,vvv1559/intellij-community,TangHao1987/intellij-community,Lekanich/intellij-community,da1z/intellij-community,jagguli/intellij-community,apixandru/intellij-community,ftomassetti/intellij-community,holmes/intellij-community,vvv1559/intellij-community,xfournet/intellij-community,wreckJ/intellij-community,fengbaicanhe/intellij-community,MER-GROUP/intellij-community,jagguli/intellij-community,lucafavatella/intellij-community,lucafavatella/intellij-community,orekyuu/intellij-community,xfournet/intellij-community,michaelgallacher/intellij-community,Distrotech/intell
ij-community,nicolargo/intellij-community,SerCeMan/intellij-community,caot/intellij-community,FHannes/intellij-community,michaelgallacher/intellij-community,MichaelNedzelsky/intellij-community,caot/intellij-community,caot/intellij-community,ol-loginov/intellij-community,FHannes/intellij-community,muntasirsyed/intellij-community,TangHao1987/intellij-community,ThiagoGarciaAlves/intellij-community,da1z/intellij-community,adedayo/intellij-community,petteyg/intellij-community,suncycheng/intellij-community,izonder/intellij-community,allotria/intellij-community,MichaelNedzelsky/intellij-community,kool79/intellij-community,da1z/intellij-community,alphafoobar/intellij-community,fitermay/intellij-community,wreckJ/intellij-community,Lekanich/intellij-community,ryano144/intellij-community,lucafavatella/intellij-community,Distrotech/intellij-community,salguarnieri/intellij-community,tmpgit/intellij-community,ahb0327/intellij-community,SerCeMan/intellij-community,izonder/intellij-community,ol-loginov/intellij-community,ftomassetti/intellij-community,adedayo/intellij-community,pwoodworth/intellij-community,TangHao1987/intellij-community,amith01994/intellij-community,Distrotech/intellij-community,MichaelNedzelsky/intellij-community,ol-loginov/intellij-community,alphafoobar/intellij-community,samthor/intellij-community,apixandru/intellij-community,SerCeMan/intellij-community,dslomov/intellij-community,orekyuu/intellij-community,mglukhikh/intellij-community,lucafavatella/intellij-community,pwoodworth/intellij-community,asedunov/intellij-community,ernestp/consulo,semonte/intellij-community,holmes/intellij-community,gnuhub/intellij-community,retomerz/intellij-community,fitermay/intellij-community,Lekanich/intellij-community,SerCeMan/intellij-community,fitermay/intellij-community,tmpgit/intellij-community,FHannes/intellij-community,FHannes/intellij-community,MER-GROUP/intellij-community,caot/intellij-community,holmes/intellij-community,adedayo/intellij-community,ibinti/intellij-communit
y,TangHao1987/intellij-community,muntasirsyed/intellij-community,hurricup/intellij-community,ibinti/intellij-community,supersven/intellij-community,suncycheng/intellij-community,orekyuu/intellij-community,Distrotech/intellij-community,holmes/intellij-community,kdwink/intellij-community,apixandru/intellij-community,slisson/intellij-community,kool79/intellij-community,ol-loginov/intellij-community,MichaelNedzelsky/intellij-community,ahb0327/intellij-community,asedunov/intellij-community,vladmm/intellij-community,da1z/intellij-community,robovm/robovm-studio,hurricup/intellij-community,salguarnieri/intellij-community,mglukhikh/intellij-community,nicolargo/intellij-community,ftomassetti/intellij-community,MER-GROUP/intellij-community,ibinti/intellij-community,signed/intellij-community,ol-loginov/intellij-community,ol-loginov/intellij-community,kdwink/intellij-community,hurricup/intellij-community,blademainer/intellij-community,asedunov/intellij-community,caot/intellij-community,Distrotech/intellij-community,signed/intellij-community,ryano144/intellij-community,retomerz/intellij-community,tmpgit/intellij-community,apixandru/intellij-community,gnuhub/intellij-community,ftomassetti/intellij-community,caot/intellij-community,diorcety/intellij-community,ThiagoGarciaAlves/intellij-community,apixandru/intellij-community,kool79/intellij-community,dslomov/intellij-community,Lekanich/intellij-community,tmpgit/intellij-community,asedunov/intellij-community,MichaelNedzelsky/intellij-community,izonder/intellij-community,lucafavatella/intellij-community,ftomassetti/intellij-community,xfournet/intellij-community,consulo/consulo,TangHao1987/intellij-community,gnuhub/intellij-community,signed/intellij-community,ivan-fedorov/intellij-community,amith01994/intellij-community,FHannes/intellij-community,akosyakov/intellij-community,ibinti/intellij-community,izonder/intellij-community,pwoodworth/intellij-community,samthor/intellij-community,akosyakov/intellij-community,fnouama/intellij-communi
ty,blademainer/intellij-community,holmes/intellij-community,MER-GROUP/intellij-community,samthor/intellij-community,akosyakov/intellij-community,suncycheng/intellij-community,signed/intellij-community,supersven/intellij-community,hurricup/intellij-community,semonte/intellij-community,idea4bsd/idea4bsd,izonder/intellij-community,vvv1559/intellij-community,fitermay/intellij-community,youdonghai/intellij-community,kdwink/intellij-community,vladmm/intellij-community,dslomov/intellij-community,ivan-fedorov/intellij-community,TangHao1987/intellij-community,robovm/robovm-studio,supersven/intellij-community,allotria/intellij-community,clumsy/intellij-community,dslomov/intellij-community,idea4bsd/idea4bsd,kool79/intellij-community,clumsy/intellij-community,suncycheng/intellij-community,petteyg/intellij-community,hurricup/intellij-community,MichaelNedzelsky/intellij-community,ol-loginov/intellij-community,wreckJ/intellij-community,salguarnieri/intellij-community,apixandru/intellij-community,alphafoobar/intellij-community,hurricup/intellij-community,fengbaicanhe/intellij-community,gnuhub/intellij-community,ThiagoGarciaAlves/intellij-community,muntasirsyed/intellij-community,amith01994/intellij-community,signed/intellij-community,ol-loginov/intellij-community,slisson/intellij-community,muntasirsyed/intellij-community,nicolargo/intellij-community,blademainer/intellij-community,blademainer/intellij-community,MER-GROUP/intellij-community,blademainer/intellij-community,Lekanich/intellij-community,gnuhub/intellij-community,ryano144/intellij-community,clumsy/intellij-community,mglukhikh/intellij-community,Lekanich/intellij-community,TangHao1987/intellij-community,muntasirsyed/intellij-community,adedayo/intellij-community,jagguli/intellij-community,Lekanich/intellij-community,robovm/robovm-studio,kool79/intellij-community,ryano144/intellij-community,Distrotech/intellij-community,robovm/robovm-studio,ftomassetti/intellij-community,da1z/intellij-community,pwoodworth/intellij-community,d
iorcety/intellij-community,vladmm/intellij-community,slisson/intellij-community,clumsy/intellij-community,clumsy/intellij-community,kdwink/intellij-community,adedayo/intellij-community,supersven/intellij-community,fengbaicanhe/intellij-community,blademainer/intellij-community,salguarnieri/intellij-community,asedunov/intellij-community,adedayo/intellij-community,MER-GROUP/intellij-community,da1z/intellij-community,FHannes/intellij-community,vvv1559/intellij-community,ivan-fedorov/intellij-community,retomerz/intellij-community,ibinti/intellij-community,samthor/intellij-community,clumsy/intellij-community,allotria/intellij-community,akosyakov/intellij-community,ivan-fedorov/intellij-community,signed/intellij-community,da1z/intellij-community,pwoodworth/intellij-community,asedunov/intellij-community,kdwink/intellij-community,youdonghai/intellij-community,muntasirsyed/intellij-community,apixandru/intellij-community,adedayo/intellij-community,SerCeMan/intellij-community,lucafavatella/intellij-community,retomerz/intellij-community,salguarnieri/intellij-community,robovm/robovm-studio,dslomov/intellij-community,wreckJ/intellij-community,ahb0327/intellij-community,semonte/intellij-community,holmes/intellij-community,ThiagoGarciaAlves/intellij-community,FHannes/intellij-community,petteyg/intellij-community,petteyg/intellij-community,vladmm/intellij-community,semonte/intellij-community
|
/*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package git4idea.vfs;
import com.intellij.ProjectTopics;
import com.intellij.notification.Notification;
import com.intellij.notification.NotificationListener;
import com.intellij.notification.NotificationType;
import com.intellij.notification.Notifications;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.command.CommandAdapter;
import com.intellij.openapi.command.CommandEvent;
import com.intellij.openapi.command.CommandListener;
import com.intellij.openapi.command.CommandProcessor;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.ModuleRootEvent;
import com.intellij.openapi.roots.ModuleRootListener;
import com.intellij.openapi.roots.ProjectRootManager;
import com.intellij.openapi.startup.StartupManager;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.util.Computable;
import com.intellij.openapi.vcs.ProjectLevelVcsManager;
import com.intellij.openapi.vcs.VcsDirectoryMapping;
import com.intellij.openapi.vcs.VcsListener;
import com.intellij.openapi.vfs.*;
import com.intellij.openapi.vfs.ex.VirtualFileManagerAdapter;
import com.intellij.openapi.vfs.ex.VirtualFileManagerEx;
import com.intellij.util.messages.MessageBusConnection;
import com.intellij.util.ui.UIUtil;
import com.intellij.util.ui.update.MergingUpdateQueue;
import com.intellij.util.ui.update.Update;
import git4idea.GitUtil;
import git4idea.GitVcs;
import git4idea.i18n.GitBundle;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.event.HyperlinkEvent;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;
/**
* The component tracks Git roots for the project. If roots are mapped incorrectly it
* shows balloon that notifies user about the problem and offers to correct root mapping.
*/
public class GitRootTracker implements VcsListener {
/**
* The context project
*/
private final Project myProject;
/**
* Tracker of roots for project root manager
*/
private final ProjectRootManager myProjectRoots;
/**
* The vcs manager that tracks content roots
*/
private final ProjectLevelVcsManager myVcsManager;
/**
* The vcs instance
*/
private final GitVcs myVcs;
/**
* If true, the tracking is enabled.
*/
private final AtomicBoolean myIsEnabled = new AtomicBoolean(false);
/**
* If true, the root configuration has been possibly invalidated
*/
private final AtomicBoolean myRootsInvalidated = new AtomicBoolean(true);
/**
* If true, there are some configured git roots, or listener has never been run yet
*/
private final AtomicBoolean myHasGitRoots = new AtomicBoolean(true);
/**
* If true, the notification is currently active and has not been dismissed yet.
*/
private final AtomicBoolean myNotificationPosted = new AtomicBoolean(false);
private final MergingUpdateQueue myQueue;
private Notification myNotification;
/**
* The invalid git roots
*/
private static final String GIT_INVALID_ROOTS_ID = "Git";
/**
* The command listener
*/
private final CommandListener myCommandListener;
/**
* The file listener
*/
private final MyFileListener myFileListener;
/**
* Listener for refresh events
*/
private final VirtualFileManagerAdapter myVirtualFileManagerListener;
/**
* Local file system service
*/
private final LocalFileSystem myLocalFileSystem;
/**
* The multicaster for root events
*/
private final GitRootsListener myMulticaster;
private final MessageBusConnection myMessageBusConnection;
/**
* The constructor
*
* @param project the project instance
* @param multicaster the listeners to notify
*/
public GitRootTracker(GitVcs vcs, @NotNull Project project, @NotNull GitRootsListener multicaster) {
myMulticaster = multicaster;
if (project.isDefault()) {
throw new IllegalArgumentException("The project must not be default");
}
myProject = project;
myProjectRoots = ProjectRootManager.getInstance(myProject);
myQueue = new MergingUpdateQueue("queue", 500, true, null, project, null, false);
myVcs = vcs;
myVcsManager = ProjectLevelVcsManager.getInstance(project);
myVcsManager.addVcsListener(this);
myLocalFileSystem = LocalFileSystem.getInstance();
myMessageBusConnection = myProject.getMessageBus().connect();
myMessageBusConnection.subscribe(ProjectTopics.PROJECT_ROOTS, new ModuleRootListener() {
public void beforeRootsChange(ModuleRootEvent event) {
// do nothing
}
public void rootsChanged(ModuleRootEvent event) {
invalidate();
}
});
myCommandListener = new CommandAdapter() {
@Override
public void commandFinished(CommandEvent event) {
if (!myRootsInvalidated.compareAndSet(true, false)) {
return;
}
scheduleRootsCheck(false);
}
};
CommandProcessor.getInstance().addCommandListener(myCommandListener);
myFileListener = new MyFileListener();
VirtualFileManagerEx fileManager = (VirtualFileManagerEx)VirtualFileManager.getInstance();
fileManager.addVirtualFileListener(myFileListener);
myVirtualFileManagerListener = new VirtualFileManagerAdapter() {
@Override
public void afterRefreshFinish(boolean asynchronous) {
if (!myRootsInvalidated.compareAndSet(true, false)) {
return;
}
scheduleRootsCheck(false);
}
};
fileManager.addVirtualFileManagerListener(myVirtualFileManagerListener);
StartupManager.getInstance(myProject).runWhenProjectIsInitialized(new Runnable() {
public void run() {
myIsEnabled.set(true);
scheduleRootsCheck(true);
}
});
}
/**
* Dispose the component removing all related listeners
*/
public void dispose() {
myVcsManager.removeVcsListener(this);
myMessageBusConnection.disconnect();
CommandProcessor.getInstance().removeCommandListener(myCommandListener);
VirtualFileManagerEx fileManager = (VirtualFileManagerEx)VirtualFileManager.getInstance();
fileManager.removeVirtualFileListener(myFileListener);
fileManager.removeVirtualFileManagerListener(myVirtualFileManagerListener);
}
/**
* {@inheritDoc}
*/
public void directoryMappingChanged() {
ApplicationManager.getApplication().invokeLater(new Runnable() {
public void run() {
scheduleRootsCheck(true);
}
});
}
private void scheduleRootsCheck(final boolean rootsChanged) {
if (ApplicationManager.getApplication().isUnitTestMode() || ApplicationManager.getApplication().isHeadlessEnvironment()) {
doCheckRoots(rootsChanged);
return;
}
myQueue.queue(new Update("root check") {
public void run() {
if (myProject.isDisposed()) return;
doCheckRoots(rootsChanged);
}
});
}
/**
* Check roots for changes.
*
* @param rootsChanged
*/
private void doCheckRoots(boolean rootsChanged) {
if (!myIsEnabled.get() || (!rootsChanged && !myHasGitRoots.get())) {
return;
}
final HashSet<VirtualFile> rootSet = new HashSet<VirtualFile>();
boolean hasInvalidRoots = ApplicationManager.getApplication().runReadAction(new Computable<Boolean>() {
public Boolean compute() {
for (VcsDirectoryMapping m : myVcsManager.getDirectoryMappings()) {
if (!m.getVcs().equals(myVcs.getName())) {
continue;
}
String path = m.getDirectory();
if (path.length() == 0) {
VirtualFile baseDir = myProject.getBaseDir();
assert baseDir != null;
path = baseDir.getPath();
}
VirtualFile root = lookupFile(path);
VirtualFile actual = GitUtil.gitRootOrNull(root);
if (root == null || rootSet.contains(root) || actual != root) {
return true;
}
rootSet.add(root);
}
return false;
}
});
if (!hasInvalidRoots && rootSet.isEmpty()) {
myHasGitRoots.set(false);
return;
}
else {
myHasGitRoots.set(true);
}
if (!hasInvalidRoots) {
// check if roots have a problem
for (final VirtualFile root : rootSet) {
hasInvalidRoots = hasUnmappedSubroots(root, rootSet, 0);
if (hasInvalidRoots) {
break;
}
}
}
if (!hasInvalidRoots) {
// all roots are correct
if (myNotificationPosted.compareAndSet(true, false)) {
UIUtil.invokeLaterIfNeeded(new Runnable() {
public void run() {
if (myNotification != null) {
if (!myNotification.isExpired()) {
myNotification.expire();
}
myNotification = null;
}
}
});
}
}
else if (myNotificationPosted.compareAndSet(false, true)) {
UIUtil.invokeLaterIfNeeded(new Runnable() {
public void run() {
myNotification = new Notification(GIT_INVALID_ROOTS_ID, GitBundle.getString("root.tracker.message.title"),
GitBundle.getString("root.tracker.message"), NotificationType.ERROR,
new NotificationListener() {
public void hyperlinkUpdate(@NotNull final Notification notification,
@NotNull HyperlinkEvent event) {
fixRoots(notification);
}
});
Notifications.Bus.notify(myNotification, myProject);
}
});
}
UIUtil.invokeLaterIfNeeded(new Runnable() {
public void run() {
myMulticaster.gitRootsChanged();
}
});
}
/**
* Check if there are some unmapped subdirectories under git
*
* @param directory the content root to check
* @param rootSet the mapped root set
* @param depth
*/
private static boolean hasUnmappedSubroots(final VirtualFile directory, final @NotNull HashSet<VirtualFile> rootSet, int depth) {
if (depth > 3) { // three is quite enough
return false;
}
VirtualFile[] children = ApplicationManager.getApplication().runReadAction(new Computable<VirtualFile[]>() {
public VirtualFile[] compute() {
return directory.isValid() ? directory.getChildren() : VirtualFile.EMPTY_ARRAY;
}
});
for (final VirtualFile child : children) {
if (!child.isDirectory()) {
continue;
}
if (child.getName().equals(".git")) {
return !rootSet.contains(child.getParent());
}
if (hasUnmappedSubroots(child, rootSet, depth + 1)) {
return true;
}
}
return false;
}
/**
* Fix mapped roots
* @param notification Expires the notification if roots are in the correct state after fix.
*/
private void fixRoots(final Notification notification) {
ApplicationManager.getApplication().executeOnPooledThread(new Runnable() {
@Override public void run() {
final List<VcsDirectoryMapping> vcsDirectoryMappings = new ArrayList<VcsDirectoryMapping>(myVcsManager.getDirectoryMappings());
final HashSet<String> mapped = new HashSet<String>();
final HashSet<String> removed = new HashSet<String>();
final HashSet<String> added = new HashSet<String>();
collectRoots(vcsDirectoryMappings, mapped, removed, added);
final VirtualFile baseDir = myProject.getBaseDir();
ApplicationManager.getApplication().invokeLater(new Runnable() {
@Override public void run() {
if (added.isEmpty() && removed.isEmpty()) {
Messages.showInfoMessage(myProject, GitBundle.message("fix.roots.valid.message"), GitBundle.message("fix.roots.valid.title"));
notification.expire();
return;
}
GitFixRootsDialog d = new GitFixRootsDialog(myProject, mapped, added, removed);
d.show();
if (!d.isOK()) {
return;
}
for (Iterator<VcsDirectoryMapping> i = vcsDirectoryMappings.iterator(); i.hasNext(); ) {
VcsDirectoryMapping m = i.next();
String path = m.getDirectory();
if (removed.contains(path) || (path.length() == 0 && baseDir != null && removed.contains(baseDir.getPath()))) {
i.remove();
}
}
for (String a : added) {
vcsDirectoryMappings.add(new VcsDirectoryMapping(a, myVcs.getName()));
}
myVcsManager.setDirectoryMappings(vcsDirectoryMappings);
myVcsManager.updateActiveVcss();
notification.expire();
}
});
}
});
}
private void collectRoots(List<VcsDirectoryMapping> vcsDirectoryMappings,
HashSet<String> mapped,
HashSet<String> removed,
HashSet<String> added) {
final VirtualFile baseDir = myProject.getBaseDir();
for (Iterator<VcsDirectoryMapping> i = vcsDirectoryMappings.iterator(); i.hasNext();) {
VcsDirectoryMapping m = i.next();
String vcsName = myVcs.getName();
if (!vcsName.equals(m.getVcs())) {
continue;
}
String path = m.getDirectory();
if (path.length() == 0 && baseDir != null) {
path = baseDir.getPath();
}
VirtualFile file = lookupFile(path);
if (file != null && !mapped.add(file.getPath())) {
// eliminate duplicates
i.remove();
continue;
}
final VirtualFile actual = GitUtil.gitRootOrNull(file);
if (file == null || actual == null) {
removed.add(path);
}
else if (actual != file) {
removed.add(path);
added.add(actual.getPath());
}
}
for (String m : mapped) {
VirtualFile file = lookupFile(m);
if (file == null) {
continue;
}
addSubroots(file, added, mapped);
if (removed.contains(m)) {
continue;
}
VirtualFile root = GitUtil.gitRootOrNull(file);
assert root != null;
for (String o : mapped) {
// the mapped collection is not modified here, so order is being kept
if (o.equals(m) || removed.contains(o)) {
continue;
}
if (o.startsWith(m)) {
VirtualFile otherFile = lookupFile(m);
assert otherFile != null;
VirtualFile otherRoot = GitUtil.gitRootOrNull(otherFile);
assert otherRoot != null;
if (otherRoot == root) {
removed.add(o);
}
else if (otherFile != otherRoot) {
added.add(otherRoot.getPath());
removed.add(o);
}
}
}
}
}
/**
* Look up file in the file system
*
* @param path the path to lookup
* @return the file or null if the file not found
*/
@Nullable
private VirtualFile lookupFile(String path) {
return myLocalFileSystem.findFileByPath(path);
}
/**
* Add subroots for the content root
*
* @param directory the content root to check
* @param toAdd collection of roots to be added
* @param mapped all mapped git roots
*/
private static void addSubroots(VirtualFile directory, HashSet<String> toAdd, HashSet<String> mapped) {
for (VirtualFile child : directory.getChildren()) {
if (!child.isDirectory()) {
continue;
}
if (child.getName().equals(".git") && !mapped.contains(directory.getPath())) {
toAdd.add(directory.getPath());
}
else {
addSubroots(child, toAdd, mapped);
}
}
}
/**
* Invalidate git root
*/
private void invalidate() {
myRootsInvalidated.set(true);
}
/**
* The listener for git roots
*/
private class MyFileListener extends VirtualFileAdapter {
/**
* Return true if file has git repositories
*
* @param file the file to check
* @return true if file has git repositories
*/
private boolean hasGitRepositories(VirtualFile file) {
if (!file.isDirectory() || !file.getName().equals(".git")) {
return false;
}
VirtualFile baseDir = myProject.getBaseDir();
if (baseDir == null) {
return false;
}
if (!VfsUtil.isAncestor(baseDir, file, false)) {
boolean isUnder = false;
for (VirtualFile c : myProjectRoots.getContentRoots()) {
if (!VfsUtil.isAncestor(baseDir, c, false) && VfsUtil.isAncestor(c, file, false)) {
isUnder = true;
break;
}
}
if (!isUnder) {
return false;
}
}
return true;
}
/**
* {@inheritDoc}
*/
@Override
public void fileCreated(VirtualFileEvent event) {
if (!myHasGitRoots.get()) {
return;
}
if (hasGitRepositories(event.getFile())) {
invalidate();
}
}
/**
* {@inheritDoc}
*/
@Override
public void beforeFileDeletion(VirtualFileEvent event) {
if (!myHasGitRoots.get()) {
return;
}
if (hasGitRepositories(event.getFile())) {
invalidate();
}
}
/**
* {@inheritDoc}
*/
@Override
public void fileMoved(VirtualFileMoveEvent event) {
if (!myHasGitRoots.get()) {
return;
}
if (hasGitRepositories(event.getFile())) {
invalidate();
}
}
/**
* {@inheritDoc}
*/
@Override
public void fileCopied(VirtualFileCopyEvent event) {
if (!myHasGitRoots.get()) {
return;
}
if (hasGitRepositories(event.getFile())) {
invalidate();
}
}
}
}
|
plugins/git4idea/src/git4idea/vfs/GitRootTracker.java
|
/*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package git4idea.vfs;
import com.intellij.ProjectTopics;
import com.intellij.notification.Notification;
import com.intellij.notification.NotificationListener;
import com.intellij.notification.NotificationType;
import com.intellij.notification.Notifications;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.command.CommandAdapter;
import com.intellij.openapi.command.CommandEvent;
import com.intellij.openapi.command.CommandListener;
import com.intellij.openapi.command.CommandProcessor;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.ModuleRootEvent;
import com.intellij.openapi.roots.ModuleRootListener;
import com.intellij.openapi.roots.ProjectRootManager;
import com.intellij.openapi.startup.StartupManager;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.util.Computable;
import com.intellij.openapi.vcs.ProjectLevelVcsManager;
import com.intellij.openapi.vcs.VcsDirectoryMapping;
import com.intellij.openapi.vcs.VcsListener;
import com.intellij.openapi.vfs.*;
import com.intellij.openapi.vfs.ex.VirtualFileManagerAdapter;
import com.intellij.openapi.vfs.ex.VirtualFileManagerEx;
import com.intellij.util.messages.MessageBusConnection;
import com.intellij.util.ui.UIUtil;
import com.intellij.util.ui.update.MergingUpdateQueue;
import com.intellij.util.ui.update.Update;
import git4idea.GitUtil;
import git4idea.GitVcs;
import git4idea.i18n.GitBundle;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.event.HyperlinkEvent;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;
/**
* The component tracks Git roots for the project. If roots are mapped incorrectly it
* shows balloon that notifies user about the problem and offers to correct root mapping.
*/
public class GitRootTracker implements VcsListener {
/**
* The context project
*/
private final Project myProject;
/**
* Tracker of roots for project root manager
*/
private final ProjectRootManager myProjectRoots;
/**
* The vcs manager that tracks content roots
*/
private final ProjectLevelVcsManager myVcsManager;
/**
* The vcs instance
*/
private final GitVcs myVcs;
/**
* If true, the tracking is enabled.
*/
private final AtomicBoolean myIsEnabled = new AtomicBoolean(false);
/**
* If true, the root configuration has been possibly invalidated
*/
private final AtomicBoolean myRootsInvalidated = new AtomicBoolean(true);
/**
* If true, there are some configured git roots, or listener has never been run yet
*/
private final AtomicBoolean myHasGitRoots = new AtomicBoolean(true);
/**
* If true, the notification is currently active and has not been dismissed yet.
*/
private final AtomicBoolean myNotificationPosted = new AtomicBoolean(false);
private final MergingUpdateQueue myQueue;
private Notification myNotification;
/**
* The invalid git roots
*/
private static final String GIT_INVALID_ROOTS_ID = "Git";
/**
* The command listener
*/
private final CommandListener myCommandListener;
/**
* The file listener
*/
private final MyFileListener myFileListener;
/**
* Listener for refresh events
*/
private final VirtualFileManagerAdapter myVirtualFileManagerListener;
/**
* Local file system service
*/
private final LocalFileSystem myLocalFileSystem;
/**
* The multicaster for root events
*/
private final GitRootsListener myMulticaster;
private final MessageBusConnection myMessageBusConnection;
/**
* The constructor
*
* @param project the project instance
* @param multicaster the listeners to notify
*/
public GitRootTracker(GitVcs vcs, @NotNull Project project, @NotNull GitRootsListener multicaster) {
myMulticaster = multicaster;
if (project.isDefault()) {
throw new IllegalArgumentException("The project must not be default");
}
myProject = project;
myProjectRoots = ProjectRootManager.getInstance(myProject);
myQueue = new MergingUpdateQueue("queue", 500, true, null, project, null, false);
myVcs = vcs;
myVcsManager = ProjectLevelVcsManager.getInstance(project);
myVcsManager.addVcsListener(this);
myLocalFileSystem = LocalFileSystem.getInstance();
myMessageBusConnection = myProject.getMessageBus().connect();
myMessageBusConnection.subscribe(ProjectTopics.PROJECT_ROOTS, new ModuleRootListener() {
public void beforeRootsChange(ModuleRootEvent event) {
// do nothing
}
public void rootsChanged(ModuleRootEvent event) {
invalidate();
}
});
myCommandListener = new CommandAdapter() {
@Override
public void commandFinished(CommandEvent event) {
if (!myRootsInvalidated.compareAndSet(true, false)) {
return;
}
scheduleRootsCheck(false);
}
};
CommandProcessor.getInstance().addCommandListener(myCommandListener);
myFileListener = new MyFileListener();
VirtualFileManagerEx fileManager = (VirtualFileManagerEx)VirtualFileManager.getInstance();
fileManager.addVirtualFileListener(myFileListener);
myVirtualFileManagerListener = new VirtualFileManagerAdapter() {
@Override
public void afterRefreshFinish(boolean asynchronous) {
if (!myRootsInvalidated.compareAndSet(true, false)) {
return;
}
scheduleRootsCheck(false);
}
};
fileManager.addVirtualFileManagerListener(myVirtualFileManagerListener);
StartupManager.getInstance(myProject).runWhenProjectIsInitialized(new Runnable() {
public void run() {
myIsEnabled.set(true);
scheduleRootsCheck(true);
}
});
}
/**
* Dispose the component removing all related listeners
*/
public void dispose() {
myVcsManager.removeVcsListener(this);
myMessageBusConnection.disconnect();
CommandProcessor.getInstance().removeCommandListener(myCommandListener);
VirtualFileManagerEx fileManager = (VirtualFileManagerEx)VirtualFileManager.getInstance();
fileManager.removeVirtualFileListener(myFileListener);
fileManager.removeVirtualFileManagerListener(myVirtualFileManagerListener);
}
/**
* {@inheritDoc}
*/
public void directoryMappingChanged() {
ApplicationManager.getApplication().invokeLater(new Runnable() {
public void run() {
scheduleRootsCheck(true);
}
});
}
private void scheduleRootsCheck(final boolean rootsChanged) {
if (ApplicationManager.getApplication().isUnitTestMode() || ApplicationManager.getApplication().isHeadlessEnvironment()) {
doCheckRoots(rootsChanged);
return;
}
myQueue.queue(new Update("root check") {
public void run() {
if (myProject.isDisposed()) return;
doCheckRoots(rootsChanged);
}
});
}
/**
* Check roots for changes.
*
* @param rootsChanged
*/
private void doCheckRoots(boolean rootsChanged) {
if (!myIsEnabled.get() || (!rootsChanged && !myHasGitRoots.get())) {
return;
}
final HashSet<VirtualFile> rootSet = new HashSet<VirtualFile>();
boolean hasInvalidRoots = ApplicationManager.getApplication().runReadAction(new Computable<Boolean>() {
public Boolean compute() {
for (VcsDirectoryMapping m : myVcsManager.getDirectoryMappings()) {
if (!m.getVcs().equals(myVcs.getName())) {
continue;
}
String path = m.getDirectory();
if (path.length() == 0) {
VirtualFile baseDir = myProject.getBaseDir();
assert baseDir != null;
path = baseDir.getPath();
}
VirtualFile root = lookupFile(path);
VirtualFile actual = GitUtil.gitRootOrNull(root);
if (root == null || rootSet.contains(root) || actual != root) {
return true;
}
rootSet.add(root);
}
return false;
}
});
if (!hasInvalidRoots && rootSet.isEmpty()) {
myHasGitRoots.set(false);
return;
}
else {
myHasGitRoots.set(true);
}
if (!hasInvalidRoots) {
// check if roots have a problem
for (final VirtualFile root : rootSet) {
hasInvalidRoots = hasUnmappedSubroots(root, rootSet, 0);
if (hasInvalidRoots) {
break;
}
}
}
if (!hasInvalidRoots) {
// all roots are correct
if (myNotificationPosted.compareAndSet(true, false)) {
UIUtil.invokeLaterIfNeeded(new Runnable() {
public void run() {
if (myNotification != null) {
if (!myNotification.isExpired()) {
myNotification.expire();
}
myNotification = null;
}
}
});
}
}
else if (myNotificationPosted.compareAndSet(false, true)) {
UIUtil.invokeLaterIfNeeded(new Runnable() {
public void run() {
myNotification = new Notification(GIT_INVALID_ROOTS_ID, GitBundle.getString("root.tracker.message.title"),
GitBundle.getString("root.tracker.message"), NotificationType.ERROR,
new NotificationListener() {
public void hyperlinkUpdate(@NotNull Notification notification,
@NotNull HyperlinkEvent event) {
if (fixRoots()) {
notification.expire();
}
}
});
Notifications.Bus.notify(myNotification, myProject);
}
});
}
UIUtil.invokeLaterIfNeeded(new Runnable() {
public void run() {
myMulticaster.gitRootsChanged();
}
});
}
/**
* Check if there are some unmapped subdirectories under git
*
* @param directory the content root to check
* @param rootSet the mapped root set
* @param depth
*/
private static boolean hasUnmappedSubroots(final VirtualFile directory, final @NotNull HashSet<VirtualFile> rootSet, int depth) {
if (depth > 3) { // three is quite enough
return false;
}
VirtualFile[] children = ApplicationManager.getApplication().runReadAction(new Computable<VirtualFile[]>() {
public VirtualFile[] compute() {
return directory.isValid() ? directory.getChildren() : VirtualFile.EMPTY_ARRAY;
}
});
for (final VirtualFile child : children) {
if (!child.isDirectory()) {
continue;
}
if (child.getName().equals(".git")) {
return !rootSet.contains(child.getParent());
}
if (hasUnmappedSubroots(child, rootSet, depth + 1)) {
return true;
}
}
return false;
}
/**
* Fix mapped roots
*
* @return true if roots now in the correct state
*/
boolean fixRoots() {
final List<VcsDirectoryMapping> vcsDirectoryMappings = new ArrayList<VcsDirectoryMapping>(myVcsManager.getDirectoryMappings());
final HashSet<String> mapped = new HashSet<String>();
final HashSet<String> removed = new HashSet<String>();
final HashSet<String> added = new HashSet<String>();
final VirtualFile baseDir = myProject.getBaseDir();
ApplicationManager.getApplication().runReadAction(new Runnable() {
public void run() {
for (Iterator<VcsDirectoryMapping> i = vcsDirectoryMappings.iterator(); i.hasNext();) {
VcsDirectoryMapping m = i.next();
String vcsName = myVcs.getName();
if (!vcsName.equals(m.getVcs())) {
continue;
}
String path = m.getDirectory();
if (path.length() == 0 && baseDir != null) {
path = baseDir.getPath();
}
VirtualFile file = lookupFile(path);
if (file != null && !mapped.add(file.getPath())) {
// eliminate duplicates
i.remove();
continue;
}
final VirtualFile actual = GitUtil.gitRootOrNull(file);
if (file == null || actual == null) {
removed.add(path);
}
else if (actual != file) {
removed.add(path);
added.add(actual.getPath());
}
}
for (String m : mapped) {
VirtualFile file = lookupFile(m);
if (file == null) {
continue;
}
addSubroots(file, added, mapped);
if (removed.contains(m)) {
continue;
}
VirtualFile root = GitUtil.gitRootOrNull(file);
assert root != null;
for (String o : mapped) {
// the mapped collection is not modified here, so order is being kept
if (o.equals(m) || removed.contains(o)) {
continue;
}
if (o.startsWith(m)) {
VirtualFile otherFile = lookupFile(m);
assert otherFile != null;
VirtualFile otherRoot = GitUtil.gitRootOrNull(otherFile);
assert otherRoot != null;
if (otherRoot == root) {
removed.add(o);
}
else if (otherFile != otherRoot) {
added.add(otherRoot.getPath());
removed.add(o);
}
}
}
}
}
});
if (added.isEmpty() && removed.isEmpty()) {
Messages.showInfoMessage(myProject, GitBundle.message("fix.roots.valid.message"), GitBundle.message("fix.roots.valid.title"));
return true;
}
GitFixRootsDialog d = new GitFixRootsDialog(myProject, mapped, added, removed);
d.show();
if (!d.isOK()) {
return false;
}
for (Iterator<VcsDirectoryMapping> i = vcsDirectoryMappings.iterator(); i.hasNext();) {
VcsDirectoryMapping m = i.next();
String path = m.getDirectory();
if (removed.contains(path) || (path.length() == 0 && baseDir != null && removed.contains(baseDir.getPath()))) {
i.remove();
}
}
for (String a : added) {
vcsDirectoryMappings.add(new VcsDirectoryMapping(a, myVcs.getName()));
}
myVcsManager.setDirectoryMappings(vcsDirectoryMappings);
myVcsManager.updateActiveVcss();
return true;
}
/**
* Look up file in the file system
*
* @param path the path to lookup
* @return the file or null if the file not found
*/
@Nullable
private VirtualFile lookupFile(String path) {
return myLocalFileSystem.findFileByPath(path);
}
/**
 * Recursively collect git roots located under the given content root.
 * A directory that directly contains a ".git" child and is not already
 * mapped is recorded in {@code toAdd}; all other subdirectories (including
 * an already-mapped directory's ".git") are scanned recursively.
 *
 * @param directory the content root to check
 * @param toAdd     collection receiving roots to be added
 * @param mapped    all currently mapped git roots
 */
private static void addSubroots(VirtualFile directory, HashSet<String> toAdd, HashSet<String> mapped) {
    final String directoryPath = directory.getPath();
    for (VirtualFile entry : directory.getChildren()) {
        if (!entry.isDirectory()) {
            continue;
        }
        if (".git".equals(entry.getName()) && !mapped.contains(directoryPath)) {
            toAdd.add(directoryPath);
        }
        else {
            addSubroots(entry, toAdd, mapped);
        }
    }
}
/**
 * Mark the cached git roots as invalid so they are recalculated.
 * The myRootsInvalidated flag is presumably polled elsewhere — the reader
 * of this flag is outside this excerpt.
 */
private void invalidate() {
    myRootsInvalidated.set(true);
}
/**
 * VFS listener that invalidates the cached git roots whenever a ".git"
 * directory relevant to the project is created, deleted, moved, or copied.
 */
private class MyFileListener extends VirtualFileAdapter {
    /**
     * Return true if the file is a ".git" directory that belongs to the
     * project: either under the project base directory, or under a content
     * root that lies outside the base directory.
     *
     * @param file the file to check
     * @return true if the file is a project-relevant git repository directory
     */
    private boolean hasGitRepositories(VirtualFile file) {
        if (!file.isDirectory() || !file.getName().equals(".git")) {
            return false;
        }
        VirtualFile baseDir = myProject.getBaseDir();
        if (baseDir == null) {
            return false;
        }
        if (!VfsUtil.isAncestor(baseDir, file, false)) {
            boolean isUnder = false;
            for (VirtualFile c : myProjectRoots.getContentRoots()) {
                if (!VfsUtil.isAncestor(baseDir, c, false) && VfsUtil.isAncestor(c, file, false)) {
                    isUnder = true;
                    break;
                }
            }
            if (!isUnder) {
                return false;
            }
        }
        return true;
    }

    /**
     * Common handler shared by all event callbacks: if git roots are
     * currently known and the affected file is a git repository directory,
     * invalidate the cached roots. Extracted to remove the fourfold
     * duplication of the same guard-and-check sequence.
     *
     * @param event the VFS event being processed
     */
    private void handleEvent(VirtualFileEvent event) {
        if (!myHasGitRoots.get()) {
            return;
        }
        if (hasGitRepositories(event.getFile())) {
            invalidate();
        }
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void fileCreated(VirtualFileEvent event) {
        handleEvent(event);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void beforeFileDeletion(VirtualFileEvent event) {
        handleEvent(event);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void fileMoved(VirtualFileMoveEvent event) {
        handleEvent(event);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void fileCopied(VirtualFileCopyEvent event) {
        handleEvent(event);
    }
}
}
|
Git IDEA-72767 find roots on a pooled thread.
|
plugins/git4idea/src/git4idea/vfs/GitRootTracker.java
|
Git IDEA-72767 find roots on a pooled thread.
|
|
Java
|
apache-2.0
|
9dda995c81d676713a5309501ab3083721b092b7
| 0
|
opensciencegrid/oim,opensciencegrid/oim,opensciencegrid/oim,opensciencegrid/oim,opensciencegrid/oim
|
package edu.iu.grid.oim.servlet;
import java.io.IOException;
import java.io.PrintWriter;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Date;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.log4j.Logger;
import com.divrep.DivRep;
import com.divrep.DivRepEvent;
import com.divrep.DivRepEventListener;
import com.divrep.DivRepRoot;
import com.divrep.common.DivRepButton;
import edu.iu.grid.oim.lib.StaticConfig;
import edu.iu.grid.oim.model.Context;
import edu.iu.grid.oim.model.MenuItem;
import edu.iu.grid.oim.model.db.ContactModel;
import edu.iu.grid.oim.model.db.record.ContactRecord;
import edu.iu.grid.oim.view.ContactAssociationView;
import edu.iu.grid.oim.view.ContentView;
import edu.iu.grid.oim.view.DivRepWrapper;
import edu.iu.grid.oim.view.GenericView;
import edu.iu.grid.oim.view.HtmlView;
import edu.iu.grid.oim.view.MenuView;
import edu.iu.grid.oim.view.Page;
import edu.iu.grid.oim.view.IView;
import edu.iu.grid.oim.view.SideContentView;
/**
 * Serves the OIM home page: a welcome message, an optional profile-
 * confirmation reminder, the entities associated with the signed-in
 * contact, and an embedded topology presentation.
 */
public class HomeServlet extends ServletBase {
    private static final long serialVersionUID = 1L;
    static Logger log = Logger.getLogger(HomeServlet.class);

    public HomeServlet() {
        super();
        // No initialization beyond the base servlet.
    }

    /**
     * Renders the full home page (menu, content, side bar).
     * NOTE(review): context and auth are presumably inherited from
     * ServletBase — confirm against that class.
     */
    protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException
    {
        MenuView menuview = new MenuView(context, "home");
        ContentView contentview;
        contentview = createContentView();
        Page page = new Page(context, menuview, contentview, createSideView());
        page.render(response.getWriter());
    }

    /**
     * Builds the main content area: welcome text (guest vs. signed-in user),
     * confirmation widget, associated entities, and the topology iframe.
     *
     * @throws ServletException if the associated-entities view cannot be built
     */
    protected ContentView createContentView() throws ServletException
    {
        ContentView contentview = new ContentView();
        contentview.add(new HtmlView("<h1>OIM Home</h1>"));
        // TODO agopu: need to clean this up with some divs etc. Nicer font, etc.
        String welcome_string = "<p>Welcome to the OSG Information Management System.</p>";
        if(auth.isGuest()) {
            welcome_string += "<p>Please provide an X509 certificate issued by an <a href='http://software.grid.iu.edu/cadist/'>OSG-approved Certifying Authority (CA)</a> via your web browser in order to use or register to OIM.</p>";
        } else {
            welcome_string += "<p>In the menu along the top, you will find options for registering or updating information for various OSG entities.</p>";
        }
        welcome_string += "<p>Please see Help page for more information.</p>";
        contentview.add(new HtmlView(welcome_string));
        //add confirmation button
        if(auth.isOIMUser()) {
            try {
                contentview.add(new DivRepWrapper(new Confirmation(auth.getContactID(), context)));
            } catch (SQLException e) {
                // Best effort: the confirmation widget is optional, so a DB
                // failure is logged instead of failing the whole page.
                log.error(e);
            }
        }
        //show entities that this user is associated
        if(auth.isOIMUser()) {
            contentview.add(new HtmlView("<h2>Associated Entities</h2>"));
            contentview.add(new HtmlView("<p>Following entities are associated with your contact</p>"));
            try {
                contentview.add(new ContactAssociationView(context, auth.getContactID()));
            } catch (SQLException e) {
                throw new ServletException(e);
            }
        }
        //show oim hierarchy doc
        contentview.add(new HtmlView("<h2>OSG Topology used by OIM</h2>"));
        contentview.add(new HtmlView("<p>This presentation walk through various entities within OIM hierarchy and describes their relationship.</p>"));
        contentview.add(new HtmlView("<iframe src=\"http://docs.google.com/present/embed?id=ddtgc5bt_113fp3fmvgp&size=l\" frameborder=\"0\" width=\"700\" height=\"559\"></iframe>"));
        return contentview;
    }

    /** Builds the (currently empty) side bar. */
    private SideContentView createSideView()
    {
        SideContentView view = new SideContentView();
        return view;
    }

    /**
     * DivRep widget reminding the user to re-confirm their profile when the
     * last confirmation has expired; renders nothing otherwise.
     */
    class Confirmation extends DivRep
    {
        final ContactRecord crec;
        final ContactModel cmodel;
        final Context context;

        public Confirmation(Integer contact_id, Context _context) throws SQLException {
            super(_context.getPageRoot());
            cmodel = new ContactModel(_context);
            crec = (ContactRecord) cmodel.get(contact_id);//.clone();
            context = _context;
        }

        protected void onEvent(DivRepEvent e) {
            // Read-only widget: no client events are handled.
        }

        /** Renders the reminder only when the confirmation has expired. */
        public void render(PrintWriter out) {
            if(crec.isConfirmationExpired()) {
                out.write("<div id=\""+getNodeID()+"\">");
                out.write("<h3>Content Confirmation</h3>");
                out.write("<p class=\"divrep_round divrep_elementerror\">You have not recently confirmed that your information in OIM is current</p>");
                out.write("<p>The last time you confirmed your profile information was "+crec.confirmed.toString()+"</p>");
                out.write("<p>Please go to the ");
                out.write("<a href=\""+StaticConfig.getApplicationBase()+"/profileedit"+"\">profile</a>");
                out.write(" page to check your profile information</p>");
                out.write("</div>");
            }
        }
    }
}
|
src/edu/iu/grid/oim/servlet/HomeServlet.java
|
package edu.iu.grid.oim.servlet;
import java.io.IOException;
import java.io.PrintWriter;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Date;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.log4j.Logger;
import com.divrep.DivRep;
import com.divrep.DivRepEvent;
import com.divrep.DivRepEventListener;
import com.divrep.DivRepRoot;
import com.divrep.common.DivRepButton;
import edu.iu.grid.oim.lib.StaticConfig;
import edu.iu.grid.oim.model.Context;
import edu.iu.grid.oim.model.MenuItem;
import edu.iu.grid.oim.model.db.ContactModel;
import edu.iu.grid.oim.model.db.record.ContactRecord;
import edu.iu.grid.oim.view.ContactAssociationView;
import edu.iu.grid.oim.view.ContentView;
import edu.iu.grid.oim.view.DivRepWrapper;
import edu.iu.grid.oim.view.GenericView;
import edu.iu.grid.oim.view.HtmlView;
import edu.iu.grid.oim.view.MenuView;
import edu.iu.grid.oim.view.Page;
import edu.iu.grid.oim.view.IView;
import edu.iu.grid.oim.view.SideContentView;
/**
 * Serves the OIM home page: a welcome message, an optional profile-
 * confirmation reminder, the entities associated with the signed-in
 * contact, and an embedded topology presentation.
 */
public class HomeServlet extends ServletBase {
    private static final long serialVersionUID = 1L;
    static Logger log = Logger.getLogger(HomeServlet.class);

    public HomeServlet() {
        super();
        // No initialization beyond the base servlet.
    }

    /**
     * Renders the full home page (menu, content, side bar).
     * NOTE(review): context and auth are presumably inherited from
     * ServletBase — confirm against that class.
     */
    protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException
    {
        MenuView menuview = new MenuView(context, "home");
        ContentView contentview;
        contentview = createContentView();
        Page page = new Page(context, menuview, contentview, createSideView());
        page.render(response.getWriter());
    }

    /**
     * Builds the main content area: welcome text (guest vs. signed-in user),
     * confirmation widget, associated entities, and the topology iframe.
     *
     * @throws ServletException if the associated-entities view cannot be built
     */
    protected ContentView createContentView() throws ServletException
    {
        ContentView contentview = new ContentView();
        contentview.add(new HtmlView("<h1>OIM Home</h1>"));
        // TODO agopu: need to clean this up with some divs etc. Nicer font, etc.
        String welcome_string = "<p>Welcome to the OSG Information Management System.</p>";
        if(auth.isGuest()) {
            welcome_string += "<p>Please provide a IGTF approved certificate via your web browser in order to use or register to OIM.</p>";
        } else {
            welcome_string += "<p>In the menu along the top, you will find options for registering or updating information for various OSG entities.</p>";
        }
        welcome_string += "<p>Please see Help page for more information.</p>";
        contentview.add(new HtmlView(welcome_string));
        //add confirmation button
        if(auth.isOIMUser()) {
            try {
                contentview.add(new DivRepWrapper(new Confirmation(auth.getContactID(), context)));
            } catch (SQLException e) {
                // Best effort: the confirmation widget is optional, so a DB
                // failure is logged instead of failing the whole page.
                log.error(e);
            }
        }
        //show entities that this user is associated
        if(auth.isOIMUser()) {
            contentview.add(new HtmlView("<h2>Associated Entities</h2>"));
            contentview.add(new HtmlView("<p>Following entities are associated with your contact</p>"));
            try {
                contentview.add(new ContactAssociationView(context, auth.getContactID()));
            } catch (SQLException e) {
                throw new ServletException(e);
            }
        }
        //show oim hierarchy doc
        contentview.add(new HtmlView("<h2>OSG Topology used by OIM</h2>"));
        contentview.add(new HtmlView("<p>This presentation walk through various entities within OIM hierarchy and describes their relationship.</p>"));
        contentview.add(new HtmlView("<iframe src=\"http://docs.google.com/present/embed?id=ddtgc5bt_113fp3fmvgp&size=l\" frameborder=\"0\" width=\"700\" height=\"559\"></iframe>"));
        return contentview;
    }

    /** Builds the (currently empty) side bar. */
    private SideContentView createSideView()
    {
        SideContentView view = new SideContentView();
        return view;
    }

    /**
     * DivRep widget reminding the user to re-confirm their profile when the
     * last confirmation has expired; renders nothing otherwise.
     */
    class Confirmation extends DivRep
    {
        final ContactRecord crec;
        final ContactModel cmodel;
        final Context context;

        public Confirmation(Integer contact_id, Context _context) throws SQLException {
            super(_context.getPageRoot());
            cmodel = new ContactModel(_context);
            crec = (ContactRecord) cmodel.get(contact_id);//.clone();
            context = _context;
        }

        protected void onEvent(DivRepEvent e) {
            // Read-only widget: no client events are handled.
        }

        /** Renders the reminder only when the confirmation has expired. */
        public void render(PrintWriter out) {
            if(crec.isConfirmationExpired()) {
                out.write("<div id=\""+getNodeID()+"\">");
                out.write("<h3>Content Confirmation</h3>");
                out.write("<p class=\"divrep_round divrep_elementerror\">You have not recently confirmed that your information in OIM is current</p>");
                out.write("<p>The last time you confirmed your profile information was "+crec.confirmed.toString()+"</p>");
                out.write("<p>Please go to the ");
                out.write("<a href=\""+StaticConfig.getApplicationBase()+"/profileedit"+"\">profile</a>");
                out.write(" page to check your profile information</p>");
                out.write("</div>");
            }
        }
    }
}
|
updated the welcome note for user without certificate (patched on previous release)
|
src/edu/iu/grid/oim/servlet/HomeServlet.java
|
updated the welcome note for user without certificate (patched on previous release)
|
|
Java
|
apache-2.0
|
dfdb2229dddd71eb4f7b1e98116e7d7276cf7590
| 0
|
getlantern/lantern-java,getlantern/lantern-java,getlantern/lantern-java,lqch14102/lantern,lqch14102/lantern,lqch14102/lantern,getlantern/lantern-java,lqch14102/lantern,lqch14102/lantern,getlantern/lantern-java,getlantern/lantern-java,getlantern/lantern-java,lqch14102/lantern,lqch14102/lantern,getlantern/lantern-java,lqch14102/lantern,lqch14102/lantern,getlantern/lantern-java
|
package org.lantern.state;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.security.GeneralSecurityException;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.StringUtils;
import org.codehaus.jackson.map.ObjectMapper;
import org.lantern.JsonUtils;
import org.lantern.LanternConstants;
import org.lantern.Shutdownable;
import org.lantern.privacy.EncryptedFileService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.io.Files;
import com.google.inject.Provider;
/**
 * Base class for persistent state objects stored encrypted on disk.
 * Writing goes to a temporary ".new" file first and is then moved over the
 * real file so a crash mid-write does not corrupt existing data.
 *
 * @param <T> the type of the stored object
 */
public abstract class Storage<T> implements Provider<T>, Shutdownable {

    private final Logger log = LoggerFactory.getLogger(getClass());

    protected final EncryptedFileService encryptedFileService;

    // In-memory instance backing get()/write(); package-private so
    // collaborators in this package can assign it after a successful read.
    T obj;

    private final File file;
    private final File newFile;
    private final Class<T> cls;

    Storage(final EncryptedFileService encryptedFileService, File file, Class<T> cls) {
        this.cls = cls;
        this.encryptedFileService = encryptedFileService;
        this.file = file;
        this.newFile = new File(file.getAbsolutePath() + ".new");
    }

    /** @return a fresh default instance used when nothing is on disk yet. */
    protected abstract T blank();

    @Override
    public T get() {
        return this.obj;
    }

    /**
     * Serializes the given object to JSON and writes it, encrypted, to disk
     * via the temporary file. Synchronized so concurrent writers cannot
     * interleave on the shared temp file.
     *
     * @param toWrite the object to persist
     */
    public synchronized void write(final T toWrite) {
        if (LanternConstants.ON_APP_ENGINE) {
            log.debug("Not writing on app engine");
            return;
        }
        log.debug("Writing!");
        OutputStream os = null;
        try {
            final String json = JsonUtils.jsonify(toWrite,
                Model.Persistent.class);
            // log.info("Writing JSON: \n{}", json);
            os = encryptedFileService
                    .localEncryptOutputStream(this.newFile);
            os.write(json.getBytes("UTF-8"));
            // Close before the move so all bytes are flushed to the temp file.
            IOUtils.closeQuietly(os);
            Files.move(newFile, file);
        } catch (final IOException e) {
            log.error("Error encrypting stream", e);
        } catch (final GeneralSecurityException e) {
            log.error("Error encrypting stream", e);
        } finally {
            // Fix: previously the stream leaked when an exception was thrown
            // before the in-try close. closeQuietly on an already-closed
            // stream is harmless.
            IOUtils.closeQuietly(os);
        }
    }

    static class ModelReadFailedException extends Exception {
        private static final long serialVersionUID = 6572676909676411690L;
    }

    /**
     * Reads the object from disk.
     *
     * @return the object as read from disk, or a blank instance when the file
     *         does not exist or contains no usable JSON
     * @throws ModelReadFailedException if the file exists but cannot be
     *         decrypted or deserialized
     */
    public T read() throws ModelReadFailedException {
        if (!file.isFile()) {
            return blank();
        }
        final ObjectMapper mapper = new ObjectMapper();
        InputStream is = null;
        try {
            is = encryptedFileService.localDecryptInputStream(file);
            final String json = IOUtils.toString(is);
            if (StringUtils.isBlank(json) || json.equalsIgnoreCase("null")) {
                log.info("Can't build object from empty string");
                return blank();
            }
            final T read = mapper.readValue(json, cls);
            return read;
        } catch (final IOException e) {
            log.error("Could not read object", e);
        } catch (final GeneralSecurityException e) {
            log.error("Security error?", e);
        } finally {
            IOUtils.closeQuietly(is);
        }
        throw new ModelReadFailedException();
    }

    @Override
    public void stop() {
        write();
    }

    /**
     * Serializes the current object.
     */
    public void write() {
        write(this.obj);
    }
}
|
src/main/java/org/lantern/state/Storage.java
|
package org.lantern.state;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.security.GeneralSecurityException;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.StringUtils;
import org.codehaus.jackson.map.ObjectMapper;
import org.lantern.JsonUtils;
import org.lantern.LanternConstants;
import org.lantern.Shutdownable;
import org.lantern.privacy.EncryptedFileService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.io.Files;
import com.google.inject.Provider;
/**
 * Base class for persistent state objects stored encrypted on disk.
 * Writing goes to a temporary ".new" file first and is then moved over the
 * real file so a crash mid-write does not corrupt existing data.
 *
 * @param <T> the type of the stored object
 */
public abstract class Storage<T> implements Provider<T>, Shutdownable {

    private final Logger log = LoggerFactory.getLogger(getClass());

    protected final EncryptedFileService encryptedFileService;

    // In-memory instance backing get()/write(); package-private so
    // collaborators in this package can assign it after a successful read.
    T obj;

    private final File file;
    private final File newFile;
    private final Class<T> cls;

    Storage(final EncryptedFileService encryptedFileService, File file, Class<T> cls) {
        this.cls = cls;
        this.encryptedFileService = encryptedFileService;
        this.file = file;
        this.newFile = new File(file.getAbsolutePath() + ".new");
    }

    /** @return a fresh default instance used when nothing is on disk yet. */
    protected abstract T blank();

    @Override
    public T get() {
        return this.obj;
    }

    /**
     * Serializes the given object to JSON and writes it, encrypted, to disk
     * via the temporary file.
     * Fix: declared synchronized — two concurrent callers previously raced on
     * the shared newFile (write/close/move interleaving), producing corrupt
     * or partially-moved state.
     *
     * @param toWrite the object to persist
     */
    public synchronized void write(final T toWrite) {
        if (LanternConstants.ON_APP_ENGINE) {
            log.debug("Not writing on app engine");
            return;
        }
        log.debug("Writing!");
        OutputStream os = null;
        try {
            final String json = JsonUtils.jsonify(toWrite,
                Model.Persistent.class);
            // log.info("Writing JSON: \n{}", json);
            os = encryptedFileService
                    .localEncryptOutputStream(this.newFile);
            os.write(json.getBytes("UTF-8"));
            // Close before the move so all bytes are flushed to the temp file.
            IOUtils.closeQuietly(os);
            Files.move(newFile, file);
        } catch (final IOException e) {
            log.error("Error encrypting stream", e);
        } catch (final GeneralSecurityException e) {
            log.error("Error encrypting stream", e);
        } finally {
            // Fix: previously the stream leaked when an exception was thrown
            // before the in-try close. closeQuietly on an already-closed
            // stream is harmless.
            IOUtils.closeQuietly(os);
        }
    }

    static class ModelReadFailedException extends Exception {
        private static final long serialVersionUID = 6572676909676411690L;
    }

    /**
     * Reads the object from disk.
     *
     * @return the object as read from disk, or a blank instance when the file
     *         does not exist or contains no usable JSON
     * @throws ModelReadFailedException if the file exists but cannot be
     *         decrypted or deserialized
     */
    public T read() throws ModelReadFailedException {
        if (!file.isFile()) {
            return blank();
        }
        final ObjectMapper mapper = new ObjectMapper();
        InputStream is = null;
        try {
            is = encryptedFileService.localDecryptInputStream(file);
            final String json = IOUtils.toString(is);
            if (StringUtils.isBlank(json) || json.equalsIgnoreCase("null")) {
                log.info("Can't build object from empty string");
                return blank();
            }
            final T read = mapper.readValue(json, cls);
            return read;
        } catch (final IOException e) {
            log.error("Could not read object", e);
        } catch (final GeneralSecurityException e) {
            log.error("Security error?", e);
        } finally {
            IOUtils.closeQuietly(is);
        }
        throw new ModelReadFailedException();
    }

    @Override
    public void stop() {
        write();
    }

    /**
     * Serializes the current object.
     */
    public void write() {
        write(this.obj);
    }
}
|
make write synchronized, so that we do not get weird errors. Fixes #665
|
src/main/java/org/lantern/state/Storage.java
|
make write synchronized, so that we do not get weird errors. Fixes #665
|
|
Java
|
apache-2.0
|
4c14303142c7cf9c939ab9e135d6e85606815acb
| 0
|
cuba-platform/cuba,dimone-kun/cuba,dimone-kun/cuba,cuba-platform/cuba,dimone-kun/cuba,cuba-platform/cuba
|
/*
* Copyright (c) 2008 Haulmont Technology Ltd. All Rights Reserved.
* Haulmont Technology proprietary and confidential.
* Use is subject to license terms.
* Author: Dmitry Abramov
* Created: 26.12.2008 10:02:53
* $Id$
*/
package com.haulmont.cuba.gui.xml;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.*;
/**
 * Utilities for extracting and parsing query parameter references of the
 * form ":prefix$path" (e.g. ":ds$customersDs") from query strings.
 */
public class ParametersHelper {

    // Matches a parameter reference: a colon followed by word chars, dots
    // and dollar signs.
    private static final Pattern QUERY_PARAMETERS_PATTERN = Pattern.compile(":([\\w\\.\\$]+)");

    /**
     * Extracts all distinct parameter references from the query string.
     *
     * @param query the query text to scan
     * @return the distinct parameters found (order unspecified)
     */
    public static ParameterInfo[] parseQuery(String query) {
        Set<ParameterInfo> found = new HashSet<ParameterInfo>();
        Matcher m = QUERY_PARAMETERS_PATTERN.matcher(query);
        while (m.find()) {
            found.add(parse(m.group()));
        }
        return found.toArray(new ParameterInfo[found.size()]);
    }

    /**
     * A parsed parameter reference: its source {@link Type} and its path.
     * Equality and hashing are based on both fields.
     */
    public static class ParameterInfo {
        /** The source a parameter value is taken from, keyed by prefix. */
        public enum Type {
            DATASOURCE("ds"),
            COMPONENT("component"),
            PARAM("param"),
            SESSION("session"),
            CUSTOM("custom");

            private String prefix;

            Type(String prefix) {
                this.prefix = prefix;
            }

            public String getPrefix() {
                return prefix;
            }
        }

        private Type type;
        private String path;

        ParameterInfo(String name, Type type) {
            this.path = name;
            this.type = type;
        }

        public Type getType() {
            return type;
        }

        public String getPath() {
            return path;
        }

        /** @return the canonical "prefix$path" form. */
        public String getName() {
            return type.getPrefix() + "$" + path;
        }

        /** @return a flattened identifier with dots replaced by underscores. */
        public String getFlatName() {
            return (type.getPrefix() + "." + path).replaceAll("\\.", "_");
        }

        @Override
        public boolean equals(Object o) {
            if (o == this) {
                return true;
            }
            if (o == null || o.getClass() != getClass()) {
                return false;
            }
            ParameterInfo other = (ParameterInfo) o;
            return type == other.type && path.equals(other.path);
        }

        @Override
        public int hashCode() {
            return 31 * type.hashCode() + path.hashCode();
        }
    }

    /**
     * Parses a single ":prefix$path" reference.
     *
     * @param parameterInfo the raw reference including the leading colon
     * @return the parsed parameter
     * @throws IllegalStateException if the reference is malformed or uses an
     *         unknown prefix
     */
    public static ParameterInfo parse(String parameterInfo) {
        if (!parameterInfo.startsWith(":")) {
            throw new IllegalStateException(String.format("Illegal parameter info '%s'", parameterInfo));
        }
        final String[] parts = parameterInfo.substring(1).split("\\$");
        if (parts.length != 2) {
            throw new IllegalStateException(String.format("Illegal parameter info '%s'", parameterInfo));
        }
        for (ParameterInfo.Type candidate : ParameterInfo.Type.values()) {
            if (candidate.getPrefix().equals(parts[0])) {
                return new ParameterInfo(parts[1], candidate);
            }
        }
        throw new IllegalStateException(String.format("Illegal parameter info '%s'", parameterInfo));
    }
}
|
modules/gui/src/com/haulmont/cuba/gui/xml/ParametersHelper.java
|
/*
* Copyright (c) 2008 Haulmont Technology Ltd. All Rights Reserved.
* Haulmont Technology proprietary and confidential.
* Use is subject to license terms.
* Author: Dmitry Abramov
* Created: 26.12.2008 10:02:53
* $Id$
*/
package com.haulmont.cuba.gui.xml;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.*;
/**
 * Utilities for extracting and parsing query parameter references of the
 * form ":prefix$path" (e.g. ":ds$customersDs") from query strings.
 */
public class ParametersHelper {

    // Matches a parameter reference: a colon followed by word chars, dots
    // and dollar signs.
    private static final Pattern QUERY_PARAMETERS_PATTERN = Pattern.compile(":([\\w\\.\\$]+)");

    /**
     * Extracts all distinct parameter references from the query string.
     *
     * @param query the query text to scan
     * @return the distinct parameters found (order unspecified)
     */
    public static ParameterInfo[] parseQuery(String query) {
        Set<ParameterInfo> infos = new HashSet<ParameterInfo>();
        Matcher matcher = QUERY_PARAMETERS_PATTERN.matcher(query);
        while (matcher.find()) {
            final String parameterInfo = matcher.group();
            final ParameterInfo info = parse(parameterInfo);
            infos.add(info);
        }
        return infos.toArray(new ParameterInfo[infos.size()]);
    }

    /**
     * A parsed parameter reference: its source {@link Type} and its path.
     * Equality and hashing are based on both fields.
     */
    public static class ParameterInfo {
        /** The source a parameter value is taken from, keyed by prefix. */
        public enum Type {
            DATASOURCE("ds"),
            COMPONENT("component"),
            PARAM("param"),
            SESSION("session"),
            CUSTOM("custom");
            private String prefix;
            Type(String prefix) {
                this.prefix = prefix;
            }
            public String getPrefix() {
                return prefix;
            }
        }
        private Type type;
        private String path;
        ParameterInfo(String name, Type type) {
            this.path = name;
            this.type = type;
        }
        public Type getType() {
            return type;
        }
        public String getPath() {
            return path;
        }
        /** @return the canonical "prefix$path" form. */
        public String getName() {
            return (type.getPrefix() + "$" + path);
        }
        /** @return a flattened identifier with dots replaced by underscores. */
        public String getFlatName() {
            return (type.getPrefix() + "." + path).replaceAll("\\.", "_");
        }
        @Override
        public boolean equals(Object o) {
            if (this == o) return true;
            if (o == null || getClass() != o.getClass()) return false;
            ParameterInfo that = (ParameterInfo) o;
            return path.equals(that.path) && type == that.type;
        }
        @Override
        public int hashCode() {
            int result = type.hashCode();
            result = 31 * result + path.hashCode();
            return result;
        }
    }

    /**
     * Parses a single ":prefix$path" reference.
     *
     * @param parameterInfo the raw reference including the leading colon
     * @return the parsed parameter
     * @throws IllegalStateException if the reference is malformed or uses an
     *         unknown prefix
     */
    public static ParameterInfo parse(String parameterInfo) {
        if (parameterInfo.startsWith(":")) {
            final String param = parameterInfo.substring(1);
            final String[] strings = param.split("\\$");
            if (strings.length != 2) {
                throw new IllegalStateException(String.format("Illegal parameter info '%s'", parameterInfo));
            }
            final String source = strings[0];
            final String name = strings[1];
            // Fix: iterate over all declared types instead of a hand-written
            // if/else chain. The chain duplicated the same comparison four
            // times and omitted the CUSTOM type entirely, so ":custom$..."
            // references were rejected as illegal.
            for (ParameterInfo.Type type : ParameterInfo.Type.values()) {
                if (type.prefix.equals(source)) {
                    return new ParameterInfo(name, type);
                }
            }
            throw new IllegalStateException(String.format("Illegal parameter info '%s'", parameterInfo));
        } else {
            throw new IllegalStateException(String.format("Illegal parameter info '%s'", parameterInfo));
        }
    }
}
|
fix ParametersHelper
|
modules/gui/src/com/haulmont/cuba/gui/xml/ParametersHelper.java
|
fix ParametersHelper
|
|
Java
|
bsd-3-clause
|
ea278c54baf29735bc13e796b6f673f5649924c9
| 0
|
wdv4758h/ZipPy,wdv4758h/ZipPy,wdv4758h/ZipPy,wdv4758h/ZipPy,wdv4758h/ZipPy,wdv4758h/ZipPy,wdv4758h/ZipPy,wdv4758h/ZipPy
|
/*
* Copyright (c) 2011, 2014, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package com.oracle.graal.nodes.calc;
import com.oracle.graal.api.meta.*;
import com.oracle.graal.graph.*;
import com.oracle.graal.graph.spi.*;
import com.oracle.graal.lir.gen.*;
import com.oracle.graal.nodes.*;
import com.oracle.graal.nodes.spi.*;
@NodeInfo(shortName = "%")
public class FloatRemNode extends FloatArithmeticNode implements Lowerable {

    public FloatRemNode(ValueNode x, ValueNode y, boolean isStrictFP) {
        super(x.stamp().unrestricted(), x, y, isStrictFP);
    }

    // Folds a constant float/double remainder; expects exactly two inputs of
    // the same kind (Float or Double).
    public Constant evalConst(Constant... inputs) {
        assert inputs.length == 2;
        assert inputs[0].getKind() == inputs[1].getKind();
        if (inputs[0].getKind() == Kind.Float) {
            return Constant.forFloat(inputs[0].asFloat() % inputs[1].asFloat());
        } else {
            assert inputs[0].getKind() == Kind.Double;
            return Constant.forDouble(inputs[0].asDouble() % inputs[1].asDouble());
        }
    }

    // Constant-folds using the canonicalized inputs (forX/forY) rather than
    // the node's current graph inputs, as the canonicalizer contract requires.
    @Override
    public ValueNode canonical(CanonicalizerTool tool, ValueNode forX, ValueNode forY) {
        if (forX.isConstant() && forY.isConstant()) {
            return ConstantNode.forPrimitive(evalConst(forX.asConstant(), forY.asConstant()));
        }
        return this;
    }

    @Override
    public void lower(LoweringTool tool) {
        tool.getLowerer().lower(this, tool);
    }

    @Override
    public void generate(NodeMappableLIRBuilder builder, ArithmeticLIRGenerator gen) {
        builder.setResult(this, gen.emitRem(builder.operand(getX()), builder.operand(getY()), null));
    }
}
|
graal/com.oracle.graal.nodes/src/com/oracle/graal/nodes/calc/FloatRemNode.java
|
/*
* Copyright (c) 2011, 2014, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package com.oracle.graal.nodes.calc;
import com.oracle.graal.api.meta.*;
import com.oracle.graal.graph.*;
import com.oracle.graal.graph.spi.*;
import com.oracle.graal.lir.gen.*;
import com.oracle.graal.nodes.*;
import com.oracle.graal.nodes.spi.*;
@NodeInfo(shortName = "%")
public class FloatRemNode extends FloatArithmeticNode implements Lowerable {

    public FloatRemNode(ValueNode x, ValueNode y, boolean isStrictFP) {
        super(x.stamp().unrestricted(), x, y, isStrictFP);
    }

    // Folds a constant float/double remainder; expects exactly two inputs of
    // the same kind (Float or Double).
    public Constant evalConst(Constant... inputs) {
        assert inputs.length == 2;
        assert inputs[0].getKind() == inputs[1].getKind();
        if (inputs[0].getKind() == Kind.Float) {
            return Constant.forFloat(inputs[0].asFloat() % inputs[1].asFloat());
        } else {
            assert inputs[0].getKind() == Kind.Double;
            return Constant.forDouble(inputs[0].asDouble() % inputs[1].asDouble());
        }
    }

    // Fix: fold using the canonicalized inputs forX/forY that the
    // canonicalizer passes in, not getX()/getY() — the node's current graph
    // inputs may be stale relative to the proposed canonical inputs, so the
    // old code could fold the wrong constants.
    @Override
    public ValueNode canonical(CanonicalizerTool tool, ValueNode forX, ValueNode forY) {
        if (forX.isConstant() && forY.isConstant()) {
            return ConstantNode.forPrimitive(evalConst(forX.asConstant(), forY.asConstant()));
        }
        return this;
    }

    @Override
    public void lower(LoweringTool tool) {
        tool.getLowerer().lower(this, tool);
    }

    @Override
    public void generate(NodeMappableLIRBuilder builder, ArithmeticLIRGenerator gen) {
        builder.setResult(this, gen.emitRem(builder.operand(getX()), builder.operand(getY()), null));
    }
}
|
fix FloatRemNode canonicalization
|
graal/com.oracle.graal.nodes/src/com/oracle/graal/nodes/calc/FloatRemNode.java
|
fix FloatRemNode canonicalization
|
|
Java
|
bsd-3-clause
|
c75e444faac547d651923ff8fa707af29bf5c92e
| 0
|
NCIP/nci-term-browser,NCIP/nci-term-browser,NCIP/nci-term-browser,NCIP/nci-term-browser
|
package gov.nih.nci.evs.browser.utils;
import java.util.*;
import java.sql.*;
import org.LexGrid.LexBIG.LexBIGService.CodedNodeSet.*;
import org.LexGrid.LexBIG.DataModel.Collections.*;
import org.LexGrid.LexBIG.DataModel.Core.*;
import org.LexGrid.LexBIG.Exceptions.*;
import org.LexGrid.LexBIG.Impl.*;
import org.LexGrid.LexBIG.LexBIGService.*;
import org.LexGrid.concepts.*;
import org.LexGrid.LexBIG.Utility.Iterators.*;
import org.LexGrid.codingSchemes.*;
import org.LexGrid.LexBIG.Utility.*;
import org.LexGrid.LexBIG.DataModel.Core.types.*;
import org.LexGrid.LexBIG.Extensions.Generic.*;
import org.LexGrid.naming.*;
import org.LexGrid.LexBIG.DataModel.InterfaceElements.*;
import org.LexGrid.commonTypes.*;
import gov.nih.nci.evs.browser.properties.*;
import gov.nih.nci.evs.browser.common.*;
import org.apache.commons.codec.language.*;
import org.apache.log4j.*;
import org.LexGrid.relations.Relations;
import org.LexGrid.LexBIG.Extensions.Generic.MappingExtension.Mapping.SearchContext;
import org.LexGrid.LexBIG.Extensions.Generic.*;
import org.LexGrid.LexBIG.Extensions.Generic.SupplementExtension;
import org.LexGrid.LexBIG.Extensions.Generic.MappingExtension;
import org.LexGrid.LexBIG.Extensions.Generic.MappingExtension.Direction;
import org.LexGrid.LexBIG.Extensions.Generic.MappingExtension.MappingSortOption;
import org.LexGrid.LexBIG.Extensions.Generic.MappingExtension.MappingSortOptionName;
import org.LexGrid.LexBIG.Extensions.Generic.MappingExtension.QualifierSortOption;
import org.LexGrid.LexBIG.Extensions.Generic.MappingExtension.Mapping;
import org.LexGrid.LexBIG.LexBIGService.CodedNodeSet.SearchDesignationOption;
import static gov.nih.nci.evs.browser.common.Constants.*;
import gov.nih.nci.evs.browser.bean.*;
/**
* <!-- LICENSE_TEXT_START -->
* Copyright 2008,2009 NGIT. This software was developed in conjunction
* with the National Cancer Institute, and so to the extent government
* employees are co-authors, any rights in such works shall be subject
* to Title 17 of the United States Code, section 105.
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the disclaimer of Article 3,
* below. Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution.
* 2. The end-user documentation included with the redistribution,
* if any, must include the following acknowledgment:
* "This product includes software developed by NGIT and the National
* Cancer Institute." If no such end-user documentation is to be
* included, this acknowledgment shall appear in the software itself,
* wherever such third-party acknowledgments normally appear.
* 3. The names "The National Cancer Institute", "NCI" and "NGIT" must
* not be used to endorse or promote products derived from this software.
* 4. This license does not authorize the incorporation of this software
* into any third party proprietary programs. This license does not
* authorize the recipient to use any trademarks owned by either NCI
* or NGIT
* 5. THIS SOFTWARE IS PROVIDED "AS IS," AND ANY EXPRESSED OR IMPLIED
* WARRANTIES, (INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE) ARE
* DISCLAIMED. IN NO EVENT SHALL THE NATIONAL CANCER INSTITUTE,
* NGIT, OR THEIR AFFILIATES BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
* ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
* <!-- LICENSE_TEXT_END -->
*/
/**
* @author EVS Team
* @version 1.0
*
* Modification history Initial implementation kim.ong@ngc.com
*
*/
public class MappingSearchUtils {
private static Logger _logger = Logger.getLogger(SearchUtils.class);
public MappingSearchUtils() {
}
/**
 * Looks up the name of the relations container that is flagged as a mapping
 * container ({@code isMapping == TRUE}) in the given coding scheme.
 *
 * @param scheme  coding scheme name or URI to resolve
 * @param version coding scheme version; may be null for the default tag
 * @return the mapping container name, or null if the scheme cannot be
 *         resolved or no relations container is flagged as a mapping
 */
public static String getMappingRelationsContainerName(String scheme, String version) {
    CodingSchemeVersionOrTag csvt = new CodingSchemeVersionOrTag();
    if (version != null) {
        csvt.setVersion(version);
    }
    String containerName = null;
    try {
        LexBIGService service = RemoteServerUtil.createLexBIGService();
        CodingScheme codingScheme = service.resolveCodingScheme(scheme, csvt);
        if (codingScheme == null) return null;
        java.util.Enumeration<? extends Relations> relationEnum = codingScheme.enumerateRelations();
        // Stop at the first container flagged as a mapping container.
        while (containerName == null && relationEnum.hasMoreElements()) {
            Relations rel = (Relations) relationEnum.nextElement();
            Boolean isMapping = rel.getIsMapping();
            System.out.println("isMapping: " + isMapping);
            if (isMapping != null && isMapping.equals(Boolean.TRUE)) {
                containerName = rel.getContainerName();
            }
        }
        if (containerName == null) {
            System.out.println("WARNING: Mapping container not found in " + scheme);
            return null;
        } else {
            System.out.println("relationsContainerName " + containerName);
        }
    } catch (Exception ex) {
        ex.printStackTrace();
    }
    return containerName;
}
/**
 * Returns the property types used for property-based searches: COMMENT,
 * DEFINITION and GENERIC (i.e. everything except PRESENTATION).
 */
private CodedNodeSet.PropertyType[] getAllNonPresentationPropertyTypes() {
    return new CodedNodeSet.PropertyType[] {
        PropertyType.COMMENT,
        PropertyType.DEFINITION,
        PropertyType.GENERIC
    };
}
/**
 * Searches a single mapping coding scheme by concept code, matching against
 * either source or target codes.
 *
 * <p>Delegates to the SearchContext-aware overload with
 * {@code SOURCE_OR_TARGET_CODES}. The previous version also built
 * scheme/version Vectors that were never used; those dead locals are removed.
 *
 * @param scheme         coding scheme name
 * @param version        coding scheme version; may be null
 * @param matchText      concept code to match
 * @param matchAlgorithm match algorithm name (e.g. "exactMatch", "contains")
 * @param maxToReturn    maximum results (passed through; -1 for unlimited)
 * @return wrapper around the result iterator, or null if nothing matched
 */
public ResolvedConceptReferencesIteratorWrapper searchByCode(
        String scheme, String version, String matchText,
        String matchAlgorithm, int maxToReturn) {
    return searchByCode(scheme, version, matchText, matchAlgorithm,
        SearchContext.SOURCE_OR_TARGET_CODES, maxToReturn);
}
/**
 * Searches several mapping coding schemes by concept code, matching against
 * either source or target codes; delegates to the SearchContext-aware overload.
 *
 * @param schemes        coding scheme names (parallel to versions)
 * @param versions       coding scheme versions (parallel to schemes)
 * @param matchText      concept code to match
 * @param matchAlgorithm match algorithm name
 * @param maxToReturn    maximum results to return
 * @return wrapper around the result iterator, or null if nothing matched
 */
public ResolvedConceptReferencesIteratorWrapper searchByCode(
        Vector schemes, Vector versions, String matchText,
        String matchAlgorithm, int maxToReturn) {
    return searchByCode(schemes, versions, matchText, matchAlgorithm,
        SearchContext.SOURCE_OR_TARGET_CODES, maxToReturn);
}
/**
 * Searches a single mapping coding scheme by concept code within the given
 * search context (source codes, target codes, or both).
 *
 * <p>Wraps the single scheme/version pair into Vectors and delegates to the
 * multi-scheme overload.
 */
public ResolvedConceptReferencesIteratorWrapper searchByCode(
        String scheme, String version, String matchText,
        String matchAlgorithm, SearchContext searchContext, int maxToReturn) {
    Vector schemeVec = new Vector();
    Vector versionVec = new Vector();
    schemeVec.add(scheme);
    versionVec.add(version);
    return searchByCode(schemeVec, versionVec, matchText, matchAlgorithm,
        searchContext, maxToReturn);
}
/**
 * Searches the given mapping coding schemes by concept code within the given
 * search context. Iterates scheme/version pairs in order and stops at the
 * first scheme that yields a resolved mapping iterator.
 *
 * @param schemes        coding scheme names (parallel to versions)
 * @param versions       coding scheme versions (parallel to schemes)
 * @param matchText      concept code to match; null/blank returns null
 * @param matchAlgorithm match algorithm; "contains" is replaced by the best
 *                       contains algorithm for the text
 * @param searchContext  SOURCE_CODES, TARGET_CODES or SOURCE_OR_TARGET_CODES
 * @param maxToReturn    declared maximum; NOTE(review): not actually applied
 *                       anywhere in this method — confirm whether intended
 * @return wrapper carrying the iterator plus the matching scheme/version,
 *         or null when no scheme produced a result
 */
public ResolvedConceptReferencesIteratorWrapper searchByCode(
Vector schemes, Vector versions, String matchText,
String matchAlgorithm, SearchContext searchContext, int maxToReturn) {
System.out.println("============================== MappingSearchUtils searchByCode");
if (matchText == null || matchText.trim().length() == 0)
return null;
matchText = matchText.trim();
_logger.debug("searchByCode ... " + matchText);
// "contains" is a placeholder; pick the concrete algorithm best suited to the text.
if (matchAlgorithm.compareToIgnoreCase("contains") == 0)
{
matchAlgorithm = new SearchUtils().findBestContainsAlgorithm(matchText);
}
LexBIGService lbSvc = RemoteServerUtil.createLexBIGService();
MappingExtension mappingExtension = null;
try {
mappingExtension = (MappingExtension)lbSvc.getGenericExtension("MappingExtension");
} catch (Exception ex) {
ex.printStackTrace();
return null;
}
ResolvedConceptReferencesIterator itr = null;
int lcv = 0;
String scheme = null;
String version = null;
int numberRemaining = -1;
// Try each scheme in turn; the loop exits as soon as one yields an iterator
// (itr != null) with a readable match count (numberRemaining != -1).
while (itr == null && numberRemaining == -1 && lcv < schemes.size()) {
scheme = (String) schemes.elementAt(lcv);
version = (String) versions.elementAt(lcv);
System.out.println(scheme + " (version: " + version);
String containerName = getMappingRelationsContainerName(scheme, version);
System.out.println("\tcontainer name: " + containerName);
if (containerName != null) {
try {
// NOTE(review): version is resolved above but getMapping is called with a
// null CodingSchemeVersionOrTag — confirm the default tag is intended here.
Mapping mapping =
mappingExtension.getMapping(scheme, null, containerName);
if (mapping != null) {
// Restrict the mapping to the single requested code in the given context.
ConceptReferenceList codeList = new ConceptReferenceList();
ConceptReference ref = new ConceptReference();
ref.setConceptCode(matchText);
codeList.addConceptReference(ref);
mapping = mapping.restrictToCodes(codeList, searchContext);
itr = mapping.resolveMapping();
if (itr != null) {
try {
numberRemaining = itr.numberRemaining();
System.out.println("\tsearchByCode matches: " + numberRemaining);
} catch (Exception ex) {
ex.printStackTrace();
}
}
}
} catch (Exception e) {
e.printStackTrace();
}
}
lcv++;
}
if (itr != null) {
// Record which scheme/version actually produced the hits.
ResolvedConceptReferencesIteratorWrapper wrapper = new ResolvedConceptReferencesIteratorWrapper(itr);
wrapper.setCodingSchemeName(scheme);
wrapper.setCodingSchemeVersion(version);
return wrapper;
}
return null;
}
/**
 * Searches a single mapping coding scheme by concept name (designation).
 *
 * <p>Wraps the scheme/version pair into Vectors and delegates to the
 * multi-scheme overload.
 */
public ResolvedConceptReferencesIteratorWrapper searchByName(
        String scheme, String version, String matchText,
        String matchAlgorithm, int maxToReturn) {
    Vector schemeVec = new Vector();
    Vector versionVec = new Vector();
    schemeVec.add(scheme);
    versionVec.add(version);
    return searchByName(schemeVec, versionVec, matchText, matchAlgorithm, maxToReturn);
}
/**
 * Searches the given mapping coding schemes by matching designations (names).
 * Iterates scheme/version pairs in order and stops at the first scheme that
 * yields a resolved mapping iterator.
 *
 * <p>Fix: guards {@code itr.numberRemaining()} with an {@code itr != null}
 * check, matching {@code searchByCode}; previously a null result from
 * {@code resolveMapping()} produced a spurious NPE stack trace.
 *
 * @param schemes        coding scheme names (parallel to versions)
 * @param versions       coding scheme versions (parallel to schemes)
 * @param matchText      text to match against designations; null/blank -> null
 * @param matchAlgorithm match algorithm; "contains" is replaced by the best
 *                       contains algorithm for the text
 * @param maxToReturn    declared maximum (not applied by this method)
 * @return wrapper carrying the iterator plus the matching scheme/version,
 *         or null when no scheme produced a result
 */
public ResolvedConceptReferencesIteratorWrapper searchByName(
        Vector schemes, Vector versions, String matchText,
        String matchAlgorithm, int maxToReturn) {
    if (matchText == null || matchText.trim().length() == 0)
        return null;
    matchText = matchText.trim();
    _logger.debug("searchByName ... " + matchText);
    // "contains" is a placeholder; pick the concrete algorithm for this text.
    if (matchAlgorithm.compareToIgnoreCase("contains") == 0) {
        matchAlgorithm = new SearchUtils().findBestContainsAlgorithm(matchText);
    }
    LexBIGService lbSvc = RemoteServerUtil.createLexBIGService();
    MappingExtension mappingExtension = null;
    try {
        mappingExtension = (MappingExtension) lbSvc.getGenericExtension("MappingExtension");
    } catch (Exception ex) {
        ex.printStackTrace();
        return null;
    }
    ResolvedConceptReferencesIterator itr = null;
    int lcv = 0;
    String scheme = null;
    String version = null;
    int numberRemaining = -1;
    // Try each scheme until one yields an iterator with a readable match count.
    while (itr == null && numberRemaining == -1 && lcv < schemes.size()) {
        scheme = (String) schemes.elementAt(lcv);
        version = (String) versions.elementAt(lcv);
        String containerName = getMappingRelationsContainerName(scheme, version);
        if (containerName != null) {
            try {
                Mapping mapping =
                    mappingExtension.getMapping(scheme, null, containerName);
                if (mapping != null) {
                    mapping = mapping.restrictToMatchingDesignations(
                        matchText, SearchDesignationOption.ALL, matchAlgorithm, null,
                        SearchContext.SOURCE_OR_TARGET_CODES);
                    itr = mapping.resolveMapping();
                    if (itr != null) { // resolveMapping may return null
                        try {
                            numberRemaining = itr.numberRemaining();
                            System.out.println("Number of matches: " + numberRemaining);
                        } catch (Exception ex) {
                            ex.printStackTrace();
                        }
                    }
                }
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
        lcv++;
    }
    if (itr != null) {
        // Record which scheme/version actually produced the hits.
        ResolvedConceptReferencesIteratorWrapper wrapper = new ResolvedConceptReferencesIteratorWrapper(itr);
        wrapper.setCodingSchemeName(scheme);
        wrapper.setCodingSchemeVersion(version);
        return wrapper;
    }
    return null;
}
/**
 * Searches a single mapping coding scheme by non-presentation properties.
 *
 * <p>Wraps the scheme/version pair into Vectors and delegates to the
 * multi-scheme overload.
 */
public ResolvedConceptReferencesIteratorWrapper searchByProperties(
        String scheme, String version, String matchText,
        String matchAlgorithm, int maxToReturn) {
    System.out.println("searchByProperties scheme: " + scheme);
    System.out.println("searchByProperties version: " + version);
    Vector schemeVec = new Vector();
    Vector versionVec = new Vector();
    schemeVec.add(scheme);
    versionVec.add(version);
    return searchByProperties(schemeVec, versionVec, matchText, matchAlgorithm, maxToReturn);
}
/**
 * Searches the given mapping coding schemes by matching non-presentation
 * properties (COMMENT, DEFINITION, GENERIC). Iterates scheme/version pairs
 * in order and stops at the first scheme that yields a resolved iterator.
 *
 * <p>Fixes: the debug message wrongly said "searchByName" (copy-paste), and
 * {@code itr.numberRemaining()} is now guarded by {@code itr != null},
 * matching {@code searchByCode}.
 *
 * @param schemes        coding scheme names (parallel to versions)
 * @param versions       coding scheme versions (parallel to schemes)
 * @param matchText      property text to match; null/blank -> null
 * @param matchAlgorithm match algorithm; "contains" is replaced by the best
 *                       contains algorithm for the text
 * @param maxToReturn    declared maximum (not applied by this method)
 * @return wrapper carrying the iterator plus the matching scheme/version,
 *         or null when no scheme produced a result
 */
public ResolvedConceptReferencesIteratorWrapper searchByProperties(
        Vector schemes, Vector versions, String matchText,
        String matchAlgorithm, int maxToReturn) {
    if (matchText == null || matchText.trim().length() == 0)
        return null;
    matchText = matchText.trim();
    _logger.debug("searchByProperties ... " + matchText);
    if (matchAlgorithm.compareToIgnoreCase("contains") == 0) {
        matchAlgorithm = new SearchUtils().findBestContainsAlgorithm(matchText);
    }
    CodedNodeSet.PropertyType[] propertyTypes = getAllNonPresentationPropertyTypes();
    LocalNameList propertyNames = null;
    LocalNameList sourceList = null;
    LocalNameList contextList = null;
    NameAndValueList qualifierList = null;
    String language = null;
    // NOTE(review): context hard-coded to SOURCE_OR_TARGET_CODES; the original
    // carried a "to be modified" marker — confirm whether callers need a choice.
    SearchContext searchContext = SearchContext.SOURCE_OR_TARGET_CODES;
    LexBIGService lbSvc = RemoteServerUtil.createLexBIGService();
    MappingExtension mappingExtension = null;
    try {
        mappingExtension = (MappingExtension) lbSvc.getGenericExtension("MappingExtension");
    } catch (Exception ex) {
        ex.printStackTrace();
        return null;
    }
    ResolvedConceptReferencesIterator itr = null;
    int lcv = 0;
    String scheme = null;
    String version = null;
    System.out.println("schemes.size(): " + schemes.size() + " lcv: " + lcv);
    int numberRemaining = -1;
    // Try each scheme until one yields an iterator with a readable match count.
    while (itr == null && numberRemaining == -1 && lcv < schemes.size()) {
        scheme = (String) schemes.elementAt(lcv);
        version = (String) versions.elementAt(lcv);
        String containerName = getMappingRelationsContainerName(scheme, version);
        if (containerName != null) {
            try {
                Mapping mapping =
                    mappingExtension.getMapping(scheme, null, containerName);
                if (mapping != null) {
                    mapping = mapping.restrictToMatchingProperties(
                        propertyNames,
                        propertyTypes,
                        sourceList,
                        contextList,
                        qualifierList,
                        matchAlgorithm,
                        language,
                        null,
                        searchContext);
                    itr = mapping.resolveMapping();
                    if (itr != null) { // resolveMapping may return null
                        try {
                            numberRemaining = itr.numberRemaining();
                            System.out.println("Number of matches: " + numberRemaining);
                        } catch (Exception ex) {
                            ex.printStackTrace();
                        }
                    }
                }
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
        lcv++;
    }
    if (itr != null) {
        // Record which scheme/version actually produced the hits.
        ResolvedConceptReferencesIteratorWrapper wrapper = new ResolvedConceptReferencesIteratorWrapper(itr);
        wrapper.setCodingSchemeName(scheme);
        wrapper.setCodingSchemeVersion(version);
        return wrapper;
    }
    return null;
}
/**
 * Collects the association names declared by the relations container with the
 * given name in the specified coding scheme.
 *
 * <p>Fix: the catch block previously swallowed exceptions silently; it now
 * prints the stack trace like the rest of this class. Dead commented-out
 * code removed.
 *
 * @param lbSvc         LexBIG service used to resolve the scheme
 * @param scheme        coding scheme name
 * @param version       coding scheme version; may be null
 * @param containerName relations container to inspect (case-insensitive match)
 * @return list of association names; empty if the scheme cannot be resolved
 *         or the container declares none
 */
public LocalNameList getSupportedAssociationNames(LexBIGService lbSvc, String scheme,
        String version, String containerName) {
    CodingSchemeVersionOrTag csvt = new CodingSchemeVersionOrTag();
    if (version != null)
        csvt.setVersion(version);
    LocalNameList list = new LocalNameList();
    try {
        CodingScheme cs = lbSvc.resolveCodingScheme(scheme, csvt);
        Relations[] relations = cs.getRelations();
        for (int i = 0; i < relations.length; i++) {
            Relations relation = relations[i];
            _logger.debug("** getSupportedRoleNames containerName: "
                + relation.getContainerName());
            if (relation.getContainerName().compareToIgnoreCase(containerName) == 0) {
                org.LexGrid.relations.AssociationPredicate[] asso_array =
                    relation.getAssociationPredicate();
                for (int j = 0; j < asso_array.length; j++) {
                    list.addEntry(asso_array[j].getAssociationName());
                }
            }
        }
    } catch (Exception ex) {
        // Was silently swallowed; surface failures for diagnosis.
        ex.printStackTrace();
    }
    return list;
}
/**
 * Searches a single mapping coding scheme by relationship (association) name.
 *
 * <p>Wraps the scheme/version pair into Vectors and delegates to the
 * multi-scheme overload.
 */
public ResolvedConceptReferencesIteratorWrapper searchByRelationships(
        String scheme, String version, String matchText,
        String matchAlgorithm, int maxToReturn) {
    Vector schemeVec = new Vector();
    Vector versionVec = new Vector();
    schemeVec.add(scheme);
    versionVec.add(version);
    return searchByRelationships(schemeVec, versionVec, matchText, matchAlgorithm, maxToReturn);
}
/**
 * Searches the given mapping coding schemes by relationship (association)
 * name, restricted to the associations supported by each scheme's mapping
 * container. Stops at the first scheme that yields a resolved iterator.
 *
 * <p>Fixes: the debug message wrongly said "searchByName" (copy-paste), and
 * {@code itr.numberRemaining()} is now guarded by {@code itr != null},
 * matching {@code searchByCode}.
 *
 * @param schemes        coding scheme names (parallel to versions)
 * @param versions       coding scheme versions (parallel to schemes)
 * @param matchText      relationship text to match; null/blank -> null
 * @param matchAlgorithm match algorithm; "contains" is replaced by the best
 *                       contains algorithm for the text
 * @param maxToReturn    declared maximum (not applied by this method)
 * @return wrapper carrying the iterator plus the matching scheme/version,
 *         or null when no scheme produced a result
 */
public ResolvedConceptReferencesIteratorWrapper searchByRelationships(
        Vector schemes, Vector versions, String matchText,
        String matchAlgorithm, int maxToReturn) {
    if (matchText == null || matchText.trim().length() == 0)
        return null;
    matchText = matchText.trim();
    _logger.debug("searchByRelationships ... " + matchText);
    if (matchAlgorithm.compareToIgnoreCase("contains") == 0) {
        matchAlgorithm = new SearchUtils().findBestContainsAlgorithm(matchText);
    }
    SearchDesignationOption option = SearchDesignationOption.ALL;
    String language = null;
    LexBIGService lbSvc = RemoteServerUtil.createLexBIGService();
    MappingExtension mappingExtension = null;
    try {
        mappingExtension = (MappingExtension) lbSvc.getGenericExtension("MappingExtension");
    } catch (Exception ex) {
        ex.printStackTrace();
        return null;
    }
    ResolvedConceptReferencesIterator itr = null;
    int lcv = 0;
    String scheme = null;
    String version = null;
    System.out.println("schemes.size(): " + schemes.size() + " lcv: " + lcv);
    int numberRemaining = -1;
    // Try each scheme until one yields an iterator with a readable match count.
    while (itr == null && numberRemaining == -1 && lcv < schemes.size()) {
        scheme = (String) schemes.elementAt(lcv);
        version = (String) versions.elementAt(lcv);
        String containerName = getMappingRelationsContainerName(scheme, version);
        if (containerName != null) {
            // Restrict to the association names this mapping container supports.
            LocalNameList relationshipList = getSupportedAssociationNames(lbSvc, scheme, version, containerName);
            try {
                Mapping mapping =
                    mappingExtension.getMapping(scheme, null, containerName);
                if (mapping != null) {
                    mapping = mapping.restrictToRelationship(
                        matchText,
                        option,
                        matchAlgorithm,
                        language,
                        relationshipList);
                    itr = mapping.resolveMapping();
                    if (itr != null) { // resolveMapping may return null
                        try {
                            numberRemaining = itr.numberRemaining();
                            System.out.println("Number of matches: " + numberRemaining);
                        } catch (Exception ex) {
                            ex.printStackTrace();
                        }
                    }
                }
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
        lcv++;
    }
    if (itr != null) {
        // Record which scheme/version actually produced the hits.
        ResolvedConceptReferencesIteratorWrapper wrapper = new ResolvedConceptReferencesIteratorWrapper(itr);
        wrapper.setCodingSchemeName(scheme);
        wrapper.setCodingSchemeVersion(version);
        return wrapper;
    }
    return null;
}
/**
 * Restricts the scheme's mapping to the codes found in the given search
 * results, using the default SOURCE_OR_TARGET_CODES context.
 *
 * <p>Delegates to the SearchContext-aware overload.
 */
public static ResolvedConceptReferencesIterator getRestrictedMappingDataIterator(String scheme, String version,
        List<MappingSortOption> sortOptionList, ResolvedConceptReferencesIterator searchResultsIterator) {
    return getRestrictedMappingDataIterator(scheme, version, sortOptionList,
        searchResultsIterator, SearchContext.SOURCE_OR_TARGET_CODES);
}
/**
 * Restricts the scheme's mapping container to exactly the codes present in
 * the given search-results iterator, then resolves the mapping with the
 * requested sort options.
 *
 * <p>NOTE(review): this method fully drains {@code searchResultsIterator}
 * (in pages of 100) to build the code list — the caller's iterator is
 * consumed and cannot be reused afterwards.
 *
 * @param scheme                coding scheme name
 * @param version               coding scheme version; may be null
 * @param sortOptionList        sort options passed to resolveMapping
 * @param searchResultsIterator iterator whose codes restrict the mapping;
 *                              null, or a failing numberRemaining(), -> null
 * @param context               which side(s) of the mapping the codes match
 * @return the restricted mapping iterator, or null on any failure
 */
public static ResolvedConceptReferencesIterator getRestrictedMappingDataIterator(String scheme, String version,
List<MappingSortOption> sortOptionList, ResolvedConceptReferencesIterator searchResultsIterator, SearchContext context) {
System.out.println("(***********) getRestrictedMappingDataIterator ...");
if (searchResultsIterator == null) return null;
// Probe the iterator first; a failing numberRemaining() means it is unusable.
try {
int numRemaining = searchResultsIterator.numberRemaining();
System.out.println("(***********) searchResultsIterator passing number of matches: " + numRemaining);
} catch (Exception e) {
System.out.println("searchResultsIterator.numberRemaining() throws exception???");
return null;
}
CodingSchemeVersionOrTag versionOrTag =
new CodingSchemeVersionOrTag();
if (version != null) {
versionOrTag.setVersion(version);
}
String relationsContainerName = null;
LexBIGService distributed = RemoteServerUtil.createLexBIGService();
try {
CodingScheme cs = distributed.resolveCodingScheme(scheme, versionOrTag);
if (cs == null) return null;
// Locate the relations container flagged as a mapping container.
java.util.Enumeration<? extends Relations> relations = cs.enumerateRelations();
while (relations.hasMoreElements()) {
Relations relation = (Relations) relations.nextElement();
Boolean isMapping = relation.getIsMapping();
System.out.println("isMapping: " + isMapping);
if (isMapping != null && isMapping.equals(Boolean.TRUE)) {
relationsContainerName = relation.getContainerName();
break;
}
}
if (relationsContainerName == null) {
System.out.println("WARNING: Mapping container not found in " + scheme);
return null;
} else {
System.out.println("relationsContainerName " + relationsContainerName);
}
MappingExtension mappingExtension = (MappingExtension)
distributed.getGenericExtension("MappingExtension");
Mapping mapping =
mappingExtension.getMapping(scheme, versionOrTag, relationsContainerName);
// Build the code list by draining the search-results iterator in pages of 100.
ConceptReferenceList codeList = new ConceptReferenceList();
System.out.println("getRestrictedMappingDataIterator Step 5 while loop -- retrieving refs");
if (searchResultsIterator != null) {
int lcv = 0;
while(searchResultsIterator.hasNext()){
ResolvedConceptReference[] refs = searchResultsIterator.next(100).getResolvedConceptReference();
for(ResolvedConceptReference ref : refs){
lcv++;
System.out.println("(" + lcv + ") " + ref.getEntityDescription().getContent() + "(" + ref.getCode() + ")");
codeList.addConceptReference((ConceptReference) ref);
}
}
} else {
System.out.println("resolved_value_set.jsp ResolvedConceptReferencesIterator == NULL???");
}
mapping = mapping.restrictToCodes(codeList, context);
ResolvedConceptReferencesIterator itr = mapping.resolveMapping(sortOptionList);
return itr;
} catch (Exception ex) {
//ex.printStackTrace();
System.out.println("getRestrictedMappingDataIterator throws exceptions???");
}
return null;
}
/**
 * Returns all mapping relationship rows for the given code as a List, by
 * running an exact-match code search and materializing every result through
 * a MappingIteratorBean.
 *
 * @param scheme    coding scheme name
 * @param version   coding scheme version; may be null
 * @param code      concept code to look up
 * @param direction 1 = code as mapping source, -1 = code as mapping target,
 *                  anything else = either side
 * @return list of relationship rows, or null when nothing matched or the
 *         iterator could not be read
 */
public List getMappingRelationship(
String scheme, String version, String code, int direction) {
SearchContext searchContext = SearchContext.SOURCE_OR_TARGET_CODES;
if (direction == 1) searchContext = SearchContext.SOURCE_CODES;
else if (direction == -1) searchContext = SearchContext.TARGET_CODES;
ResolvedConceptReferencesIteratorWrapper wrapper = searchByCode(
scheme, version, code, "exactMatch", searchContext, -1);
if (wrapper == null) return null;
ResolvedConceptReferencesIterator iterator = wrapper.getIterator();
if (iterator == null) return null;
int numberRemaining = 0;
try {
numberRemaining = iterator.numberRemaining();
if (numberRemaining == 0) {
return null;
}
} catch (Exception ex) {
ex.printStackTrace();
return null;
}
// NOTE(review): the constructor and initialize() receive identical arguments;
// this looks redundant — confirm MappingIteratorBean requires both calls.
MappingIteratorBean mappingIteratorBean = new MappingIteratorBean(
iterator,
numberRemaining, // number remaining
0, // istart
50, // iend,
numberRemaining, // size,
0, // pageNumber,
1); // numberPages
mappingIteratorBean.initialize(
iterator,
numberRemaining, // number remaining
0, // istart
50, // iend,
numberRemaining, // size,
0, // pageNumber,
1); // numberPages
// Fetch the entire result range in one call (0 .. numberRemaining).
return mappingIteratorBean.getData(0, numberRemaining); // implement getAll
}
/*
public static String TYPE_ROLE = "type_role";
public static String TYPE_ASSOCIATION = "type_association";
public static String TYPE_SUPERCONCEPT = "type_superconcept";
public static String TYPE_SUBCONCEPT = "type_subconcept";
public static String TYPE_INVERSE_ROLE = "type_inverse_role";
public static String TYPE_INVERSE_ASSOCIATION = "type_inverse_association";
*/
/**
 * Returns the value of the "rela" association qualifier when the associated
 * concept carries one; otherwise returns the given association name unchanged.
 *
 * @param ac              associated concept whose qualifiers are inspected
 * @param associationName fallback name when no "rela" qualifier exists
 * @return the "rela" qualifier value, or {@code associationName}
 */
private String replaceAssociationNameByRela(AssociatedConcept ac,
        String associationName) {
    if (ac.getAssociationQualifiers() == null
            || ac.getAssociationQualifiers().getNameAndValue() == null) {
        return associationName;
    }
    for (NameAndValue qual : ac.getAssociationQualifiers().getNameAndValue()) {
        // First "rela" qualifier wins.
        if (qual.getName().equalsIgnoreCase("rela")) {
            return qual.getContent();
        }
    }
    return associationName;
}
/**
 * Combines forward (direction 1) and inverse (direction -1) mapping
 * relationships for a code into one map keyed by TYPE_ASSOCIATION and
 * TYPE_INVERSE_ASSOCIATION. Keys with no results are omitted.
 */
public HashMap getMappingRelationshipHashMap(String scheme, String version, String code) {
    HashMap combined = new HashMap();
    ArrayList forward = (ArrayList)
        getMappingRelationshipHashMap(scheme, version, code, 1).get(TYPE_ASSOCIATION);
    if (forward != null) {
        combined.put(TYPE_ASSOCIATION, forward);
    }
    ArrayList inverse = (ArrayList)
        getMappingRelationshipHashMap(scheme, version, code, -1).get(TYPE_INVERSE_ASSOCIATION);
    if (inverse != null) {
        combined.put(TYPE_INVERSE_ASSOCIATION, inverse);
    }
    return combined;
}
/**
 * Builds the mapping relationship rows for a single direction. Each row is a
 * pipe-delimited String: relation|name|code|scheme[|qualifiers][|namespace],
 * where qualifiers are "name:value$" pairs. Rows are sorted and stored under
 * TYPE_ASSOCIATION (direction 1) or TYPE_INVERSE_ASSOCIATION (otherwise).
 *
 * @param scheme    coding scheme name
 * @param version   coding scheme version; may be null
 * @param code      concept code to look up (exact match)
 * @param direction 1 = code as source, -1 = code as target,
 *                  anything else = either side
 * @return map with a single list entry, or null when the search found nothing
 */
public HashMap getMappingRelationshipHashMap(
String scheme, String version, String code, int direction) {
System.out.println("========== Calling getMappingRelationshipHashMap direction " + direction);
SearchContext searchContext = SearchContext.SOURCE_OR_TARGET_CODES;
if (direction == 1) searchContext = SearchContext.SOURCE_CODES;
else if (direction == -1) searchContext = SearchContext.TARGET_CODES;
LexBIGService lbSvc = RemoteServerUtil.createLexBIGService();
LexBIGServiceConvenienceMethods lbscm =
new DataUtils().createLexBIGServiceConvenienceMethods(lbSvc);
CodingSchemeVersionOrTag csvt = new CodingSchemeVersionOrTag();
if (version != null)
csvt.setVersion(version);
ResolvedConceptReferencesIteratorWrapper wrapper = searchByCode(
scheme, version, code, "exactMatch", searchContext, -1);
if (wrapper == null) return null;
ResolvedConceptReferencesIterator iterator = wrapper.getIterator();
if (iterator == null) return null;
HashMap hmap = new HashMap();
ArrayList list = new ArrayList();
int knt = 0;
try {
while (iterator.hasNext()) {
knt++;
System.out.println("knt: " + knt);
ResolvedConceptReference ref = (ResolvedConceptReference) iterator.next();
System.out.println("ref.etCode(): " + ref.getCode());
System.out.println("ref name: " + ref.getEntityDescription().getContent());
System.out.println("ref coding scheme: " + ref.getCodingSchemeName());
/*
AssociationList asso_of = ref.getTargetOf();
if (direction == -1) {
asso_of = ref.getSourceOf();
}
*/
// Always walks sourceOf; the direction only changes row formatting below.
AssociationList asso_of = ref.getSourceOf();
if (asso_of == null) {
System.out.println("asso_of == null ??? " );
}
if (asso_of != null) {
Association[] associations =
asso_of.getAssociation();
if (associations == null) {
System.out.println("associations == null??? " );
}
if (associations != null) {
System.out.println("associations.length: " + associations.length);
for (int i = 0; i < associations.length; i++) {
Association assoc = associations[i];
String associationName = null;
// Prefer the resolved association name; fall back to the raw code on failure.
try {
associationName = lbscm
.getAssociationNameFromAssociationCode(
scheme, csvt, assoc
.getAssociationName());
} catch (Exception ex) {
associationName = assoc.getAssociationName();
}
System.out.println("associationName: " + associationName);
AssociatedConcept[] acl =
assoc.getAssociatedConcepts()
.getAssociatedConcept();
System.out.println("acl.length: " + acl.length);
for (int j = 0; j < acl.length; j++) {
AssociatedConcept ac = acl[j];
EntityDescription ed =
ac.getEntityDescription();
String name = "No Description";
if (ed != null)
name = ed.getContent();
String pt = name;
// Skip equivalentClass links and anonymous ("@") concept codes.
if (associationName
.compareToIgnoreCase("equivalentClass") != 0
&& ac.getConceptCode().indexOf("@") == -1) {
String relaValue =
replaceAssociationNameByRela(
ac, associationName);
// Forward rows describe the target concept (ac) ...
String s =
relaValue + "|" + pt + "|"
+ ac.getConceptCode() + "|"
+ ac.getCodingSchemeName();
// ... inverse rows describe the queried concept (ref) instead.
if (direction == -1) {
s = relaValue + "|" + ref.getEntityDescription().getContent() + "|"
+ ref.getCode() + "|"
+ ref.getCodingSchemeName();
}
// Append qualifiers as "name:value$" pairs when present.
if (ac.getAssociationQualifiers() != null) {
String qualifiers = "";
for (NameAndValue qual : ac
.getAssociationQualifiers()
.getNameAndValue()) {
String qualifier_name = qual.getName();
String qualifier_value = qual.getContent();
qualifiers = qualifiers + (qualifier_name + ":" + qualifier_value) + "$";
}
s = s + "|" + qualifiers;
}
if (direction == -1) {
s = s + "|" + ref.getCodeNamespace();
} else {
s = s + "|" + ac.getCodeNamespace();
}
System.out.println(s);
list.add(s);
}
}
}
}
}
}
if (list.size() > 0) {
Collections.sort(list);
}
if (direction == 1) {
hmap.put(TYPE_ASSOCIATION, list);
} else {
hmap.put(TYPE_INVERSE_ASSOCIATION, list);
}
} catch (Exception ex) {
ex.printStackTrace();
}
return hmap;
}
}
|
software/ncitbrowser/src/java/gov/nih/nci/evs/browser/utils/MappingSearchUtils.java
|
package gov.nih.nci.evs.browser.utils;
import java.util.*;
import java.sql.*;
import org.LexGrid.LexBIG.LexBIGService.CodedNodeSet.*;
import org.LexGrid.LexBIG.DataModel.Collections.*;
import org.LexGrid.LexBIG.DataModel.Core.*;
import org.LexGrid.LexBIG.Exceptions.*;
import org.LexGrid.LexBIG.Impl.*;
import org.LexGrid.LexBIG.LexBIGService.*;
import org.LexGrid.concepts.*;
import org.LexGrid.LexBIG.Utility.Iterators.*;
import org.LexGrid.codingSchemes.*;
import org.LexGrid.LexBIG.Utility.*;
import org.LexGrid.LexBIG.DataModel.Core.types.*;
import org.LexGrid.LexBIG.Extensions.Generic.*;
import org.LexGrid.naming.*;
import org.LexGrid.LexBIG.DataModel.InterfaceElements.*;
import org.LexGrid.commonTypes.*;
import gov.nih.nci.evs.browser.properties.*;
import gov.nih.nci.evs.browser.common.*;
import org.apache.commons.codec.language.*;
import org.apache.log4j.*;
import org.LexGrid.relations.Relations;
import org.LexGrid.LexBIG.Extensions.Generic.MappingExtension.Mapping.SearchContext;
import org.LexGrid.LexBIG.Extensions.Generic.*;
import org.LexGrid.LexBIG.Extensions.Generic.SupplementExtension;
import org.LexGrid.LexBIG.Extensions.Generic.MappingExtension;
import org.LexGrid.LexBIG.Extensions.Generic.MappingExtension.Direction;
import org.LexGrid.LexBIG.Extensions.Generic.MappingExtension.MappingSortOption;
import org.LexGrid.LexBIG.Extensions.Generic.MappingExtension.MappingSortOptionName;
import org.LexGrid.LexBIG.Extensions.Generic.MappingExtension.QualifierSortOption;
import org.LexGrid.LexBIG.Extensions.Generic.MappingExtension.Mapping;
import org.LexGrid.LexBIG.LexBIGService.CodedNodeSet.SearchDesignationOption;
import static gov.nih.nci.evs.browser.common.Constants.*;
import gov.nih.nci.evs.browser.bean.*;
/**
* <!-- LICENSE_TEXT_START -->
* Copyright 2008,2009 NGIT. This software was developed in conjunction
* with the National Cancer Institute, and so to the extent government
* employees are co-authors, any rights in such works shall be subject
* to Title 17 of the United States Code, section 105.
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the disclaimer of Article 3,
* below. Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution.
* 2. The end-user documentation included with the redistribution,
* if any, must include the following acknowledgment:
* "This product includes software developed by NGIT and the National
* Cancer Institute." If no such end-user documentation is to be
* included, this acknowledgment shall appear in the software itself,
* wherever such third-party acknowledgments normally appear.
* 3. The names "The National Cancer Institute", "NCI" and "NGIT" must
* not be used to endorse or promote products derived from this software.
* 4. This license does not authorize the incorporation of this software
* into any third party proprietary programs. This license does not
* authorize the recipient to use any trademarks owned by either NCI
* or NGIT
* 5. THIS SOFTWARE IS PROVIDED "AS IS," AND ANY EXPRESSED OR IMPLIED
* WARRANTIES, (INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE) ARE
* DISCLAIMED. IN NO EVENT SHALL THE NATIONAL CANCER INSTITUTE,
* NGIT, OR THEIR AFFILIATES BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
* ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
* <!-- LICENSE_TEXT_END -->
*/
/**
* @author EVS Team
* @version 1.0
*
* Modification history Initial implementation kim.ong@ngc.com
*
*/
public class MappingSearchUtils {
private static Logger _logger = Logger.getLogger(SearchUtils.class);
public MappingSearchUtils() {
}
/**
 * Looks up the name of the relations container that is flagged as a mapping
 * container ({@code isMapping == TRUE}) in the given coding scheme.
 *
 * @param scheme  coding scheme name or URI to resolve
 * @param version coding scheme version; may be null for the default tag
 * @return the mapping container name, or null if the scheme cannot be
 *         resolved or no relations container is flagged as a mapping
 */
public static String getMappingRelationsContainerName(String scheme, String version) {
    CodingSchemeVersionOrTag csvt = new CodingSchemeVersionOrTag();
    if (version != null) {
        csvt.setVersion(version);
    }
    String containerName = null;
    try {
        LexBIGService service = RemoteServerUtil.createLexBIGService();
        CodingScheme codingScheme = service.resolveCodingScheme(scheme, csvt);
        if (codingScheme == null) return null;
        java.util.Enumeration<? extends Relations> relationEnum = codingScheme.enumerateRelations();
        // Stop at the first container flagged as a mapping container.
        while (containerName == null && relationEnum.hasMoreElements()) {
            Relations rel = (Relations) relationEnum.nextElement();
            Boolean isMapping = rel.getIsMapping();
            System.out.println("isMapping: " + isMapping);
            if (isMapping != null && isMapping.equals(Boolean.TRUE)) {
                containerName = rel.getContainerName();
            }
        }
        if (containerName == null) {
            System.out.println("WARNING: Mapping container not found in " + scheme);
            return null;
        } else {
            System.out.println("relationsContainerName " + containerName);
        }
    } catch (Exception ex) {
        ex.printStackTrace();
    }
    return containerName;
}
/**
 * Returns the property types used for property-based searches: COMMENT,
 * DEFINITION and GENERIC (i.e. everything except PRESENTATION).
 */
private CodedNodeSet.PropertyType[] getAllNonPresentationPropertyTypes() {
    return new CodedNodeSet.PropertyType[] {
        PropertyType.COMMENT,
        PropertyType.DEFINITION,
        PropertyType.GENERIC
    };
}
/**
 * Searches a single mapping coding scheme by concept code, matching against
 * either source or target codes.
 *
 * <p>Delegates to the SearchContext-aware overload with
 * {@code SOURCE_OR_TARGET_CODES}. The previous version also built
 * scheme/version Vectors that were never used; those dead locals are removed.
 *
 * @param scheme         coding scheme name
 * @param version        coding scheme version; may be null
 * @param matchText      concept code to match
 * @param matchAlgorithm match algorithm name (e.g. "exactMatch", "contains")
 * @param maxToReturn    maximum results (passed through; -1 for unlimited)
 * @return wrapper around the result iterator, or null if nothing matched
 */
public ResolvedConceptReferencesIteratorWrapper searchByCode(
        String scheme, String version, String matchText,
        String matchAlgorithm, int maxToReturn) {
    return searchByCode(scheme, version, matchText, matchAlgorithm,
        SearchContext.SOURCE_OR_TARGET_CODES, maxToReturn);
}
/**
 * Searches several mapping coding schemes by concept code, matching against
 * either source or target codes; delegates to the SearchContext-aware overload.
 *
 * @param schemes        coding scheme names (parallel to versions)
 * @param versions       coding scheme versions (parallel to schemes)
 * @param matchText      concept code to match
 * @param matchAlgorithm match algorithm name
 * @param maxToReturn    maximum results to return
 * @return wrapper around the result iterator, or null if nothing matched
 */
public ResolvedConceptReferencesIteratorWrapper searchByCode(
        Vector schemes, Vector versions, String matchText,
        String matchAlgorithm, int maxToReturn) {
    return searchByCode(schemes, versions, matchText, matchAlgorithm,
        SearchContext.SOURCE_OR_TARGET_CODES, maxToReturn);
}
/**
 * Searches a single mapping coding scheme by concept code with an explicit
 * search context (source codes, target codes, or either).
 *
 * @param scheme         coding scheme name or URN
 * @param version        scheme version; may be null
 * @param matchText      the concept code to match
 * @param matchAlgorithm e.g. "exactMatch" or "contains"
 * @param searchContext  which side(s) of the mapping to match against
 * @param maxToReturn    passed through unchanged
 * @return a wrapper over the result iterator, or null when nothing matched
 */
public ResolvedConceptReferencesIteratorWrapper searchByCode(
        String scheme, String version, String matchText,
        String matchAlgorithm, SearchContext searchContext, int maxToReturn) {
    // Wrap the single scheme/version pair so the multi-scheme overload
    // can do the work.
    Vector singleScheme = new Vector();
    singleScheme.add(scheme);
    Vector singleVersion = new Vector();
    singleVersion.add(version);
    return searchByCode(singleScheme, singleVersion, matchText,
        matchAlgorithm, searchContext, maxToReturn);
}
/**
 * Searches the given mapping coding schemes by concept code and returns an
 * iterator over the matching mapping rows.
 *
 * Schemes are tried in order; the first scheme that resolves a mapping
 * iterator stops the scan. The wrapper records which scheme/version
 * produced the hits so callers can label the results.
 *
 * @param schemes        coding scheme names, parallel to versions
 * @param versions       scheme versions, parallel to schemes
 * @param matchText      the concept code to match
 * @param matchAlgorithm e.g. "exactMatch" or "contains"
 * @param searchContext  which side(s) of the mapping to match against
 * @param maxToReturn    currently unused by this implementation
 * @return a wrapper over the result iterator, or null when the input is
 *         blank, the MappingExtension is unavailable, or nothing matched
 */
public ResolvedConceptReferencesIteratorWrapper searchByCode(
    Vector schemes, Vector versions, String matchText,
    String matchAlgorithm, SearchContext searchContext, int maxToReturn) {
    System.out.println("============================== MappingSearchUtils searchByCode");
    if (matchText == null || matchText.trim().length() == 0)
        return null;
    matchText = matchText.trim();
    _logger.debug("searchByCode ... " + matchText);
    // "contains" is mapped to whichever concrete contains-style algorithm
    // best fits the query text.
    if (matchAlgorithm.compareToIgnoreCase("contains") == 0)
    {
        matchAlgorithm = new SearchUtils().findBestContainsAlgorithm(matchText);
    }
    LexBIGService lbSvc = RemoteServerUtil.createLexBIGService();
    MappingExtension mappingExtension = null;
    try {
        mappingExtension = (MappingExtension)lbSvc.getGenericExtension("MappingExtension");
    } catch (Exception ex) {
        ex.printStackTrace();
        return null;
    }
    ResolvedConceptReferencesIterator itr = null;
    int lcv = 0;
    String scheme = null;
    String version = null;
    int numberRemaining = -1;
    // Try each scheme in turn until one yields a resolved mapping iterator.
    while (itr == null && numberRemaining == -1 && lcv < schemes.size()) {
        scheme = (String) schemes.elementAt(lcv);
        version = (String) versions.elementAt(lcv);
        System.out.println(scheme + " (version: " + version);
        String containerName = getMappingRelationsContainerName(scheme, version);
        System.out.println("\tcontainer name: " + containerName);
        if (containerName != null) {
            try {
                Mapping mapping =
                    mappingExtension.getMapping(scheme, null, containerName);
                if (mapping != null) {
                    // Restrict the mapping to the single requested code.
                    ConceptReferenceList codeList = new ConceptReferenceList();
                    ConceptReference ref = new ConceptReference();
                    ref.setConceptCode(matchText);
                    codeList.addConceptReference(ref);
                    mapping = mapping.restrictToCodes(codeList, searchContext);
                    itr = mapping.resolveMapping();
                    if (itr != null) {
                        try {
                            numberRemaining = itr.numberRemaining();
                            System.out.println("\tsearchByCode matches: " + numberRemaining);
                        } catch (Exception ex) {
                            ex.printStackTrace();
                        }
                    }
                }
            } catch (Exception e) {
                // Per-scheme failure: log and fall through to the next scheme.
                e.printStackTrace();
            }
        }
        lcv++;
    }
    if (itr != null) {
        ResolvedConceptReferencesIteratorWrapper wrapper = new ResolvedConceptReferencesIteratorWrapper(itr);
        wrapper.setCodingSchemeName(scheme);
        wrapper.setCodingSchemeVersion(version);
        return wrapper;
    }
    return null;
}
/**
 * Searches a single mapping coding scheme by designation (name).
 *
 * @param scheme         coding scheme name or URN
 * @param version        scheme version; may be null
 * @param matchText      the text to match against designations
 * @param matchAlgorithm e.g. "exactMatch" or "contains"
 * @param maxToReturn    passed through unchanged
 * @return a wrapper over the result iterator, or null when nothing matched
 */
public ResolvedConceptReferencesIteratorWrapper searchByName(
        String scheme, String version, String matchText,
        String matchAlgorithm, int maxToReturn) {
    // Wrap the single scheme/version pair and delegate.
    Vector singleScheme = new Vector();
    singleScheme.add(scheme);
    Vector singleVersion = new Vector();
    singleVersion.add(version);
    return searchByName(singleScheme, singleVersion, matchText,
        matchAlgorithm, maxToReturn);
}
/**
 * Searches the given mapping coding schemes by designation (name) and
 * returns an iterator over the matching mapping rows.
 *
 * Schemes are tried in order; the first scheme that resolves a mapping
 * iterator stops the scan.
 *
 * @param schemes        coding scheme names, parallel to versions
 * @param versions       scheme versions, parallel to schemes
 * @param matchText      the text to match against designations
 * @param matchAlgorithm e.g. "exactMatch" or "contains"
 * @param maxToReturn    currently unused by this implementation
 * @return a wrapper over the result iterator, or null when the input is
 *         blank, the MappingExtension is unavailable, or nothing matched
 */
public ResolvedConceptReferencesIteratorWrapper searchByName(
    Vector schemes, Vector versions, String matchText,
    String matchAlgorithm, int maxToReturn) {
    if (matchText == null || matchText.trim().length() == 0)
        return null;
    matchText = matchText.trim();
    _logger.debug("searchByName ... " + matchText);
    // "contains" is mapped to whichever concrete contains-style algorithm
    // best fits the query text.
    if (matchAlgorithm.compareToIgnoreCase("contains") == 0)
    {
        matchAlgorithm = new SearchUtils().findBestContainsAlgorithm(matchText);
    }
    LexBIGService lbSvc = RemoteServerUtil.createLexBIGService();
    MappingExtension mappingExtension = null;
    try {
        mappingExtension = (MappingExtension)lbSvc.getGenericExtension("MappingExtension");
    } catch (Exception ex) {
        ex.printStackTrace();
        return null;
    }
    ResolvedConceptReferencesIterator itr = null;
    int lcv = 0;
    String scheme = null;
    String version = null;
    int numberRemaining = -1;
    // Try each scheme in turn until one yields a resolved mapping iterator.
    while (itr == null && numberRemaining == -1 && lcv < schemes.size()) {
        scheme = (String) schemes.elementAt(lcv);
        version = (String) versions.elementAt(lcv);
        String containerName = getMappingRelationsContainerName(scheme, version);
        if (containerName != null) {
            try {
                Mapping mapping =
                    mappingExtension.getMapping(scheme, null, containerName);
                if (mapping != null) {
                    mapping = mapping.restrictToMatchingDesignations(
                        matchText, SearchDesignationOption.ALL, matchAlgorithm, null, SearchContext.SOURCE_OR_TARGET_CODES
                    );
                    //Finally, resolve the Mapping.
                    itr = mapping.resolveMapping();
                    try {
                        numberRemaining = itr.numberRemaining();
                        System.out.println("Number of matches: " + numberRemaining);
                    } catch (Exception ex) {
                        ex.printStackTrace();
                    }
                }
            } catch (Exception e) {
                // Per-scheme failure: log and fall through to the next scheme.
                e.printStackTrace();
                //return null;
            }
        }
        lcv++;
    }
    if (itr != null) {
        ResolvedConceptReferencesIteratorWrapper wrapper = new ResolvedConceptReferencesIteratorWrapper(itr);
        wrapper.setCodingSchemeName(scheme);
        wrapper.setCodingSchemeVersion(version);
        return wrapper;
    }
    return null;
}
/**
 * Searches a single mapping coding scheme by non-presentation properties
 * (comments, definitions, generic properties).
 *
 * @param scheme         coding scheme name or URN
 * @param version        scheme version; may be null
 * @param matchText      the text to match against property values
 * @param matchAlgorithm e.g. "exactMatch" or "contains"
 * @param maxToReturn    passed through unchanged
 * @return a wrapper over the result iterator, or null when nothing matched
 */
public ResolvedConceptReferencesIteratorWrapper searchByProperties(
        String scheme, String version, String matchText,
        String matchAlgorithm, int maxToReturn) {
    System.out.println("searchByProperties scheme: " + scheme);
    System.out.println("searchByProperties version: " + version);
    // Wrap the single scheme/version pair and delegate.
    Vector singleScheme = new Vector();
    singleScheme.add(scheme);
    Vector singleVersion = new Vector();
    singleVersion.add(version);
    return searchByProperties(singleScheme, singleVersion, matchText,
        matchAlgorithm, maxToReturn);
}
/**
 * Searches the given mapping coding schemes by non-presentation properties
 * (comments, definitions, generic properties) and returns an iterator over
 * the matching mapping rows.
 *
 * Schemes are tried in order; the first scheme that resolves a mapping
 * iterator stops the scan.
 *
 * @param schemes        coding scheme names, parallel to versions
 * @param versions       scheme versions, parallel to schemes
 * @param matchText      the text to match against property values
 * @param matchAlgorithm e.g. "exactMatch" or "contains"
 * @param maxToReturn    currently unused by this implementation
 * @return a wrapper over the result iterator, or null when the input is
 *         blank, the MappingExtension is unavailable, or nothing matched
 */
public ResolvedConceptReferencesIteratorWrapper searchByProperties(
    Vector schemes, Vector versions, String matchText,
    String matchAlgorithm, int maxToReturn) {
    if (matchText == null || matchText.trim().length() == 0)
        return null;
    matchText = matchText.trim();
    // Fixed: this debug line previously said "searchByName" (copy-paste).
    _logger.debug("searchByProperties ... " + matchText);
    if (matchAlgorithm.compareToIgnoreCase("contains") == 0) {
        matchAlgorithm = new SearchUtils().findBestContainsAlgorithm(matchText);
    }
    CodedNodeSet.PropertyType[] propertyTypes = getAllNonPresentationPropertyTypes();
    LocalNameList propertyNames = null;
    LocalNameList sourceList = null;
    LocalNameList contextList = null;
    NameAndValueList qualifierList = null;
    String language = null;
    // to be modified
    SearchContext searchContext = SearchContext.SOURCE_OR_TARGET_CODES;
    LexBIGService lbSvc = RemoteServerUtil.createLexBIGService();
    MappingExtension mappingExtension = null;
    try {
        mappingExtension = (MappingExtension)lbSvc.getGenericExtension("MappingExtension");
    } catch (Exception ex) {
        ex.printStackTrace();
        return null;
    }
    ResolvedConceptReferencesIterator itr = null;
    int lcv = 0;
    String scheme = null;
    String version = null;
    System.out.println("schemes.size(): " + schemes.size() + " lcv: " + lcv);
    int numberRemaining = -1;
    // Try each scheme in turn until one yields a resolved mapping iterator.
    while (itr == null && numberRemaining == -1 && lcv < schemes.size()) {
        scheme = (String) schemes.elementAt(lcv);
        version = (String) versions.elementAt(lcv);
        String containerName = getMappingRelationsContainerName(scheme, version);
        if (containerName != null) {
            try {
                Mapping mapping =
                    mappingExtension.getMapping(scheme, null, containerName);
                if (mapping != null) {
                    mapping = mapping.restrictToMatchingProperties(
                        propertyNames,
                        propertyTypes,
                        sourceList,
                        contextList,
                        qualifierList,
                        matchAlgorithm,
                        language,
                        null,
                        searchContext);
                    //Finally, resolve the Mapping.
                    itr = mapping.resolveMapping();
                    try {
                        numberRemaining = itr.numberRemaining();
                        System.out.println("Number of matches: " + numberRemaining);
                    } catch (Exception ex) {
                        ex.printStackTrace();
                    }
                }
            } catch (Exception e) {
                // Per-scheme failure: log and fall through to the next scheme.
                e.printStackTrace();
            }
        }
        lcv++;
    }
    if (itr != null) {
        ResolvedConceptReferencesIteratorWrapper wrapper = new ResolvedConceptReferencesIteratorWrapper(itr);
        wrapper.setCodingSchemeName(scheme);
        wrapper.setCodingSchemeVersion(version);
        return wrapper;
    }
    return null;
}
/**
 * Collects the association (predicate) names declared in the named
 * relations container of a coding scheme.
 *
 * @param lbSvc         service used to resolve the coding scheme
 * @param scheme        coding scheme name or URN
 * @param version       scheme version; may be null
 * @param containerName relations container to read predicates from
 *                      (compared case-insensitively)
 * @return the association names found; empty when the container has none
 *         or the scheme cannot be resolved
 */
public LocalNameList getSupportedAssociationNames(LexBIGService lbSvc, String scheme,
    String version, String containerName) {
    CodingSchemeVersionOrTag csvt = new CodingSchemeVersionOrTag();
    if (version != null)
        csvt.setVersion(version);
    LocalNameList list = new LocalNameList();
    try {
        CodingScheme cs = lbSvc.resolveCodingScheme(scheme, csvt);
        Relations[] relations = cs.getRelations();
        for (int i = 0; i < relations.length; i++) {
            Relations relation = relations[i];
            _logger.debug("** getSupportedRoleNames containerName: "
                + relation.getContainerName());
            if (relation.getContainerName().compareToIgnoreCase(containerName) == 0) {
                org.LexGrid.relations.AssociationPredicate[] asso_array =
                    relation.getAssociationPredicate();
                for (int j = 0; j < asso_array.length; j++) {
                    org.LexGrid.relations.AssociationPredicate association =
                        (org.LexGrid.relations.AssociationPredicate) asso_array[j];
                    list.addEntry(association.getAssociationName());
                }
            }
        }
    } catch (Exception ex) {
        // Fixed: this catch used to be empty, silently swallowing resolution
        // failures. Log it like the rest of this class does.
        ex.printStackTrace();
    }
    return list;
}
/**
 * Searches a single mapping coding scheme by relationship name.
 *
 * @param scheme         coding scheme name or URN
 * @param version        scheme version; may be null
 * @param matchText      the text to match against relationship names
 * @param matchAlgorithm e.g. "exactMatch" or "contains"
 * @param maxToReturn    passed through unchanged
 * @return a wrapper over the result iterator, or null when nothing matched
 */
public ResolvedConceptReferencesIteratorWrapper searchByRelationships(
        String scheme, String version, String matchText,
        String matchAlgorithm, int maxToReturn) {
    // Wrap the single scheme/version pair and delegate.
    Vector singleScheme = new Vector();
    singleScheme.add(scheme);
    Vector singleVersion = new Vector();
    singleVersion.add(version);
    return searchByRelationships(singleScheme, singleVersion, matchText,
        matchAlgorithm, maxToReturn);
}
/**
 * Searches the given mapping coding schemes by relationship name and
 * returns an iterator over the matching mapping rows.
 *
 * Schemes are tried in order; the first scheme that resolves a mapping
 * iterator stops the scan. The set of candidate relationship names is read
 * from the scheme's mapping relations container.
 *
 * @param schemes        coding scheme names, parallel to versions
 * @param versions       scheme versions, parallel to schemes
 * @param matchText      the text to match against relationship names
 * @param matchAlgorithm e.g. "exactMatch" or "contains"
 * @param maxToReturn    currently unused by this implementation
 * @return a wrapper over the result iterator, or null when the input is
 *         blank, the MappingExtension is unavailable, or nothing matched
 */
public ResolvedConceptReferencesIteratorWrapper searchByRelationships(
    Vector schemes, Vector versions, String matchText,
    String matchAlgorithm, int maxToReturn) {
    if (matchText == null || matchText.trim().length() == 0)
        return null;
    matchText = matchText.trim();
    // Fixed: this debug line previously said "searchByName" (copy-paste).
    _logger.debug("searchByRelationships ... " + matchText);
    if (matchAlgorithm.compareToIgnoreCase("contains") == 0) {
        matchAlgorithm = new SearchUtils().findBestContainsAlgorithm(matchText);
    }
    SearchDesignationOption option = SearchDesignationOption.ALL;
    String language = null;
    // Removed dead locals (propertyTypes, propertyNames, sourceList,
    // contextList, qualifierList): restrictToRelationship never used them.
    LexBIGService lbSvc = RemoteServerUtil.createLexBIGService();
    MappingExtension mappingExtension = null;
    try {
        mappingExtension = (MappingExtension)lbSvc.getGenericExtension("MappingExtension");
    } catch (Exception ex) {
        ex.printStackTrace();
        return null;
    }
    ResolvedConceptReferencesIterator itr = null;
    int lcv = 0;
    String scheme = null;
    String version = null;
    System.out.println("schemes.size(): " + schemes.size() + " lcv: " + lcv);
    int numberRemaining = -1;
    // Try each scheme in turn until one yields a resolved mapping iterator.
    while (itr == null && numberRemaining == -1 && lcv < schemes.size()) {
        scheme = (String) schemes.elementAt(lcv);
        version = (String) versions.elementAt(lcv);
        String containerName = getMappingRelationsContainerName(scheme, version);
        if (containerName != null) {
            LocalNameList relationshipList = getSupportedAssociationNames(lbSvc, scheme, version, containerName);
            try {
                Mapping mapping =
                    mappingExtension.getMapping(scheme, null, containerName);
                if (mapping != null) {
                    mapping = mapping.restrictToRelationship(
                        matchText,
                        option,
                        matchAlgorithm,
                        language,
                        relationshipList);
                    //Finally, resolve the Mapping.
                    itr = mapping.resolveMapping();
                    try {
                        numberRemaining = itr.numberRemaining();
                        System.out.println("Number of matches: " + numberRemaining);
                    } catch (Exception ex) {
                        ex.printStackTrace();
                    }
                }
            } catch (Exception e) {
                // Per-scheme failure: log and fall through to the next scheme.
                e.printStackTrace();
            }
        }
        lcv++;
    }
    if (itr != null) {
        ResolvedConceptReferencesIteratorWrapper wrapper = new ResolvedConceptReferencesIteratorWrapper(itr);
        wrapper.setCodingSchemeName(scheme);
        wrapper.setCodingSchemeVersion(version);
        return wrapper;
    }
    return null;
}
/**
 * Restricts a mapping to the codes found in a prior search result,
 * matching on either side of the mapping.
 *
 * @param sortOptionList        sort options applied when resolving
 * @param searchResultsIterator iterator over the prior search hits
 * @return the restricted mapping iterator, or null on failure
 */
public static ResolvedConceptReferencesIterator getRestrictedMappingDataIterator(String scheme, String version,
        List<MappingSortOption> sortOptionList, ResolvedConceptReferencesIterator searchResultsIterator) {
    // Default: restrict against source or target codes.
    return getRestrictedMappingDataIterator(
        scheme, version, sortOptionList, searchResultsIterator,
        SearchContext.SOURCE_OR_TARGET_CODES);
}
/**
 * Restricts the scheme's mapping to the set of codes produced by a prior
 * search, then resolves it with the given sort options.
 *
 * Drains {@code searchResultsIterator} (in batches of 100) into a code
 * list, restricts the scheme's mapping container to those codes in the
 * given context, and resolves the restricted mapping.
 *
 * @param scheme                coding scheme name or URN
 * @param version               scheme version; may be null
 * @param sortOptionList        sort options applied when resolving
 * @param searchResultsIterator iterator over the prior search hits;
 *                              consumed by this call
 * @param context               which side(s) of the mapping to restrict on
 * @return the restricted mapping iterator, or null on any failure
 */
public static ResolvedConceptReferencesIterator getRestrictedMappingDataIterator(String scheme, String version,
    List<MappingSortOption> sortOptionList, ResolvedConceptReferencesIterator searchResultsIterator, SearchContext context) {
    System.out.println("(***********) getRestrictedMappingDataIterator ...");
    if (searchResultsIterator == null) return null;
    try {
        int numRemaining = searchResultsIterator.numberRemaining();
        System.out.println("(***********) searchResultsIterator passing number of matches: " + numRemaining);
    } catch (Exception e) {
        System.out.println("searchResultsIterator.numberRemaining() throws exception???");
        return null;
    }
    CodingSchemeVersionOrTag versionOrTag =
        new CodingSchemeVersionOrTag();
    if (version != null) {
        versionOrTag.setVersion(version);
    }
    // Reuse the shared lookup instead of duplicating the isMapping
    // container scan that used to live inline here.
    String relationsContainerName = getMappingRelationsContainerName(scheme, version);
    if (relationsContainerName == null) {
        return null;
    }
    LexBIGService distributed = RemoteServerUtil.createLexBIGService();
    try {
        MappingExtension mappingExtension = (MappingExtension)
            distributed.getGenericExtension("MappingExtension");
        Mapping mapping =
            mappingExtension.getMapping(scheme, versionOrTag, relationsContainerName);
        // Drain the search-result iterator into a code list.
        ConceptReferenceList codeList = new ConceptReferenceList();
        System.out.println("getRestrictedMappingDataIterator Step 5 while loop -- retrieving refs");
        int lcv = 0;
        while (searchResultsIterator.hasNext()) {
            ResolvedConceptReference[] refs = searchResultsIterator.next(100).getResolvedConceptReference();
            for (ResolvedConceptReference ref : refs) {
                lcv++;
                System.out.println("(" + lcv + ") " + ref.getEntityDescription().getContent() + "(" + ref.getCode() + ")");
                codeList.addConceptReference((ConceptReference) ref);
            }
        }
        mapping = mapping.restrictToCodes(codeList, context);
        return mapping.resolveMapping(sortOptionList);
    } catch (Exception ex) {
        //ex.printStackTrace();
        System.out.println("getRestrictedMappingDataIterator throws exceptions???");
    }
    return null;
}
/**
 * Returns the full list of mapping rows involving the given code.
 *
 * @param scheme    coding scheme name or URN
 * @param version   scheme version; may be null
 * @param code      the concept code to look up (matched with "exactMatch")
 * @param direction 1 = code as source, -1 = code as target,
 *                  anything else = either side
 * @return the rows as produced by MappingIteratorBean.getData, or null
 *         when there are no matches or the lookup fails
 */
public List getMappingRelationship(
    String scheme, String version, String code, int direction) {
    SearchContext searchContext = SearchContext.SOURCE_OR_TARGET_CODES;
    if (direction == 1) searchContext = SearchContext.SOURCE_CODES;
    else if (direction == -1) searchContext = SearchContext.TARGET_CODES;
    ResolvedConceptReferencesIteratorWrapper wrapper = searchByCode(
        scheme, version, code, "exactMatch", searchContext, -1);
    if (wrapper == null) return null;
    ResolvedConceptReferencesIterator iterator = wrapper.getIterator();
    if (iterator == null) return null;
    int numberRemaining = 0;
    try {
        numberRemaining = iterator.numberRemaining();
        if (numberRemaining == 0) {
            return null;
        }
    } catch (Exception ex) {
        ex.printStackTrace();
        return null;
    }
    // NOTE(review): the bean is constructed with these arguments and then
    // initialize() is called again with the same arguments — this looks
    // redundant; confirm against MappingIteratorBean before simplifying.
    MappingIteratorBean mappingIteratorBean = new MappingIteratorBean(
        iterator,
        numberRemaining, // number remaining
        0, // istart
        50, // iend,
        numberRemaining, // size,
        0, // pageNumber,
        1); // numberPages
    mappingIteratorBean.initialize(
        iterator,
        numberRemaining, // number remaining
        0, // istart
        50, // iend,
        numberRemaining, // size,
        0, // pageNumber,
        1); // numberPages
    return mappingIteratorBean.getData(0, numberRemaining); // implement getAll
}
/*
public static String TYPE_ROLE = "type_role";
public static String TYPE_ASSOCIATION = "type_association";
public static String TYPE_SUPERCONCEPT = "type_superconcept";
public static String TYPE_SUBCONCEPT = "type_subconcept";
public static String TYPE_INVERSE_ROLE = "type_inverse_role";
public static String TYPE_INVERSE_ASSOCIATION = "type_inverse_association";
*/
/**
 * Returns the value of the "rela" association qualifier if the associated
 * concept carries one; otherwise returns the given association name
 * unchanged.
 *
 * @param ac              the associated concept whose qualifiers are read
 * @param associationName fallback name when no "rela" qualifier exists
 * @return the "rela" qualifier value, or associationName
 */
private String replaceAssociationNameByRela(AssociatedConcept ac,
        String associationName) {
    if (ac.getAssociationQualifiers() == null
            || ac.getAssociationQualifiers().getNameAndValue() == null) {
        return associationName;
    }
    for (NameAndValue qual : ac.getAssociationQualifiers().getNameAndValue()) {
        // A "rela" qualifier overrides the generic association name.
        if (qual.getName().compareToIgnoreCase("rela") == 0) {
            return qual.getContent();
        }
    }
    return associationName;
}
/**
 * Returns both mapping directions for the given code, keyed by
 * TYPE_ASSOCIATION (code as source) and TYPE_INVERSE_ASSOCIATION
 * (code as target). A direction that produced no data is omitted.
 *
 * @param scheme  coding scheme name or URN
 * @param version scheme version; may be null
 * @param code    the concept code to look up
 * @return a map with zero, one, or both direction keys; never null
 */
public HashMap getMappingRelationshipHashMap(String scheme, String version, String code) {
    HashMap hmap = new HashMap();
    // Fixed: the per-direction overload returns null when the code lookup
    // fails, which previously caused an NPE on map1.get(...)/map2.get(...).
    HashMap map1 = getMappingRelationshipHashMap(scheme, version, code, 1);
    if (map1 != null) {
        ArrayList list = (ArrayList) map1.get(TYPE_ASSOCIATION);
        if (list != null) {
            hmap.put(TYPE_ASSOCIATION, list);
        }
    }
    HashMap map2 = getMappingRelationshipHashMap(scheme, version, code, -1);
    if (map2 != null) {
        ArrayList list = (ArrayList) map2.get(TYPE_INVERSE_ASSOCIATION);
        if (list != null) {
            hmap.put(TYPE_INVERSE_ASSOCIATION, list);
        }
    }
    return hmap;
}
/**
 * Builds the mapping relationships of a code in one direction.
 *
 * Each row is a pipe-delimited string:
 * relName|preferredName|code|codingScheme[|qual1:val1$qual2:val2$...]|namespace.
 * Rows for the "equivalentClass" association and for codes containing "@"
 * are skipped. The list is sorted and stored under TYPE_ASSOCIATION
 * (direction 1) or TYPE_INVERSE_ASSOCIATION (any other direction).
 *
 * @param scheme    coding scheme name or URN
 * @param version   scheme version; may be null
 * @param code      the concept code to look up (matched with "exactMatch")
 * @param direction 1 = code as source, -1 = code as target,
 *                  anything else = either side
 * @return a single-entry map, or null when the code lookup fails
 */
public HashMap getMappingRelationshipHashMap(
    String scheme, String version, String code, int direction) {
    System.out.println("========== Calling getMappingRelationshipHashMap direction " + direction);
    SearchContext searchContext = SearchContext.SOURCE_OR_TARGET_CODES;
    if (direction == 1) searchContext = SearchContext.SOURCE_CODES;
    else if (direction == -1) searchContext = SearchContext.TARGET_CODES;
    LexBIGService lbSvc = RemoteServerUtil.createLexBIGService();
    LexBIGServiceConvenienceMethods lbscm =
        new DataUtils().createLexBIGServiceConvenienceMethods(lbSvc);
    CodingSchemeVersionOrTag csvt = new CodingSchemeVersionOrTag();
    if (version != null)
        csvt.setVersion(version);
    ResolvedConceptReferencesIteratorWrapper wrapper = searchByCode(
        scheme, version, code, "exactMatch", searchContext, -1);
    if (wrapper == null) return null;
    ResolvedConceptReferencesIterator iterator = wrapper.getIterator();
    if (iterator == null) return null;
    HashMap hmap = new HashMap();
    ArrayList list = new ArrayList();
    int knt = 0;
    try {
        while (iterator.hasNext()) {
            knt++;
            System.out.println("knt: " + knt);
            ResolvedConceptReference ref = (ResolvedConceptReference) iterator.next();
            System.out.println("ref.etCode(): " + ref.getCode());
            System.out.println("ref name: " + ref.getEntityDescription().getContent());
            System.out.println("ref coding scheme: " + ref.getCodingSchemeName());
            // Direction 1 walks targetOf; direction -1 walks sourceOf.
            AssociationList asso_of = ref.getTargetOf();
            if (direction == -1) {
                asso_of = ref.getSourceOf();
            }
            if (asso_of == null) {
                System.out.println("asso_of == null ??? " );
            }
            if (asso_of != null) {
                Association[] associations =
                    asso_of.getAssociation();
                if (associations == null) {
                    System.out.println("associations == null??? " );
                }
                if (associations != null) {
                    System.out.println("associations.length: " + associations.length);
                    for (int i = 0; i < associations.length; i++) {
                        Association assoc = associations[i];
                        String associationName = null;
                        // Prefer the resolved name; fall back to the raw code.
                        try {
                            associationName = lbscm
                                .getAssociationNameFromAssociationCode(
                                    scheme, csvt, assoc
                                        .getAssociationName());
                        } catch (Exception ex) {
                            associationName = assoc.getAssociationName();
                        }
                        System.out.println("associationName: " + associationName);
                        AssociatedConcept[] acl =
                            assoc.getAssociatedConcepts()
                                .getAssociatedConcept();
                        System.out.println("acl.length: " + acl.length);
                        for (int j = 0; j < acl.length; j++) {
                            AssociatedConcept ac = acl[j];
                            EntityDescription ed =
                                ac.getEntityDescription();
                            String name = "No Description";
                            if (ed != null)
                                name = ed.getContent();
                            String pt = name;
                            // Skip equivalentClass rows and anonymous ("@") codes.
                            if (associationName
                                .compareToIgnoreCase("equivalentClass") != 0
                                && ac.getConceptCode().indexOf("@") == -1) {
                                String relaValue =
                                    replaceAssociationNameByRela(
                                        ac, associationName);
                                String s =
                                    relaValue + "|" + pt + "|"
                                        + ac.getConceptCode() + "|"
                                        + ac.getCodingSchemeName();
                                // Inverse direction reports the anchor concept instead.
                                if (direction == -1) {
                                    s = relaValue + "|" + ref.getEntityDescription().getContent() + "|"
                                        + ref.getCode() + "|"
                                        + ref.getCodingSchemeName();
                                }
                                if (ac.getAssociationQualifiers() != null) {
                                    String qualifiers = "";
                                    for (NameAndValue qual : ac
                                        .getAssociationQualifiers()
                                        .getNameAndValue()) {
                                        String qualifier_name = qual.getName();
                                        String qualifier_value = qual.getContent();
                                        qualifiers = qualifiers + (qualifier_name + ":" + qualifier_value) + "$";
                                    }
                                    s = s + "|" + qualifiers;
                                }
                                s = s + "|" + ac.getCodeNamespace();
                                System.out.println(s);
                                list.add(s);
                            }
                        }
                    }
                }
            }
        }
        if (list.size() > 0) {
            Collections.sort(list);
        }
        if (direction == 1) {
            hmap.put(TYPE_ASSOCIATION, list);
        } else {
            hmap.put(TYPE_INVERSE_ASSOCIATION, list);
        }
    } catch (Exception ex) {
        ex.printStackTrace();
    }
    return hmap;
}
}
|
[#29301] Support the search and display of mapping data. [KLO, 022011]
SVN-Revision: 1972
|
software/ncitbrowser/src/java/gov/nih/nci/evs/browser/utils/MappingSearchUtils.java
|
[#29301] Support the search and display of mapping data. [KLO, 022011]
|
|
Java
|
mit
|
1734d388ef8a74abb8428b6871ae165b338c67bc
| 0
|
swstack/Bean-Android-SDK,colus001/Bean-Android-SDK,androidgrl/Bean-Android-SDK,androidgrl/Bean-Android-SDK,hongbinz/Bean-Android-SDK,PunchThrough/bean-sdk-android,hongbinz/Bean-Android-SDK,PunchThrough/bean-sdk-android,colus001/Bean-Android-SDK,PunchThrough/Bean-Android-SDK,swstack/Bean-Android-SDK,PunchThrough/Bean-Android-SDK
|
package com.punchthrough.bean.sdk.internal.utility;
import com.punchthrough.bean.sdk.internal.exception.NoEnumFoundException;
import org.apache.commons.codec.DecoderException;
import org.apache.commons.codec.binary.Hex;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.EnumSet;
import java.util.List;
/**
 * Static helpers for int/byte conversion, hex decoding, enum lookup by raw
 * value, and slicing {@link Chunkable} payloads into fixed-size chunks.
 */
public class Misc {
    /**
     * Clamp an int to a min/max value.
     *
     * @param n The value to be clamped
     * @param min The minimum
     * @param max The maximum
     * @return The value passed in, or minimum if n < minimum, or maximum if n > maximum
     */
    public static int clamp(int n, int min, int max) {
        if (n < min) return min;
        if (n > max) return max;
        return n;
    }
    /**
     * Clamp an int to the uint8 (0-255) range.
     *
     * @param n The value to be clamped
     * @return The value clamped between 0 and 255
     */
    public static int clampToUInt8(int n) {
        return clamp(n, 0, 255);
    }
    /**
     * Convert a string of ASCII hex characters (e.g. "DEADBEEF0042") to an array of bytes the hex
     * represents (e.g. [0xDE, 0xAD, 0xBE, 0xEF, 0x00, 0x42]). Treat bytes returned by this method
     * as unsigned.
     *
     * @param hex The string of hex characters
     * @return An array of bytes the string represents
     * @throws DecoderException If the string passed in isn't made up of valid hex bytes
     */
    public static byte[] asciiHexToBytes(String hex) throws DecoderException {
        return Hex.decodeHex(hex.toCharArray());
    }
    // From http://stackoverflow.com/a/4768950/254187
    /**
     * Combine two bytes, treated as unsigned, into one int in the range 0-65535.
     *
     * @param high The high byte
     * @param low The low byte
     * @return The high byte combined with the low byte as an unsigned 16-bit value
     */
    public static int bytesToInt(byte high, byte low) {
        return ( (high & 0xFF) << 8 ) | ( low & 0xFF );
    }
    /**
     * Convert an array of two unsigned bytes with the given byte order to one int.
     *
     * @param bytes The bytes to be parsed
     * @param order The byte order to be used
     * @return An int representing the bytes in the given order
     */
    public static int twoBytesToInt(byte[] bytes, ByteOrder order) {
        if (order == ByteOrder.BIG_ENDIAN) {
            return bytesToInt(bytes[0], bytes[1]);
        } else if (order == ByteOrder.LITTLE_ENDIAN) {
            return bytesToInt(bytes[1], bytes[0]);
        } else {
            throw new IllegalArgumentException("ByteOrder must be BIG_ENDIAN or LITTLE_ENDIAN");
        }
    }
    /**
     * Convert an int to the two bytes of its low 16 bits in the given byte order.
     *
     * @param i The int to be converted; only the lowest 16 bits are represented
     * @param order The byte order of the resulting array
     * @return A two-byte array holding the low 16 bits of the int
     */
    public static byte[] intToTwoBytes(int i, ByteOrder order) {
        byte[] bytes = ByteBuffer.allocate(4).order(order).putInt(i).array();
        // The low 16 bits sit at opposite ends of the 4-byte buffer
        // depending on the byte order.
        if (order == ByteOrder.LITTLE_ENDIAN) {
            return Arrays.copyOfRange(bytes, 0, 2);
        } else if (order == ByteOrder.BIG_ENDIAN) {
            return Arrays.copyOfRange(bytes, 2, 4);
        } else {
            throw new IllegalArgumentException("ByteOrder must be BIG_ENDIAN or LITTLE_ENDIAN");
        }
    }
    /**
     * Convert an int to an unsigned byte.
     *
     * @param i The int to be converted
     * @return The int in unsigned byte form
     */
    public static byte intToByte(int i) {
        return (byte) (i & 0xFF);
    }
    /**
     * Convert an array of ints to an array of unsigned bytes. This is useful when you want to
     * construct a literal array of unsigned bytes with values greater than 127.
     * Only the lowest 8 bits of the int values are used.
     *
     * @param intArray The array of ints to be converted
     * @return The corresponding array of unsigned bytes
     */
    public static byte[] intArrayToByteArray(int[] intArray) {
        byte[] byteArray = new byte[intArray.length];
        for (int i = 0; i < intArray.length; i++) {
            byteArray[i] = intToByte(intArray[i]);
        }
        return byteArray;
    }
    /**
     * Convert an int to a four-byte array holding its 32-bit representation.
     *
     * @param i The int to be converted
     * @param endian The {@link java.nio.ByteOrder} endianness of the desired byte array
     * @return The array of bytes representing the 32-bit unsigned integer
     */
    public static byte[] intToUInt32(int i, ByteOrder endian) {
        // The previous (int) ((long) i) round-trip was a no-op; the int's
        // 32 bits are written out directly in the requested byte order.
        return ByteBuffer.allocate(4).order(endian).putInt(i).array();
    }
    // Based on http://stackoverflow.com/a/16406386/254187
    /**
     * Retrieve the enum of a given type from a given raw value. Enums must implement the
     * {@link com.punchthrough.bean.sdk.internal.utility.RawValuable} interface to ensure they have
     * a {@link RawValuable#getRawValue()} method.
     *
     * @param enumClass The class of the enum type being parsed, e.g. <code>BeanState.class</code>
     * @param value The raw int value of the enum to be retrieved
     * @param <T> The enum type being parsed
     * @return The enum value with the given raw value
     *
     * @throws NoEnumFoundException if the given enum type has no enum value with a raw value
     *         matching the given value
     */
    public static <T extends Enum & RawValuable> T enumWithRawValue(Class<T> enumClass, int value)
            throws NoEnumFoundException {
        for (Object oneEnumRaw : EnumSet.allOf(enumClass)) {
            // Safe: allOf(enumClass) only yields constants of enumClass.
            @SuppressWarnings("unchecked")
            T oneEnum = (T) oneEnumRaw;
            if (value == oneEnum.getRawValue()) {
                return oneEnum;
            }
        }
        throw new NoEnumFoundException(String.format(
                "No enum found for class %s with raw value %d", enumClass.getName(), value));
    }
    /**
     * Retrieve the enum of a given type from a given raw value. Enums must implement the
     * {@link com.punchthrough.bean.sdk.internal.utility.RawValuable} interface to ensure they have
     * a {@link RawValuable#getRawValue()} method.
     *
     * @param enumClass The class of the enum type being parsed, e.g. <code>BeanState.class</code>
     * @param value The raw byte value of the enum to be retrieved
     * @param <T> The enum type being parsed
     * @return The enum value with the given raw value
     *
     * @throws NoEnumFoundException if the given enum type has no enum value with a raw value
     *         matching the given value
     */
    public static <T extends Enum & RawValuable> T enumWithRawValue(Class<T> enumClass, byte value)
            throws NoEnumFoundException {
        return enumWithRawValue(enumClass, (int) value);
    }
    /**
     * Retrieve a number of raw bytes at an offset.
     *
     * @param chunkable The object whose raw data is sliced
     * @param offset The byte at which to start, zero-indexed
     * @param length The number of bytes to return. If this is greater than the number of bytes
     *               available after <code>offset</code>, it will return all available bytes,
     *               truncated at the end.
     * @return The bytes, starting at <code>offset</code> of length <code>length</code> or
     *         less if truncated
     */
    public static <T extends Chunkable> byte[] bytesFromChunkable(T chunkable, int offset, int length) {
        byte[] data = chunkable.getChunkableData();
        if ( offset + length > data.length ) {
            // Arrays.copyOfRange appends 0s when the array end is exceeded.
            // Trim length manually to avoid appending extra data.
            return Arrays.copyOfRange(data, offset, data.length);
        } else {
            return Arrays.copyOfRange(data, offset, offset + length);
        }
    }
    /**
     * Retrieve a chunk of raw bytes. Chunks are created by slicing the array at even intervals.
     * The final chunk may be shorter than the other chunks if it's been truncated.
     *
     * @param chunkable The object whose raw data is sliced
     * @param chunkLength The length of each chunk
     * @param chunkNum The chunk at which to start, zero-indexed
     * @return The chunk (array of bytes)
     */
    public static <T extends Chunkable> byte[] chunkFromChunkable(
            T chunkable, int chunkLength, int chunkNum) {
        int start = chunkNum * chunkLength;
        return bytesFromChunkable(chunkable, start, chunkLength);
    }
    /**
     * Retrieve the count of chunks for a given chunk length.
     *
     * @param chunkable The object whose raw data is sliced
     * @param chunkLength The length of each chunk
     * @return The number of chunks generated for a given chunk length
     */
    public static <T extends Chunkable> int chunkCountFromChunkable(T chunkable, int chunkLength) {
        byte[] data = chunkable.getChunkableData();
        return (int) Math.ceil(data.length * 1.0 / chunkLength);
    }
    /**
     * Retrieve all chunks for a given chunk length.
     * The final chunk may be shorter than the other chunks if it's been truncated.
     *
     * @param chunkable The object whose raw data is sliced
     * @param chunkLength The length of each chunk
     * @return A list of chunks (byte arrays)
     */
    public static <T extends Chunkable> List<byte[]> chunksFromChunkable(T chunkable, int chunkLength) {
        List<byte[]> chunks = new ArrayList<>();
        int chunkCount = chunkCountFromChunkable(chunkable, chunkLength);
        for (int i = 0; i < chunkCount; i++) {
            byte[] chunk = chunkFromChunkable(chunkable, chunkLength, i);
            chunks.add(chunk);
        }
        return chunks;
    }
}
|
beansdk/src/main/java/com/punchthrough/bean/sdk/internal/utility/Misc.java
|
package com.punchthrough.bean.sdk.internal.utility;
import com.punchthrough.bean.sdk.internal.exception.NoEnumFoundException;
import org.apache.commons.codec.DecoderException;
import org.apache.commons.codec.binary.Hex;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.EnumSet;
import java.util.List;
public class Misc {
/**
 * Clamps an int to the inclusive [min, max] range.
 *
 * @param n   value to clamp
 * @param min lower bound
 * @param max upper bound
 * @return n limited to the range [min, max]
 */
public static int clamp(int n, int min, int max) {
    return n < min ? min : (n > max ? max : n);
}
/**
 * Clamps an int to the unsigned-8-bit range.
 *
 * @param n value to clamp
 * @return n limited to [0, 255]
 */
public static int clampToUInt8(int n) {
    final int UINT8_MAX = 255;
    return clamp(n, 0, UINT8_MAX);
}
/**
 * Decodes an ASCII hex string (e.g. "DEADBEEF0042") into the bytes it
 * represents (e.g. [0xDE, 0xAD, 0xBE, 0xEF, 0x00, 0x42]). Treat the
 * returned bytes as unsigned.
 *
 * @param hex string of hex digit pairs
 * @return the decoded bytes
 * @throws DecoderException when the string is not valid hex
 */
public static byte[] asciiHexToBytes(String hex) throws DecoderException {
    char[] digits = hex.toCharArray();
    return Hex.decodeHex(digits);
}
// From http://stackoverflow.com/a/4768950/254187
/**
 * Combines two bytes, treated as unsigned, into a single int.
 *
 * @param high the high-order byte
 * @param low  the low-order byte
 * @return the 16-bit value (high << 8 | low) in the range 0-65535
 */
public static int bytesToInt(byte high, byte low) {
    int hi = high & 0xFF;
    int lo = low & 0xFF;
    return (hi << 8) | lo;
}
/**
 * Parses a two-byte array, treated as unsigned, into an int using the
 * given byte order.
 *
 * @param bytes the two bytes to parse
 * @param order BIG_ENDIAN or LITTLE_ENDIAN
 * @return the 16-bit value the bytes represent in that order
 */
public static int twoBytesToInt(byte[] bytes, ByteOrder order) {
    if (order != ByteOrder.BIG_ENDIAN && order != ByteOrder.LITTLE_ENDIAN) {
        throw new IllegalArgumentException("ByteOrder must be BIG_ENDIAN or LITTLE_ENDIAN");
    }
    boolean bigEndian = (order == ByteOrder.BIG_ENDIAN);
    byte high = bigEndian ? bytes[0] : bytes[1];
    byte low = bigEndian ? bytes[1] : bytes[0];
    return bytesToInt(high, low);
}
/**
 * Converts an int to an unsigned byte by keeping only its lowest 8 bits.
 *
 * @param i the int to convert
 * @return the low 8 bits of i as a byte
 */
public static byte intToByte(int i) {
    int lowest = i & 0xFF;
    return (byte) lowest;
}
/**
 * Convert an array of ints to an array of unsigned bytes. This is useful when you want to
 * construct a literal array of unsigned bytes with values greater than 127.
 * Only the lowest 8 bits of each int value are used.
 *
 * @param intArray The array of ints to be converted
 * @return The corresponding array of unsigned bytes
 */
public static byte[] intArrayToByteArray(int[] intArray) {
    byte[] result = new byte[intArray.length];
    int index = 0;
    for (int value : intArray) {
        // Same truncation as intToByte: keep only the lowest 8 bits.
        result[index++] = (byte) (value & 0xFF);
    }
    return result;
}
/**
 * Convert an int to a four-byte array of its representation as an unsigned 32-bit integer.
 * The int's two's-complement bit pattern is written as-is, so negative ints come out as
 * their UInt32 equivalents.
 *
 * @param i The int to be converted
 * @param endian The {@link java.nio.ByteOrder} endianness of the desired byte array
 * @return The array of bytes representing the 32-bit unsigned integer
 */
public static byte[] intToUInt32(int i, ByteOrder endian) {
    // The old (int) ((long) i) round-trip was a no-op: a Java int is already 32 bits,
    // so its bit pattern can be written to the buffer directly.
    return ByteBuffer.allocate(4).order(endian).putInt(i).array();
}
// Based on http://stackoverflow.com/a/16406386/254187
/**
 * Retrieve the enum of a given type from a given raw value. Enums must implement the
 * {@link com.punchthrough.bean.sdk.internal.utility.RawValuable} interface to ensure they have
 * a {@link RawValuable#getRawValue()} method.
 *
 * @param enumClass The class of the enum type being parsed, e.g. <code>BeanState.class</code>
 * @param value The raw int value of the enum to be retrieved
 * @param <T> The enum type being parsed
 * @return The enum value with the given raw value
 *
 * @throws NoEnumFoundException if the given enum type has no enum value with a raw value
 *                              matching the given value
 */
public static <T extends Enum & RawValuable> T enumWithRawValue(Class<T> enumClass, int value)
        throws NoEnumFoundException {
    // Scan the constants in declaration order and return the first raw-value match.
    Iterator<?> candidates = EnumSet.allOf(enumClass).iterator();
    while (candidates.hasNext()) {
        T candidate = (T) candidates.next();
        if (candidate.getRawValue() == value) {
            return candidate;
        }
    }
    throw new NoEnumFoundException(String.format(
            "No enum found for class %s with raw value %d", enumClass.getName(), value));
}
/**
 * Retrieve the enum of a given type from a given raw byte value. Enums must implement the
 * {@link com.punchthrough.bean.sdk.internal.utility.RawValuable} interface to ensure they have
 * a {@link RawValuable#getRawValue()} method.
 *
 * @param enumClass The class of the enum type being parsed, e.g. <code>BeanState.class</code>
 * @param value The raw byte value of the enum to be retrieved
 * @param <T> The enum type being parsed
 * @return The enum value with the given raw value
 *
 * @throws NoEnumFoundException if the given enum type has no enum value with a raw value
 *                              matching the given value
 */
public static <T extends Enum & RawValuable> T enumWithRawValue(Class<T> enumClass, byte value)
        throws NoEnumFoundException {
    // Widen the byte (sign-extending, as the original explicit cast did) and delegate.
    int rawValue = value;
    return enumWithRawValue(enumClass, rawValue);
}
/**
 * Retrieve a number of raw bytes at an offset.
 *
 * @param offset The byte at which to start, zero-indexed
 * @param length The number of bytes to return. If this is greater than the number of bytes
 *               available after <code>offset</code>, all remaining bytes are returned,
 *               truncated at the end.
 * @return The bytes, starting at <code>offset</code>, of length <code>length</code> or
 *         less if truncated
 */
public static <T extends Chunkable> byte[] bytesFromChunkable(T chunkable, int offset, int length) {
    byte[] data = chunkable.getChunkableData();
    // Arrays.copyOfRange pads with zeros past the end of the source array,
    // so cap the end index manually to avoid appending extra data.
    int end = Math.min(offset + length, data.length);
    return Arrays.copyOfRange(data, offset, end);
}
/**
 * Retrieve a chunk of raw bytes. Chunks are created by slicing the array at even intervals.
 * The final chunk may be shorter than the other chunks if it's been truncated.
 *
 * @param chunkLength The length of each chunk
 * @param chunkNum The chunk at which to start, zero-indexed
 * @return The chunk (array of bytes)
 */
public static <T extends Chunkable> byte[] chunkFromChunkable(
        T chunkable, int chunkLength, int chunkNum) {
    // Chunk N begins N * chunkLength bytes into the data.
    return bytesFromChunkable(chunkable, chunkNum * chunkLength, chunkLength);
}
/**
 * Retrieve the count of chunks for a given chunk length.
 *
 * @param chunkLength The length of each chunk
 * @return The number of chunks generated for a given chunk length
 */
public static <T extends Chunkable> int chunkCountFromChunkable(T chunkable, int chunkLength) {
    double dataLength = chunkable.getChunkableData().length;
    // Round up so a trailing partial chunk still counts as a chunk.
    return (int) Math.ceil(dataLength / chunkLength);
}
/**
 * Retrieve all chunks for a given chunk length.
 * The final chunk may be shorter than the other chunks if it's been truncated.
 *
 * @param chunkLength The length of each chunk
 * @return A list of chunks (byte arrays)
 */
public static <T extends Chunkable> List<byte[]> chunksFromChunkable(T chunkable, int chunkLength) {
    int total = chunkCountFromChunkable(chunkable, chunkLength);
    List<byte[]> chunks = new ArrayList<>();
    for (int chunkNum = 0; chunkNum < total; chunkNum++) {
        chunks.add(chunkFromChunkable(chunkable, chunkLength, chunkNum));
    }
    return chunks;
}
}
|
Add intToTwoBytes for reconstructing UInt16 from Java ints
|
beansdk/src/main/java/com/punchthrough/bean/sdk/internal/utility/Misc.java
|
Add intToTwoBytes for reconstructing UInt16 from Java ints
|
|
Java
|
mit
|
e344c65a6b3ac89f8719f071c22579f642ef203f
| 0
|
kenzierocks/SpongeVanilla,kenzierocks/SpongeVanilla
|
/*
* This file is part of Sponge, licensed under the MIT License (MIT).
*
* Copyright (c) SpongePowered <https://www.spongepowered.org>
* Copyright (c) contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package org.spongepowered.server.mixin.server;
import net.minecraft.crash.CrashReport;
import net.minecraft.network.NetworkSystem;
import net.minecraft.network.ServerStatusResponse;
import net.minecraft.network.play.server.S03PacketTimeUpdate;
import net.minecraft.profiler.Profiler;
import net.minecraft.server.MinecraftServer;
import net.minecraft.server.gui.IUpdatePlayerListBox;
import net.minecraft.server.management.ServerConfigurationManager;
import net.minecraft.util.IChatComponent;
import net.minecraft.util.ReportedException;
import net.minecraft.world.WorldServer;
import org.apache.logging.log4j.Logger;
import org.spongepowered.api.GameState;
import org.spongepowered.api.event.SpongeEventFactory;
import org.spongepowered.api.event.cause.Cause;
import org.spongepowered.api.event.cause.NamedCause;
import org.spongepowered.api.event.game.state.GameStartedServerEvent;
import org.spongepowered.api.event.game.state.GameStoppedEvent;
import org.spongepowered.api.event.game.state.GameStoppedServerEvent;
import org.spongepowered.api.event.game.state.GameStoppingEvent;
import org.spongepowered.api.event.game.state.GameStoppingServerEvent;
import org.spongepowered.api.world.World;
import org.spongepowered.asm.mixin.Mixin;
import org.spongepowered.asm.mixin.Overwrite;
import org.spongepowered.asm.mixin.Shadow;
import org.spongepowered.asm.mixin.injection.At;
import org.spongepowered.asm.mixin.injection.Inject;
import org.spongepowered.asm.mixin.injection.Redirect;
import org.spongepowered.asm.mixin.injection.callback.CallbackInfo;
import org.spongepowered.asm.mixin.injection.callback.LocalCapture;
import org.spongepowered.common.SpongeImpl;
import org.spongepowered.common.text.SpongeTexts;
import org.spongepowered.server.SpongeVanilla;
import org.spongepowered.server.world.VanillaDimensionManager;
import java.util.Hashtable;
import java.util.List;
import java.util.Queue;
import java.util.concurrent.FutureTask;
/**
 * Mixin into the vanilla {@link MinecraftServer} that posts Sponge lifecycle events
 * (server started/stopping/stopped, world unload) at the matching points of the vanilla
 * run loop, and overwrites the main tick method so worlds are ticked through
 * {@link VanillaDimensionManager} with per-dimension tick timing.
 */
@Mixin(MinecraftServer.class)
public abstract class MixinMinecraftServer {

    @Shadow private static Logger logger;
    @Shadow protected Queue<?> futureTaskQueue;
    @Shadow private ServerConfigurationManager serverConfigManager;
    @Shadow private Profiler theProfiler;
    @Shadow private int tickCounter;

    @Shadow abstract boolean getAllowNether();

    @Shadow abstract NetworkSystem getNetworkSystem();

    @Shadow List<?> playersOnline;

    // Per-dimension ring buffer (100 samples) of world tick durations in nanoseconds;
    // consumed by VanillaDimensionManager.unloadWorlds and exposed via getWorldTickTimes().
    private Hashtable<Integer, long[]> worldTickTimes = new Hashtable<Integer, long[]>();

    /** Posts SERVER_STARTED right after the status-response favicon has been set up. */
    @Inject(method = "run", at = @At(value = "INVOKE", target = "Lnet/minecraft/server/MinecraftServer;addFaviconToStatusResponse"
            + "(Lnet/minecraft/network/ServerStatusResponse;)V", shift = At.Shift.AFTER))
    public void callServerStarted(CallbackInfo ci) {
        SpongeVanilla.INSTANCE.postState(GameStartedServerEvent.class, GameState.SERVER_STARTED);
    }

    /** Posts SERVER_STOPPING shortly before the vanilla shutdown sequence begins. */
    @Inject(method = "run", at = @At(value = "INVOKE", target = "Lnet/minecraft/server/MinecraftServer;finalTick(Lnet/minecraft/crash/CrashReport;)V",
            ordinal = 0, shift = At.Shift.BY, by = -9))
    public void callServerStopping(CallbackInfo ci) {
        SpongeVanilla.INSTANCE.postState(GameStoppingServerEvent.class, GameState.SERVER_STOPPING);
    }

    /** Posts the final server/game stopped states just before the JVM exits. */
    @Inject(method = "run", at = @At(value = "INVOKE", target = "Lnet/minecraft/server/MinecraftServer;systemExitNow()V"))
    public void callServerStopped(CallbackInfo ci) {
        SpongeVanilla.INSTANCE.postState(GameStoppedServerEvent.class, GameState.SERVER_STOPPED);
        SpongeVanilla.INSTANCE.postState(GameStoppingEvent.class, GameState.GAME_STOPPING);
        SpongeVanilla.INSTANCE.postState(GameStoppedEvent.class, GameState.GAME_STOPPED);
    }

    /** Cancels favicon loading when a favicon has already been set on the response. */
    @Inject(method = "addFaviconToStatusResponse", at = @At("HEAD"), cancellable = true)
    public void onAddFaviconToStatusResponse(ServerStatusResponse response, CallbackInfo ci) {
        // Don't load favicon twice
        if (response.getFavicon() != null) {
            ci.cancel();
        }
    }

    /** Fires Sponge's UnloadWorldEvent for each world flushed during server shutdown. */
    @Inject(method = "stopServer", at = @At(value = "INVOKE", target = "Lnet/minecraft/world/WorldServer;flush()V"),
            locals = LocalCapture.CAPTURE_FAILHARD)
    public void callWorldUnload(CallbackInfo ci, int i, WorldServer worldserver) {
        SpongeImpl.postEvent(SpongeEventFactory.createUnloadWorldEvent(Cause.of(NamedCause.source(this)), (World) worldserver));
    }

    /**
     * Replaces the vanilla tick method: drains the scheduled-task queue, ticks every
     * dimension returned by {@link VanillaDimensionManager} while recording per-dimension
     * tick times, then unloads idle dimensions and ticks network, players and tickables.
     */
    @Overwrite
    public void updateTimeLightAndEntities() {
        this.theProfiler.startSection("jobs");

        // Removed the unused local 'Queue<?> queue = this.futureTaskQueue;' — the
        // synchronized block below already uses the field directly.
        synchronized (this.futureTaskQueue) {
            while (!this.futureTaskQueue.isEmpty()) {
                try {
                    ((FutureTask) this.futureTaskQueue.poll()).run();
                } catch (Throwable throwable2) {
                    logger.fatal(throwable2);
                }
            }
        }

        this.theProfiler.endStartSection("levels");

        int j;
        // Refresh the dimension ID list every 200 ticks
        Integer[] ids = VanillaDimensionManager.getIDs(this.tickCounter % 200 == 0);
        for (j = 0; j < ids.length; ++j) {
            int id = ids[j];
            long i = System.nanoTime();

            if (j == 0 || this.getAllowNether()) {
                WorldServer worldserver = VanillaDimensionManager.getWorldFromDimId(id);
                this.theProfiler.startSection(worldserver.getWorldInfo().getWorldName());

                if (this.tickCounter % 20 == 0) {
                    this.theProfiler.startSection("timeSync");
                    this.serverConfigManager.sendPacketToAllPlayersInDimension(
                            new S03PacketTimeUpdate(worldserver.getTotalWorldTime(), worldserver.getWorldTime(),
                                    worldserver.getGameRules().getGameRuleBooleanValue("doDaylightCycle")), worldserver.provider.getDimensionId());
                    this.theProfiler.endSection();
                }

                this.theProfiler.startSection("tick");

                CrashReport crashreport;

                try {
                    worldserver.tick();
                } catch (Throwable throwable1) {
                    crashreport = CrashReport.makeCrashReport(throwable1, "Exception ticking world");
                    worldserver.addWorldInfoToCrashReport(crashreport);
                    throw new ReportedException(crashreport);
                }

                try {
                    worldserver.updateEntities();
                } catch (Throwable throwable) {
                    crashreport = CrashReport.makeCrashReport(throwable, "Exception ticking world entities");
                    worldserver.addWorldInfoToCrashReport(crashreport);
                    throw new ReportedException(crashreport);
                }

                this.theProfiler.endSection();
                this.theProfiler.startSection("tracker");
                worldserver.getEntityTracker().updateTrackedEntities();
                this.theProfiler.endSection();
                this.theProfiler.endSection();
            }

            // Record this dimension's tick duration in its 100-slot ring buffer
            this.worldTickTimes.get(id)[this.tickCounter % 100] = System.nanoTime() - i;
        }

        this.theProfiler.endStartSection("dim_unloading");
        VanillaDimensionManager.unloadWorlds(this.worldTickTimes);
        this.theProfiler.endStartSection("connection");
        this.getNetworkSystem().networkTick();
        this.theProfiler.endStartSection("players");
        this.serverConfigManager.onTick();
        this.theProfiler.endStartSection("tickables");

        for (j = 0; j < this.playersOnline.size(); ++j) {
            ((IUpdatePlayerListBox) this.playersOnline.get(j)).update();
        }

        this.theProfiler.endSection();
    }

    /** Reports the SpongeVanilla name as the server mod name. */
    @Overwrite
    public String getServerModName() {
        return SpongeVanilla.INSTANCE.getName();
    }

    /** Routes server console chat through the Sponge legacy text serializer. */
    @Overwrite
    public void addChatMessage(IChatComponent component) {
        logger.info(SpongeTexts.toLegacy(component));
    }

    public Hashtable<Integer, long[]> getWorldTickTimes() {
        return this.worldTickTimes;
    }

    // TODO: Temporary fix for https://github.com/SpongePowered/SpongeVanilla/issues/196, there has to be a better fix for this
    // (Not completely sure what is causing it yet)
    @Redirect(method = "callFromMainThread", at = @At(value = "INVOKE",
            target = "Lnet/minecraft/server/MinecraftServer;isCallingFromMinecraftThread()Z"))
    public boolean allowRunningTasksInShutdownThread(MinecraftServer server) {
        return server.isCallingFromMinecraftThread() || Thread.currentThread().getName().equals("Server Shutdown Thread");
    }
}
|
src/main/java/org/spongepowered/server/mixin/server/MixinMinecraftServer.java
|
/*
* This file is part of Sponge, licensed under the MIT License (MIT).
*
* Copyright (c) SpongePowered <https://www.spongepowered.org>
* Copyright (c) contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package org.spongepowered.server.mixin.server;
import net.minecraft.crash.CrashReport;
import net.minecraft.network.NetworkSystem;
import net.minecraft.network.ServerStatusResponse;
import net.minecraft.network.play.server.S03PacketTimeUpdate;
import net.minecraft.profiler.Profiler;
import net.minecraft.server.MinecraftServer;
import net.minecraft.server.gui.IUpdatePlayerListBox;
import net.minecraft.server.management.ServerConfigurationManager;
import net.minecraft.util.IChatComponent;
import net.minecraft.util.ReportedException;
import net.minecraft.world.WorldServer;
import org.apache.logging.log4j.Logger;
import org.spongepowered.api.GameState;
import org.spongepowered.api.event.SpongeEventFactory;
import org.spongepowered.api.event.cause.Cause;
import org.spongepowered.api.event.cause.NamedCause;
import org.spongepowered.api.event.game.state.GameStartedServerEvent;
import org.spongepowered.api.event.game.state.GameStoppedEvent;
import org.spongepowered.api.event.game.state.GameStoppedServerEvent;
import org.spongepowered.api.event.game.state.GameStoppingEvent;
import org.spongepowered.api.event.game.state.GameStoppingServerEvent;
import org.spongepowered.api.world.World;
import org.spongepowered.asm.mixin.Mixin;
import org.spongepowered.asm.mixin.Overwrite;
import org.spongepowered.asm.mixin.Shadow;
import org.spongepowered.asm.mixin.injection.At;
import org.spongepowered.asm.mixin.injection.Inject;
import org.spongepowered.asm.mixin.injection.callback.CallbackInfo;
import org.spongepowered.asm.mixin.injection.callback.LocalCapture;
import org.spongepowered.common.SpongeImpl;
import org.spongepowered.common.text.SpongeTexts;
import org.spongepowered.server.SpongeVanilla;
import org.spongepowered.server.world.VanillaDimensionManager;
import java.util.Hashtable;
import java.util.List;
import java.util.Queue;
import java.util.concurrent.FutureTask;
/**
 * Mixin into the vanilla {@link MinecraftServer} that posts Sponge lifecycle events
 * (server started/stopping/stopped, world unload) at the matching points of the vanilla
 * run loop, and overwrites the main tick method so worlds are ticked through
 * {@link VanillaDimensionManager} with per-dimension tick timing.
 */
@Mixin(MinecraftServer.class)
public abstract class MixinMinecraftServer {
@Shadow private static Logger logger;
@Shadow protected Queue<?> futureTaskQueue;
@Shadow private ServerConfigurationManager serverConfigManager;
@Shadow private Profiler theProfiler;
@Shadow private int tickCounter;
@Shadow abstract boolean getAllowNether();
@Shadow abstract NetworkSystem getNetworkSystem();
@Shadow List<?> playersOnline;
// Per-dimension ring buffer (100 samples) of world tick durations in nanoseconds;
// consumed by VanillaDimensionManager.unloadWorlds and exposed via getWorldTickTimes().
private Hashtable<Integer, long[]> worldTickTimes = new Hashtable<Integer, long[]>();
/** Posts SERVER_STARTED right after the status-response favicon has been set up. */
@Inject(method = "run", at = @At(value = "INVOKE", target = "Lnet/minecraft/server/MinecraftServer;addFaviconToStatusResponse"
+ "(Lnet/minecraft/network/ServerStatusResponse;)V", shift = At.Shift.AFTER))
public void callServerStarted(CallbackInfo ci) {
SpongeVanilla.INSTANCE.postState(GameStartedServerEvent.class, GameState.SERVER_STARTED);
}
/** Posts SERVER_STOPPING shortly before the vanilla shutdown sequence begins. */
@Inject(method = "run", at = @At(value = "INVOKE", target = "Lnet/minecraft/server/MinecraftServer;finalTick(Lnet/minecraft/crash/CrashReport;)V",
ordinal = 0, shift = At.Shift.BY, by = -9))
public void callServerStopping(CallbackInfo ci) {
SpongeVanilla.INSTANCE.postState(GameStoppingServerEvent.class, GameState.SERVER_STOPPING);
}
/** Posts the final server/game stopped states just before the JVM exits. */
@Inject(method = "run", at = @At(value = "INVOKE", target = "Lnet/minecraft/server/MinecraftServer;systemExitNow()V"))
public void callServerStopped(CallbackInfo ci) {
SpongeVanilla.INSTANCE.postState(GameStoppedServerEvent.class, GameState.SERVER_STOPPED);
SpongeVanilla.INSTANCE.postState(GameStoppingEvent.class, GameState.GAME_STOPPING);
SpongeVanilla.INSTANCE.postState(GameStoppedEvent.class, GameState.GAME_STOPPED);
}
/** Cancels favicon loading when a favicon has already been set on the response. */
@Inject(method = "addFaviconToStatusResponse", at = @At("HEAD"), cancellable = true)
public void onAddFaviconToStatusResponse(ServerStatusResponse response, CallbackInfo ci) {
// Don't load favicon twice
if (response.getFavicon() != null) {
ci.cancel();
}
}
/** Fires Sponge's UnloadWorldEvent for each world flushed during server shutdown. */
@Inject(method = "stopServer", at = @At(value = "INVOKE", target = "Lnet/minecraft/world/WorldServer;flush()V"),
locals = LocalCapture.CAPTURE_FAILHARD)
public void callWorldUnload(CallbackInfo ci, int i, WorldServer worldserver) {
SpongeImpl.postEvent(SpongeEventFactory.createUnloadWorldEvent(Cause.of(NamedCause.source(this)), (World) worldserver));
}
/**
 * Replaces the vanilla tick method: drains the scheduled-task queue, ticks every
 * dimension returned by {@link VanillaDimensionManager} while recording per-dimension
 * tick times, then unloads idle dimensions and ticks network, players and tickables.
 */
@Overwrite
public void updateTimeLightAndEntities() {
this.theProfiler.startSection("jobs");
// NOTE(review): 'queue' is never read below; the synchronized block uses
// this.futureTaskQueue directly. Candidate for removal.
Queue<?> queue = this.futureTaskQueue;
synchronized (this.futureTaskQueue) {
while (!this.futureTaskQueue.isEmpty()) {
try {
((FutureTask) this.futureTaskQueue.poll()).run();
} catch (Throwable throwable2) {
logger.fatal(throwable2);
}
}
}
this.theProfiler.endStartSection("levels");
int j;
// Refresh the dimension ID list every 200 ticks
Integer[] ids = VanillaDimensionManager.getIDs(this.tickCounter % 200 == 0);
for (j = 0; j < ids.length; ++j) {
int id = ids[j];
long i = System.nanoTime();
if (j == 0 || this.getAllowNether()) {
WorldServer worldserver = VanillaDimensionManager.getWorldFromDimId(id);
this.theProfiler.startSection(worldserver.getWorldInfo().getWorldName());
if (this.tickCounter % 20 == 0) {
this.theProfiler.startSection("timeSync");
this.serverConfigManager.sendPacketToAllPlayersInDimension(
new S03PacketTimeUpdate(worldserver.getTotalWorldTime(), worldserver.getWorldTime(),
worldserver.getGameRules().getGameRuleBooleanValue("doDaylightCycle")), worldserver.provider.getDimensionId());
this.theProfiler.endSection();
}
this.theProfiler.startSection("tick");
CrashReport crashreport;
try {
worldserver.tick();
} catch (Throwable throwable1) {
crashreport = CrashReport.makeCrashReport(throwable1, "Exception ticking world");
worldserver.addWorldInfoToCrashReport(crashreport);
throw new ReportedException(crashreport);
}
try {
worldserver.updateEntities();
} catch (Throwable throwable) {
crashreport = CrashReport.makeCrashReport(throwable, "Exception ticking world entities");
worldserver.addWorldInfoToCrashReport(crashreport);
throw new ReportedException(crashreport);
}
this.theProfiler.endSection();
this.theProfiler.startSection("tracker");
worldserver.getEntityTracker().updateTrackedEntities();
this.theProfiler.endSection();
this.theProfiler.endSection();
}
// Record this dimension's tick duration in its 100-slot ring buffer
this.worldTickTimes.get(id)[this.tickCounter % 100] = System.nanoTime() - i;
}
this.theProfiler.endStartSection("dim_unloading");
VanillaDimensionManager.unloadWorlds(this.worldTickTimes);
this.theProfiler.endStartSection("connection");
this.getNetworkSystem().networkTick();
this.theProfiler.endStartSection("players");
this.serverConfigManager.onTick();
this.theProfiler.endStartSection("tickables");
for (j = 0; j < this.playersOnline.size(); ++j) {
((IUpdatePlayerListBox) this.playersOnline.get(j)).update();
}
this.theProfiler.endSection();
}
/** Reports the SpongeVanilla name as the server mod name. */
@Overwrite
public String getServerModName() {
return SpongeVanilla.INSTANCE.getName();
}
/** Routes server console chat through the Sponge legacy text serializer. */
@Overwrite
public void addChatMessage(IChatComponent component) {
logger.info(SpongeTexts.toLegacy(component));
}
public Hashtable<Integer, long[]> getWorldTickTimes() {
return this.worldTickTimes;
}
}
|
Temporarily disable scheduling tasks in the main thread for the shutdown thread
Fixes #196
|
src/main/java/org/spongepowered/server/mixin/server/MixinMinecraftServer.java
|
Temporarily disable scheduling tasks in the main thread for the shutdown thread Fixes #196
|
|
Java
|
mit
|
a4b278c385456e6ccf99fe7b8f286cd13c483609
| 0
|
BranchMetrics/android-branch-deep-linking,BranchMetrics/Android-Deferred-Deep-Linking-SDK,BranchMetrics/android-branch-deep-linking,BranchMetrics/Android-Deferred-Deep-Linking-SDK,BranchMetrics/Android-Deferred-Deep-Linking-SDK,BranchMetrics/Android-Deferred-Deep-Linking-SDK,BranchMetrics/android-branch-deep-linking,BranchMetrics/android-branch-deep-linking
|
package io.branch.referral;
import android.annotation.TargetApi;
import android.app.Activity;
import android.app.Application;
import android.content.Context;
import android.content.Intent;
import android.content.pm.ActivityInfo;
import android.content.pm.ApplicationInfo;
import android.content.pm.PackageInfo;
import android.content.pm.PackageManager;
import android.content.res.Resources;
import android.graphics.drawable.Drawable;
import android.net.Uri;
import android.os.AsyncTask;
import android.os.Build;
import android.os.Bundle;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.annotation.StyleRes;
import android.text.TextUtils;
import android.util.Log;
import android.view.View;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.lang.ref.WeakReference;
import java.net.HttpURLConnection;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Collection;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import io.branch.indexing.BranchUniversalObject;
import io.branch.indexing.ContentDiscoverer;
import io.branch.referral.util.CommerceEvent;
import io.branch.referral.util.LinkProperties;
/**
* <p>
* The core object required when using Branch SDK. You should declare an object of this type at
* the class-level of each Activity or Fragment that you wish to use Branch functionality within.
* </p>
* <p>
* Normal instantiation of this object would look like this:
* </p>
* <!--
* <pre style="background:#fff;padding:10px;border:2px solid silver;">
* Branch.getInstance(this.getApplicationContext()) // from an Activity
* Branch.getInstance(getActivity().getApplicationContext()) // from a Fragment
* </pre>
* -->
*/
public class Branch implements BranchViewHandler.IBranchViewEvents, SystemObserver.GAdsParamsFetchEvents, InstallListener.IInstallReferrerEvents {
private static final String TAG = "BranchSDK";
/**
* Hard-coded {@link String} that denotes a {@link BranchLinkData#tags}; applies to links that
* are shared with others directly as a user action, via social media for instance.
*/
public static final String FEATURE_TAG_SHARE = "share";
/**
* Hard-coded {@link String} that denotes a 'referral' tag; applies to links that are associated
* with a referral program, incentivized or not.
*/
public static final String FEATURE_TAG_REFERRAL = "referral";
/**
* Hard-coded {@link String} that denotes a 'referral' tag; applies to links that are sent as
* referral actions by users of an app using an 'invite contacts' feature for instance.
*/
public static final String FEATURE_TAG_INVITE = "invite";
/**
* Hard-coded {@link String} that denotes a link that is part of a commercial 'deal' or offer.
*/
public static final String FEATURE_TAG_DEAL = "deal";
/**
* Hard-coded {@link String} that denotes a link tagged as a gift action within a service or
* product.
*/
public static final String FEATURE_TAG_GIFT = "gift";
/**
* The code to be passed as part of a deal or gift; retrieved from the Branch object as a
* tag upon initialisation. Of {@link String} format.
*/
public static final String REDEEM_CODE = "$redeem_code";
/**
* <p>Default value of referral bucket; referral buckets contain credits that are used when users
* are referred to your apps. These can be viewed in the Branch dashboard under Referrals.</p>
*/
public static final String REFERRAL_BUCKET_DEFAULT = "default";
/**
* <p>Hard-coded value for referral code type. Referral codes will always result on "credit" actions.
* Even if they are of 0 value.</p>
*/
public static final String REFERRAL_CODE_TYPE = "credit";
/**
* Branch SDK version for the current release of the Branch SDK.
*/
public static final int REFERRAL_CREATION_SOURCE_SDK = 2;
/**
* Key value for referral code as a parameter.
*/
public static final String REFERRAL_CODE = "referral_code";
/**
* The redirect URL provided when the link is handled by a desktop client.
*/
public static final String REDIRECT_DESKTOP_URL = "$desktop_url";
/**
* The redirect URL provided when the link is handled by an Android device.
*/
public static final String REDIRECT_ANDROID_URL = "$android_url";
/**
* The redirect URL provided when the link is handled by an iOS device.
*/
public static final String REDIRECT_IOS_URL = "$ios_url";
/**
* The redirect URL provided when the link is handled by a large form-factor iOS device such as
* an iPad.
*/
public static final String REDIRECT_IPAD_URL = "$ipad_url";
/**
* The redirect URL provided when the link is handled by an Amazon Fire device.
*/
public static final String REDIRECT_FIRE_URL = "$fire_url";
/**
* The redirect URL provided when the link is handled by a Blackberry device.
*/
public static final String REDIRECT_BLACKBERRY_URL = "$blackberry_url";
/**
* The redirect URL provided when the link is handled by a Windows Phone device.
*/
public static final String REDIRECT_WINDOWS_PHONE_URL = "$windows_phone_url";
/**
* Open Graph: The title of your object as it should appear within the graph, e.g., "The Rock".
*
* @see <a href="http://ogp.me/#metadata">Open Graph - Basic Metadata</a>
*/
public static final String OG_TITLE = "$og_title";
/**
* The description of the object to appear in social media feeds that use
* Facebook's Open Graph specification.
*
* @see <a href="http://ogp.me/#metadata">Open Graph - Basic Metadata</a>
*/
public static final String OG_DESC = "$og_description";
/**
* An image URL which should represent your object to appear in social media feeds that use
* Facebook's Open Graph specification.
*
* @see <a href="http://ogp.me/#metadata">Open Graph - Basic Metadata</a>
*/
public static final String OG_IMAGE_URL = "$og_image_url";
/**
* A URL to a video file that complements this object.
*
* @see <a href="http://ogp.me/#metadata">Open Graph - Basic Metadata</a>
*/
public static final String OG_VIDEO = "$og_video";
/**
* The canonical URL of your object that will be used as its permanent ID in the graph.
*
* @see <a href="http://ogp.me/#metadata">Open Graph - Basic Metadata</a>
*/
public static final String OG_URL = "$og_url";
/**
* Unique identifier for the app in use.
*/
public static final String OG_APP_ID = "$og_app_id";
/**
* {@link String} value denoting the deep link path to override Branch's default one. By
* default, Branch will use yourapp://open?link_click_id=12345. If you specify this key/value,
* Branch will use yourapp://'$deeplink_path'?link_click_id=12345
*/
public static final String DEEPLINK_PATH = "$deeplink_path";
/**
* {@link String} value indicating whether the link should always initiate a deep link action.
* By default, unless overridden on the dashboard, Branch will only open the app if they are
* 100% sure the app is installed. This setting will cause the link to always open the app.
* Possible values are "true" or "false"
*/
public static final String ALWAYS_DEEPLINK = "$always_deeplink";
/**
 * An {@link Integer} value indicating the user to reward for applying a referral code. In this
 * case, the user applying the referral code receives credit.
 */
public static final int REFERRAL_CODE_LOCATION_REFERREE = 0;
/**
 * An {@link Integer} value indicating the user to reward for applying a referral code. In this
 * case, the user who created the referral code receives credit.
 * (Value 1 is intentionally unused here — presumably reserved by the server API; confirm.)
 */
public static final int REFERRAL_CODE_LOCATION_REFERRING_USER = 2;
/**
 * An {@link Integer} value indicating the user to reward for applying a referral code. In this
 * case, both the creator and applicant receive credit.
 */
public static final int REFERRAL_CODE_LOCATION_BOTH = 3;
/**
 * An {@link Integer} value indicating the calculation type of the referral code. In this case,
 * the referral code can be applied continually.
 */
public static final int REFERRAL_CODE_AWARD_UNLIMITED = 1;
/**
 * An {@link Integer} value indicating the calculation type of the referral code. In this case,
 * a user can only apply a specific referral code once.
 */
public static final int REFERRAL_CODE_AWARD_UNIQUE = 0;
/**
 * An {@link Integer} value indicating the link type. In this case, the link can be used an
 * unlimited number of times.
 */
public static final int LINK_TYPE_UNLIMITED_USE = 0;
/**
 * An {@link Integer} value indicating the link type. In this case, the link can be used only
 * once. After initial use, subsequent attempts will not validate.
 */
public static final int LINK_TYPE_ONE_TIME_USE = 1;
/**
 * <p>An {@link Integer} variable specifying the amount of time in milliseconds to keep a
 * connection alive before assuming a timeout condition.</p>
 *
 * @see <a href="http://developer.android.com/reference/java/util/Timer.html#schedule(java.util.TimerTask, long)">
 * Timer.schedule (TimerTask task, long delay)</a>
 */
private static final int SESSION_KEEPALIVE = 2000;
/**
 * <p>An {@link Integer} value defining the timeout period in milliseconds to wait during a
 * looping task before triggering an actual connection close during a session close action.</p>
 */
private static final int PREVENT_CLOSE_TIMEOUT = 500;
/* Json object containing key-value pairs for debugging deep linking; set via setDeepLinkDebugMode(). */
private JSONObject deeplinkDebugParams_;
/* When true, the SDK will not read the device's Android ID. See disableDeviceIDFetch(Boolean). */
private static boolean disableDeviceIDFetch_;
/* When true, a Facebook App Link check is performed during Branch initialisation. */
private boolean enableFacebookAppLinkCheck_ = false;
/* Flag for simulating fresh installs (test support). */
private static boolean isSimulatingInstalls_;
/* Flag for enabling SDK logging. */
private static boolean isLogging_ = false;
/* When true, init is delayed to wait for the Play Store install referrer broadcast. */
private static boolean checkInstallReferrer_ = false;
/* Maximum time (ms) to wait for the Play Store install referrer broadcast. */
private static long PLAYSTORE_REFERRAL_FETCH_WAIT_FOR = 5000;
/**
 * <p>A {@link Branch} object that is instantiated on init and holds the singleton instance of
 * the class during application runtime.</p>
 */
private static Branch branchReferral_;
/* Interface to the Branch remote (network) API. */
private BranchRemoteInterface kRemoteInterface_;
/* Shared-preferences helper holding Branch keys, retry settings and session data. */
private PrefHelper prefHelper_;
/* Observer for collecting device/system level information. */
private final SystemObserver systemObserver_;
/* Application context captured at instance creation. */
private Context context_;
/* Lock object used for synchronising request processing. */
final Object lock;
/* Single-permit semaphore guarding serial access to the request queue. */
private Semaphore serverSema_;
/* Persistent queue of pending server requests. */
private ServerRequestQueue requestQueue_;
/* Count of network calls currently in flight. */
private int networkCount_;
/* Tracks whether the network is believed to be available. */
private boolean hasNetwork_;
/* Cache mapping link parameters to previously generated short URLs. */
private Map<BranchLinkData, String> linkCache_;
/* Scheduled task handle for the periodic app-listing upload. */
private ScheduledFuture<?> appListingSchedule_;
/* Set to true when application is instantiating {@BranchApp} by extending or adding manifest entry. */
private static boolean isAutoSessionMode_ = false;
/* Set to true when {@link Activity} life cycle callbacks are registered. */
private static boolean isActivityLifeCycleCallbackRegistered_ = false;
/* Enumeration for defining session initialisation state. */
private enum SESSION_STATE {
    INITIALISED, INITIALISING, UNINITIALISED
}
/* Enumeration describing whether the launching intent is ready to be read (delayed-newIntent handling). */
private enum INTENT_STATE {
    PENDING,
    READY
}
/* Current intent state. Default is PENDING until the intent is known to be ready. */
private INTENT_STATE intentState_ = INTENT_STATE.PENDING;
/* When true, new intents are processed only after the intent state transitions to READY. */
private boolean handleDelayedNewIntents_ = false;
/* Holds the current Session state. Default is set to UNINITIALISED. */
private SESSION_STATE initState_ = SESSION_STATE.UNINITIALISED;
/* Instance of share link manager to share links automatically with third party applications. */
private ShareLinkManager shareLinkManager_;
/* The current activity instance for the application.*/
WeakReference<Activity> currentActivityReference_;
/* Specifies the choice of user for isReferrable setting. used to determine the link click is referrable or not. See getAutoSession for usage */
private enum CUSTOM_REFERRABLE_SETTINGS {
    USE_DEFAULT, REFERRABLE, NON_REFERRABLE
}
/* By default assume user want to use the default settings. Update this option when user specify custom referrable settings */
private static CUSTOM_REFERRABLE_SETTINGS customReferrableSettings_ = CUSTOM_REFERRABLE_SETTINGS.USE_DEFAULT;
/* Key to indicate whether the Activity was launched by Branch or not. */
private static final String AUTO_DEEP_LINKED = "io.branch.sdk.auto_linked";
/* Key for Auto Deep link param. The activities which need to automatically deep linked should define in this in the activity metadata. */
private static final String AUTO_DEEP_LINK_KEY = "io.branch.sdk.auto_link_keys";
/* Path for $deeplink_path or $android_deeplink_path to auto deep link. The activities which need to automatically deep linked should define in this in the activity metadata. */
private static final String AUTO_DEEP_LINK_PATH = "io.branch.sdk.auto_link_path";
/* Key for disabling auto deep link feature. Setting this to true in manifest will disable auto deep linking feature. */
private static final String AUTO_DEEP_LINK_DISABLE = "io.branch.sdk.auto_link_disable";
/* Key for defining a request code for an activity. Should be added as metadata for an activity. This is used as a request code for launching an activity on auto deep link. */
private static final String AUTO_DEEP_LINK_REQ_CODE = "io.branch.sdk.auto_link_request_code";
/* Request code used to launch an activity on auto deep linking unless AUTO_DEEP_LINK_REQ_CODE is specified for the activity in the manifest. */
private static final int DEF_AUTO_DEEP_LINK_REQ_CODE = 1501;
/* Sets to true when the init session params are reported to the app though call back.*/
private boolean isInitReportedThroughCallBack = false;
/* Extra key-value data attached to requests for instrumentation/diagnostics. */
private final ConcurrentHashMap<String, String> instrumentationExtraData_;
/* Name of the key for getting Fabric Branch API key from string resource */
private static final String FABRIC_BRANCH_API_KEY = "io.branch.apiKey";
/* True while the Google Advertising ID prefetch is still in progress. */
private boolean isGAParamsFetchInProgress_ = false;
/* Hosts whose external URIs are allowed to be collected. */
private List<String> externalUriWhiteList_;
/* Hosts whose external URIs must be skipped (not collected). */
private List<String> skipExternalUriHosts_;
String sessionReferredLink_; // Link which opened this application session if opened by a link click.
private static String cookieBasedMatchDomain_ = "app.link"; // Domain name used for cookie based matching.
private static int LATCH_WAIT_UNTIL = 2500; //used for getLatestReferringParamsSync and getFirstReferringParamsSync, fail after this many milliseconds
/* List of keys whose values are collected from the Intent Extra.*/
private static final String[] EXTERNAL_INTENT_EXTRA_KEY_WHITE_LIST = new String[]{
    "extra_launch_uri" // Key for embedded uri in FB ads triggered intents
};
/* Latch for the blocking getFirstReferringParamsSync() call. */
private CountDownLatch getFirstReferringParamsLatch = null;
/* Latch for the blocking getLatestReferringParamsSync() call. */
private CountDownLatch getLatestReferringParamsLatch = null;
/* Flag for checking of Strong matching is waiting on GAID fetch */
private boolean performCookieBasedStrongMatchingOnGAIDAvailable = false;
/**
* <p>The main constructor of the Branch class is private because the class uses the Singleton
* pattern.</p>
* <p/>
* <p>Use {@link #getInstance(Context) getInstance} method when instantiating.</p>
*
* @param context A {@link Context} from which this call was made.
*/
private Branch(@NonNull Context context) {
prefHelper_ = PrefHelper.getInstance(context);
kRemoteInterface_ = new BranchRemoteInterface(context);
systemObserver_ = new SystemObserver(context);
requestQueue_ = ServerRequestQueue.getInstance(context);
serverSema_ = new Semaphore(1);
lock = new Object();
networkCount_ = 0;
hasNetwork_ = true;
linkCache_ = new HashMap<>();
instrumentationExtraData_ = new ConcurrentHashMap<>();
isGAParamsFetchInProgress_ = systemObserver_.prefetchGAdsParams(this);
InstallListener.setListener(this);
// newIntent() delayed issue is only with Android M+ devices. So need to handle android M and above
// PRS: Since this seem more reliable and not causing any integration issues adding this to all supported SDK versions
if (android.os.Build.VERSION.SDK_INT >= 15) {
handleDelayedNewIntents_ = true;
intentState_ = INTENT_STATE.PENDING;
} else {
handleDelayedNewIntents_ = false;
intentState_ = INTENT_STATE.READY;
}
externalUriWhiteList_ = new ArrayList<>();
skipExternalUriHosts_ = new ArrayList<>();
}
/**
 * <p>
 * Enables the test mode for the SDK. This will use the Branch Test Keys.
 * This will also enable debug logs.
 * Note: This is the same as setting "io.branch.sdk.TestMode" to "True" in the Manifest file.
 * </p>
 */
public static void enableTestMode() {
    BranchUtil.isCustomDebugEnabled_ = true;
}
/**
 * <p>Disables the test mode previously enabled by {@link #enableTestMode()}, reverting the SDK
 * to the live Branch keys.</p>
 */
public static void disableTestMode() {
    BranchUtil.isCustomDebugEnabled_ = false;
}
/**
 * <p>Enables test mode (Branch Test Keys and debug logs) for this session.</p>
 *
 * @deprecated Use {@link #enableTestMode()} instead. This method is retained only for
 * backward compatibility and simply delegates to it.
 */
@Deprecated
public void setDebug() {
    enableTestMode();
}
/**
 * Since the Play Store referrer broadcast from Google Play is delayed by a few milliseconds,
 * call this method to delay Branch init for more accurate tracking and attribution.
 * This will delay Branch init only the first time the user opens the app.
 * Note: 1500 ms is recommended to capture more than 90% of install referrer cases per our testing as of 4/2017.
 *
 * @param delay {@link Long} Maximum wait time for the install referrer broadcast in milliseconds.
 */
public static void enablePlayStoreReferrer(long delay) {
    checkInstallReferrer_ = true;
    PLAYSTORE_REFERRAL_FETCH_WAIT_FOR = delay;
}
/* Returns true if init should wait for the Play Store install referrer broadcast. */
static boolean checkPlayStoreReferrer() {
    return checkInstallReferrer_;
}
/* Returns the maximum time (ms) to wait for the Play Store install referrer broadcast. */
public static long getReferralFetchWaitTime() {
    return PLAYSTORE_REFERRAL_FETCH_WAIT_FOR;
}
/**
 * <p>Singleton method to return the pre-initialised object of the type {@link Branch}.
 * Make sure your app is instantiating {@link BranchApp} before calling this method
 * or you have created an instance of Branch already by calling getInstance(Context ctx).</p>
 *
 * @return An initialised singleton {@link Branch} object, or {@code null} with an error logged
 * if no instance has been created yet.
 */
@TargetApi(Build.VERSION_CODES.ICE_CREAM_SANDWICH)
public static Branch getInstance() {
    /* Check if BranchApp is instantiated. */
    if (branchReferral_ == null) {
        Log.e("BranchSDK", "Branch instance is not created yet. Make sure you have initialised Branch. [Consider Calling getInstance(Context ctx) if you still have issue.]");
    } else if (isAutoSessionMode_) {
        /* Check if Activity life cycle callbacks are set if in auto session mode. */
        if (!isActivityLifeCycleCallbackRegistered_) {
            Log.e("BranchSDK", "Branch instance is not properly initialised. Make sure your Application class is extending BranchApp class. " +
                    "If you are not extending BranchApp class make sure you are initialising Branch in your Applications onCreate()");
        }
    }
    // Note: may be null if Branch was never initialised; callers should handle that.
    return branchReferral_;
}
/**
 * <p>Singleton method to return the pre-initialised, or newly initialise and return, a singleton
 * object of the type {@link Branch}.</p>
 *
 * @param context   A {@link Context} from which this call was made.
 * @param branchKey Your Branch key as a {@link String} (must start with "key_").
 * @return An initialised {@link Branch} object, either fetched from a pre-initialised
 * instance within the singleton class, or a newly instantiated object where
 * one was not already requested during the current app lifecycle.
 * @see <a href="https://github.com/BranchMetrics/Branch-Android-SDK/blob/05e234855f983ae022633eb01989adb05775532e/README.md#add-your-app-key-to-your-project">
 * Adding your app key to your project</a>
 */
public static Branch getInstance(@NonNull Context context, @NonNull String branchKey) {
    if (branchReferral_ == null) {
        branchReferral_ = Branch.initInstance(context);
    }
    branchReferral_.context_ = context.getApplicationContext();
    if (!branchKey.startsWith("key_")) {
        Log.e("BranchSDK", "Branch Key is invalid.Please check your BranchKey");
    } else if (branchReferral_.prefHelper_.setBranchKey(branchKey)) {
        // A newly-set key invalidates the cached short links and any queued requests.
        branchReferral_.linkCache_.clear();
        branchReferral_.requestQueue_.clear();
    }
    return branchReferral_;
}
/**
 * Lazily creates the singleton instance and resolves the Branch key: first from stored
 * preferences, then (if absent) from the Fabric-provided string resource, falling back to
 * NO_STRING_VALUE with a warning. Also registers activity life-cycle callbacks when the
 * supplied context is an {@link Application}.
 *
 * @param context A {@link Context} from which this call was made.
 * @param isLive  True to read the live Branch key; false to read the test key.
 * @return The singleton {@link Branch} instance.
 */
private static Branch getBranchInstance(@NonNull Context context, boolean isLive) {
    if (branchReferral_ == null) {
        branchReferral_ = Branch.initInstance(context);
        String branchKey = branchReferral_.prefHelper_.readBranchKey(isLive);
        boolean isNewBranchKeySet;
        if (branchKey == null || branchKey.equalsIgnoreCase(PrefHelper.NO_STRING_VALUE)) {
            // If Branch key is not available check for Fabric provided Branch key
            String fabricBranchApiKey = null;
            try {
                Resources resources = context.getResources();
                fabricBranchApiKey = resources.getString(resources.getIdentifier(FABRIC_BRANCH_API_KEY, "string", context.getPackageName()));
            } catch (Exception ignore) {
                // Best-effort lookup: a missing resource simply leaves fabricBranchApiKey null.
            }
            if (!TextUtils.isEmpty(fabricBranchApiKey)) {
                isNewBranchKeySet = branchReferral_.prefHelper_.setBranchKey(fabricBranchApiKey);
            } else {
                Log.i("BranchSDK", "Branch Warning: Please enter your branch_key in your project's Manifest file!");
                isNewBranchKeySet = branchReferral_.prefHelper_.setBranchKey(PrefHelper.NO_STRING_VALUE);
            }
        } else {
            isNewBranchKeySet = branchReferral_.prefHelper_.setBranchKey(branchKey);
        }
        //on setting a new key clear link cache and pending requests
        if (isNewBranchKeySet) {
            branchReferral_.linkCache_.clear();
            branchReferral_.requestQueue_.clear();
        }
        branchReferral_.context_ = context.getApplicationContext();
        /* If {@link Application} is instantiated register for activity life cycle events. */
        if (context instanceof Application) {
            isAutoSessionMode_ = true;
            branchReferral_.setActivityLifeCycleObserver((Application) context);
        }
    }
    return branchReferral_;
}
/**
 * <p>Singleton method to return the pre-initialised, or newly initialise and return, a singleton
 * object of the type {@link Branch}.</p>
 * <p>Use this whenever you need to call a method directly on the {@link Branch} object.</p>
 * <p>Always uses the live Branch key.</p>
 *
 * @param context A {@link Context} from which this call was made.
 * @return An initialised {@link Branch} object, either fetched from a pre-initialised
 * instance within the singleton class, or a newly instantiated object where
 * one was not already requested during the current app lifecycle.
 */
public static Branch getInstance(@NonNull Context context) {
    return getBranchInstance(context, true);
}
/**
 * <p>If you configured your Strings file according to the guide, you'll be able to use
 * the test version of your app by just calling this static method before calling initSession.</p>
 *
 * @param context A {@link Context} from which this call was made.
 * @return An initialised {@link Branch} object using the test Branch key.
 */
public static Branch getTestInstance(@NonNull Context context) {
    return getBranchInstance(context, false);
}
/**
 * <p>Singleton method to return the pre-initialised, or newly initialise and return, a singleton
 * object of the type {@link Branch} in auto-session mode with default referrable settings.</p>
 * <p>Use this whenever you need to call a method directly on the {@link Branch} object.</p>
 *
 * @param context A {@link Context} from which this call was made.
 * @return An initialised {@link Branch} object, either fetched from a pre-initialised
 * instance within the singleton class, or a newly instantiated object where
 * one was not already requested during the current app lifecycle.
 */
@TargetApi(Build.VERSION_CODES.ICE_CREAM_SANDWICH)
public static Branch getAutoInstance(@NonNull Context context) {
    isAutoSessionMode_ = true;
    customReferrableSettings_ = CUSTOM_REFERRABLE_SETTINGS.USE_DEFAULT;
    // Use the live key unless the manifest enables test mode.
    getBranchInstance(context, !BranchUtil.isTestModeEnabled(context));
    return branchReferral_;
}
/**
 * <p>Singleton method to return the pre-initialised, or newly initialise and return, a singleton
 * object of the type {@link Branch} in auto-session mode with an explicit referrable choice.</p>
 * <p>Use this whenever you need to call a method directly on the {@link Branch} object.</p>
 *
 * @param context      A {@link Context} from which this call was made.
 * @param isReferrable A {@link Boolean} value indicating whether initialising a session on this Branch instance
 *                     should be considered as potentially referrable or not. By default, a user is only referrable
 *                     if initSession results in a fresh install. Overriding this gives you control of who is referrable.
 * @return An initialised {@link Branch} object, either fetched from a pre-initialised
 * instance within the singleton class, or a newly instantiated object where
 * one was not already requested during the current app lifecycle.
 */
@TargetApi(Build.VERSION_CODES.ICE_CREAM_SANDWICH)
public static Branch getAutoInstance(@NonNull Context context, boolean isReferrable) {
    isAutoSessionMode_ = true;
    customReferrableSettings_ = isReferrable
            ? CUSTOM_REFERRABLE_SETTINGS.REFERRABLE
            : CUSTOM_REFERRABLE_SETTINGS.NON_REFERRABLE;
    // Use the live key unless the manifest enables test mode.
    getBranchInstance(context, !BranchUtil.isTestModeEnabled(context));
    return branchReferral_;
}
/**
 * <p>Singleton method to return the pre-initialised, or newly initialise and return, a singleton
 * object of the type {@link Branch} in auto-session mode with an explicit Branch key.</p>
 * <p>Use this whenever you need to call a method directly on the {@link Branch} object.</p>
 *
 * @param context   A {@link Context} from which this call was made.
 * @param branchKey A {@link String} value used to initialize Branch (must start with "key_").
 * @return An initialised {@link Branch} object, either fetched from a pre-initialised
 * instance within the singleton class, or a newly instantiated object where
 * one was not already requested during the current app lifecycle.
 */
@TargetApi(Build.VERSION_CODES.ICE_CREAM_SANDWICH)
public static Branch getAutoInstance(@NonNull Context context, @NonNull String branchKey) {
    isAutoSessionMode_ = true;
    customReferrableSettings_ = CUSTOM_REFERRABLE_SETTINGS.USE_DEFAULT;
    getBranchInstance(context, !BranchUtil.isTestModeEnabled(context));
    if (!branchKey.startsWith("key_")) {
        Log.e("BranchSDK", "Branch Key is invalid.Please check your BranchKey");
    } else if (branchReferral_.prefHelper_.setBranchKey(branchKey)) {
        // A newly-set key invalidates the cached short links and any queued requests.
        branchReferral_.linkCache_.clear();
        branchReferral_.requestQueue_.clear();
    }
    return branchReferral_;
}
/**
 * <p>If you configured your Strings file according to the guide, you'll be able to use
 * the test version of your app by just calling this static method before calling initSession.</p>
 * <p>Auto-session variant: registers for activity life-cycle events via the application context.</p>
 *
 * @param context A {@link Context} from which this call was made.
 * @return An initialised {@link Branch} object using the test Branch key.
 */
@TargetApi(Build.VERSION_CODES.ICE_CREAM_SANDWICH)
public static Branch getAutoTestInstance(@NonNull Context context) {
    isAutoSessionMode_ = true;
    customReferrableSettings_ = CUSTOM_REFERRABLE_SETTINGS.USE_DEFAULT;
    getBranchInstance(context, false);
    return branchReferral_;
}
/**
 * <p>If you configured your Strings file according to the guide, you'll be able to use
 * the test version of your app by just calling this static method before calling initSession.</p>
 * <p>Auto-session variant with an explicit referrable choice.</p>
 *
 * @param context      A {@link Context} from which this call was made.
 * @param isReferrable A {@link Boolean} value indicating whether initialising a session on this Branch instance
 *                     should be considered as potentially referrable or not. By default, a user is only referrable
 *                     if initSession results in a fresh install. Overriding this gives you control of who is referrable.
 * @return An initialised {@link Branch} object using the test Branch key.
 */
@TargetApi(Build.VERSION_CODES.ICE_CREAM_SANDWICH)
public static Branch getAutoTestInstance(@NonNull Context context, boolean isReferrable) {
    isAutoSessionMode_ = true;
    customReferrableSettings_ = isReferrable ? CUSTOM_REFERRABLE_SETTINGS.REFERRABLE : CUSTOM_REFERRABLE_SETTINGS.NON_REFERRABLE;
    getBranchInstance(context, false);
    return branchReferral_;
}
/**
 * <p>Initialises an instance of the Branch object using the application context so the
 * singleton never leaks an {@link Activity}.</p>
 *
 * @param context A {@link Context} from which this call was made.
 * @return A newly initialised {@link Branch} object.
 */
private static Branch initInstance(@NonNull Context context) {
    return new Branch(context.getApplicationContext());
}
/**
 * <p>Manually sets the {@link Boolean} value, that indicates that the Branch API connection has
 * been initialised, to false - forcing re-initialisation on the next initSession call.</p>
 */
public void resetUserSession() {
    initState_ = SESSION_STATE.UNINITIALISED;
}
/**
 * <p>Sets the number of times to re-attempt a timed-out request to the Branch API, before
 * considering the request to have failed entirely. Default 5.</p>
 *
 * @param retryCount An {@link Integer} specifying the number of times to retry before giving
 *                   up and declaring defeat. Negative values are silently ignored.
 */
public void setRetryCount(int retryCount) {
    if (prefHelper_ != null && retryCount >= 0) {
        prefHelper_.setRetryCount(retryCount);
    }
}
/**
 * <p>Sets the amount of time in milliseconds to wait before re-attempting a timed-out request
 * to the Branch API. Default 3000 ms.</p>
 *
 * @param retryInterval An {@link Integer} value specifying the number of milliseconds to
 *                      wait before re-attempting a timed-out request. Values &lt;= 0 are ignored.
 */
public void setRetryInterval(int retryInterval) {
    if (prefHelper_ != null && retryInterval > 0) {
        prefHelper_.setRetryInterval(retryInterval);
    }
}
/**
 * <p>Sets the duration in milliseconds that the system should wait for a response before considering
 * any Branch API call to have timed out. Default 3000 ms.</p>
 * <p>Increase this to perform better in low network speed situations, but at the expense of
 * responsiveness to error situation.</p>
 *
 * @param timeout An {@link Integer} value specifying the number of milliseconds to wait before
 *                considering the request to have timed out. Values &lt;= 0 are ignored.
 */
public void setNetworkTimeout(int timeout) {
    if (prefHelper_ != null && timeout > 0) {
        prefHelper_.setTimeout(timeout);
    }
}
/**
 * Method to control reading Android ID from device. Set this to true to disable reading the device id.
 * This method should be called from your {@link Application#onCreate()} method before creating Branch auto instance by calling {@link Branch#getAutoInstance(Context)}
 *
 * @param deviceIdFetch {@link Boolean} with value true to disable reading the Android id from the
 *                      device. A {@code null} argument is treated as {@code false}.
 */
public static void disableDeviceIDFetch(Boolean deviceIdFetch) {
    // Null-safe: direct unboxing of a null Boolean would throw a NullPointerException.
    disableDeviceIDFetch_ = Boolean.TRUE.equals(deviceIdFetch);
}
/**
 * Returns true if reading the device id is disabled.
 *
 * @return {@link Boolean} with value true when reading the Android ID has been disabled.
 */
public static boolean isDeviceIDFetchDisabled() {
    return disableDeviceIDFetch_;
}
/**
 * Sets the key-value pairs for debugging the deep link. The key-value set in debug mode is given back with other deep link data on branch init session.
 * This method should be called from onCreate() of the activity which listens to Branch init session callbacks.
 *
 * @param debugParams A {@link JSONObject} containing key-value pairs for debugging branch deep linking.
 */
public void setDeepLinkDebugMode(JSONObject debugParams) {
    deeplinkDebugParams_ = debugParams;
}
/**
 * <p>Calls the {@link PrefHelper#disableExternAppListing()} on the local instance to prevent
 * a list of installed apps from being returned to the Branch API.</p>
 */
public void disableAppList() {
    prefHelper_.disableExternAppListing();
}
/**
 * <p>
 * Enable Facebook app link check operation during Branch initialisation.
 * Must be called before initSession for the check to take effect.
 * </p>
 */
public void enableFacebookAppLinkCheck() {
    enableFacebookAppLinkCheck_ = true;
}
/**
 * <p>Adds a key-value pair that will be attached to all subsequent requests to the Branch API.</p>
 *
 * @param key   A non-null {@link String} metadata key.
 * @param value A non-null {@link String} metadata value.
 */
public void setRequestMetadata(@NonNull String key, @NonNull String value) {
    prefHelper_.setRequestMetadata(key, value);
}
/**
 * <p>Initialises a session with the Branch API, assigning a {@link BranchUniversalReferralInitListener}
 * to perform an action upon successful initialisation. Delegates with a null {@link Activity}.</p>
 *
 * @param callback A {@link BranchUniversalReferralInitListener} instance that will be called following
 *                 successful (or unsuccessful) initialisation of the session with the Branch API.
 * @return A {@link Boolean} value, indicating <i>false</i> if initialisation is
 * unsuccessful.
 */
public boolean initSession(BranchUniversalReferralInitListener callback) {
    return initSession(callback, (Activity) null);
}
/**
 * <p>Initialises a session with the Branch API, assigning a {@link BranchReferralInitListener}
 * to perform an action upon successful initialisation. Delegates with a null {@link Activity}.</p>
 *
 * @param callback A {@link BranchReferralInitListener} instance that will be called following
 *                 successful (or unsuccessful) initialisation of the session with the Branch API.
 * @return A {@link Boolean} value, indicating <i>false</i> if initialisation is
 * unsuccessful.
 */
public boolean initSession(BranchReferralInitListener callback) {
    return initSession(callback, (Activity) null);
}
/**
 * <p>Initialises a session with the Branch API, passing the {@link Activity} and assigning a
 * {@link BranchUniversalReferralInitListener} to perform an action upon successful initialisation.</p>
 *
 * @param callback A {@link BranchUniversalReferralInitListener} instance that will be called
 *                 following successful (or unsuccessful) initialisation of the session
 *                 with the Branch API.
 * @param activity The calling {@link Activity} for context.
 * @return A {@link Boolean} value, indicating <i>false</i> if initialisation is
 * unsuccessful.
 */
public boolean initSession(BranchUniversalReferralInitListener callback, Activity activity) {
    // USE_DEFAULT and REFERRABLE both resolve to a referrable session; only an explicit
    // NON_REFERRABLE opt-out disables referrability.
    boolean isReferrable = customReferrableSettings_ != CUSTOM_REFERRABLE_SETTINGS.NON_REFERRABLE;
    initUserSessionInternal(callback, activity, isReferrable);
    return true;
}
/**
 * <p>Initialises a session with the Branch API, passing the {@link Activity} and assigning a
 * {@link BranchReferralInitListener} to perform an action upon successful initialisation.</p>
 *
 * @param callback A {@link BranchReferralInitListener} instance that will be called
 *                 following successful (or unsuccessful) initialisation of the session
 *                 with the Branch API.
 * @param activity The calling {@link Activity} for context.
 * @return A {@link Boolean} value, indicating <i>false</i> if initialisation is
 * unsuccessful.
 */
public boolean initSession(BranchReferralInitListener callback, Activity activity) {
    // USE_DEFAULT and REFERRABLE both resolve to a referrable session; only an explicit
    // NON_REFERRABLE opt-out disables referrability.
    boolean isReferrable = customReferrableSettings_ != CUSTOM_REFERRABLE_SETTINGS.NON_REFERRABLE;
    initUserSessionInternal(callback, activity, isReferrable);
    return true;
}
/**
 * <p>Initialises a session with the Branch API. Delegates with a null {@link Activity}.</p>
 *
 * @param callback A {@link BranchUniversalReferralInitListener} instance that will be called
 *                 following successful (or unsuccessful) initialisation of the session
 *                 with the Branch API.
 * @param data     A {@link Uri} variable containing the details of the source link that
 *                 led to this initialisation action.
 * @return A {@link Boolean} value that will return <i>false</i> if the supplied
 * <i>data</i> parameter cannot be handled successfully - i.e. is not of a
 * valid URI format.
 */
public boolean initSession(BranchUniversalReferralInitListener callback, @NonNull Uri data) {
    return initSession(callback, data, null);
}
/**
 * <p>Initialises a session with the Branch API. Delegates with a null {@link Activity}.</p>
 *
 * @param callback A {@link BranchReferralInitListener} instance that will be called
 *                 following successful (or unsuccessful) initialisation of the session
 *                 with the Branch API.
 * @param data     A {@link Uri} variable containing the details of the source link that
 *                 led to this initialisation action.
 * @return A {@link Boolean} value that will return <i>false</i> if the supplied
 * <i>data</i> parameter cannot be handled successfully - i.e. is not of a
 * valid URI format.
 */
public boolean initSession(BranchReferralInitListener callback, @NonNull Uri data) {
    return initSession(callback, data, null);
}
/**
 * <p>Initialises a session with the Branch API, capturing link parameters from the supplied
 * {@link Uri} first.</p>
 *
 * @param callback A {@link BranchUniversalReferralInitListener} instance that will be called
 *                 following successful (or unsuccessful) initialisation of the session
 *                 with the Branch API.
 * @param data     A {@link Uri} variable containing the details of the source link that
 *                 led to this initialisation action.
 * @param activity The calling {@link Activity} for context.
 * @return A {@link Boolean} value that will return <i>false</i> if the supplied
 * <i>data</i> parameter cannot be handled successfully - i.e. is not of a
 * valid URI format.
 */
public boolean initSession(BranchUniversalReferralInitListener callback, @NonNull Uri data, Activity activity) {
    readAndStripParam(data, activity);
    // Propagate the delegated result, consistent with the BranchReferralInitListener overload
    // (previously this method ignored the result and returned a literal true).
    return initSession(callback, activity);
}
/**
 * <p>Initialises a session with the Branch API, capturing link parameters from the supplied
 * {@link Uri} first.</p>
 *
 * @param callback A {@link BranchReferralInitListener} instance that will be called
 *                 following successful (or unsuccessful) initialisation of the session
 *                 with the Branch API.
 * @param data     A {@link Uri} variable containing the details of the source link that
 *                 led to this initialisation action.
 * @param activity The calling {@link Activity} for context.
 * @return A {@link Boolean} value that will return <i>false</i> if the supplied
 * <i>data</i> parameter cannot be handled successfully - i.e. is not of a
 * valid URI format.
 */
public boolean initSession(BranchReferralInitListener callback, @NonNull Uri data, Activity activity) {
    readAndStripParam(data, activity);
    return initSession(callback, activity);
}
/**
 * <p>Initialises a session with the Branch API, without a callback or {@link Activity}.</p>
 *
 * @return A {@link Boolean} value that returns <i>false</i> if unsuccessful.
 */
public boolean initSession() {
    return initSession((Activity) null);
}
/**
 * <p>Initialises a session with the Branch API, without a callback.</p>
 *
 * @param activity The calling {@link Activity} for context.
 * @return A {@link Boolean} value that returns <i>false</i> if unsuccessful.
 */
public boolean initSession(Activity activity) {
    return initSession((BranchReferralInitListener) null, activity);
}
/**
 * <p>Initialises a session with the Branch API, with associated data from the supplied
 * {@link Uri}. Delegates with a null {@link Activity}.</p>
 *
 * @param data A {@link Uri} variable containing the details of the source link that
 *             led to this initialisation action.
 * @return A {@link Boolean} value that returns <i>false</i> if unsuccessful.
 */
public boolean initSessionWithData(@NonNull Uri data) {
    return initSessionWithData(data, null);
}
/**
 * <p>Initialises a session with the Branch API, with associated data from the supplied
 * {@link Uri}.</p>
 *
 * @param data     A {@link Uri} variable containing the details of the source link that led to this
 *                 initialisation action.
 * @param activity The calling {@link Activity} for context.
 * @return A {@link Boolean} value that returns <i>false</i> if unsuccessful.
 */
public boolean initSessionWithData(Uri data, Activity activity) {
    readAndStripParam(data, activity);
    return initSession((BranchReferralInitListener) null, activity);
}
/**
 * <p>Initialises a session with the Branch API, specifying whether the initialisation can count
 * as a referrable action. Delegates with no callback and no {@link Activity}.</p>
 *
 * @param isReferrable A {@link Boolean} value indicating whether this initialisation
 *                     session should be considered as potentially referrable or not.
 *                     By default, a user is only referrable if initSession results in a
 *                     fresh install. Overriding this gives you control of who is referrable.
 * @return A {@link Boolean} value that returns <i>false</i> if unsuccessful.
 */
public boolean initSession(boolean isReferrable) {
    return initSession((BranchReferralInitListener) null, isReferrable, (Activity) null);
}
/**
 * <p>Initialises a session with the Branch API, specifying whether the initialisation can count
 * as a referrable action, and supplying the calling {@link Activity} for context.</p>
 *
 * @param isReferrable A {@link Boolean} value indicating whether this initialisation
 *                     session should be considered as potentially referrable or not.
 *                     By default, a user is only referrable if initSession results in a
 *                     fresh install. Overriding this gives you control of who is referrable.
 * @param activity     The calling {@link Activity} for context.
 * @return A {@link Boolean} value that returns <i>false</i> if unsuccessful.
 */
public boolean initSession(boolean isReferrable, @NonNull Activity activity) {
    return initSession((BranchReferralInitListener) null, isReferrable, activity);
}
/**
 * <p>Initialises a session with the Branch API. Delegates with a null {@link Activity}.</p>
 *
 * @param callback     A {@link BranchUniversalReferralInitListener} instance that will be called
 *                     following successful (or unsuccessful) initialisation of the session
 *                     with the Branch API.
 * @param isReferrable A {@link Boolean} value indicating whether this initialisation
 *                     session should be considered as potentially referrable or not.
 *                     By default, a user is only referrable if initSession results in a
 *                     fresh install. Overriding this gives you control of who is referrable.
 * @param data         A {@link Uri} variable containing the details of the source link that
 *                     led to this initialisation action.
 * @return A {@link Boolean} value that returns <i>false</i> if unsuccessful.
 */
public boolean initSession(BranchUniversalReferralInitListener callback, boolean isReferrable, Uri data) {
    return initSession(callback, isReferrable, data, null);
}
/**
 * <p>Initialises a session with the Branch API. Delegates with a null {@link Activity}.</p>
 *
 * @param callback     A {@link BranchReferralInitListener} instance that will be called
 *                     following successful (or unsuccessful) initialisation of the session
 *                     with the Branch API.
 * @param isReferrable A {@link Boolean} value indicating whether this initialisation
 *                     session should be considered as potentially referrable or not.
 *                     By default, a user is only referrable if initSession results in a
 *                     fresh install. Overriding this gives you control of who is referrable.
 * @param data         A {@link Uri} variable containing the details of the source link that
 *                     led to this initialisation action.
 * @return A {@link Boolean} value that returns <i>false</i> if unsuccessful.
 */
public boolean initSession(BranchReferralInitListener callback, boolean isReferrable, @NonNull Uri data) {
    return initSession(callback, isReferrable, data, null);
}
/**
* <p>Initialises a session with the Branch API.</p>
*
* @param callback A {@link BranchUniversalReferralInitListener} instance that will be called
* following successful (or unsuccessful) initialisation of the session
* with the Branch API.
* @param isReferrable A {@link Boolean} value indicating whether this initialisation
* session should be considered as potentially referrable or not.
* By default, a user is only referrable if initSession results in a
* fresh install. Overriding this gives you control of who is referrable.
* @param data A {@link Uri} variable containing the details of the source link that
* led to this initialisation action.
* @param activity The calling {@link Activity} for context.
* @return A {@link Boolean} value that returns <i>false</i> if unsuccessful.
*/
public boolean initSession(BranchUniversalReferralInitListener callback, boolean isReferrable, @NonNull Uri data, Activity activity) {
readAndStripParam(data, activity);
return initSession(callback, isReferrable, activity);
}
/**
* <p>Initialises a session with the Branch API.</p>
*
* @param callback A {@link BranchReferralInitListener} instance that will be called
* following successful (or unsuccessful) initialisation of the session
* with the Branch API.
* @param isReferrable A {@link Boolean} value indicating whether this initialisation
* session should be considered as potentially referrable or not.
* By default, a user is only referrable if initSession results in a
* fresh install. Overriding this gives you control of who is referrable.
* @param data A {@link Uri} variable containing the details of the source link that
* led to this initialisation action.
* @param activity The calling {@link Activity} for context.
* @return A {@link Boolean} value that returns <i>false</i> if unsuccessful.
*/
public boolean initSession(BranchReferralInitListener callback, boolean isReferrable, @NonNull Uri data, Activity activity) {
readAndStripParam(data, activity);
return initSession(callback, isReferrable, activity);
}
/**
* <p>Initialises a session with the Branch API.</p>
*
* @param callback A {@link BranchUniversalReferralInitListener} instance that will be called
* following successful (or unsuccessful) initialisation of the session
* with the Branch API.
* @param isReferrable A {@link Boolean} value indicating whether this initialisation
* session should be considered as potentially referrable or not.
* By default, a user is only referrable if initSession results in a
* fresh install. Overriding this gives you control of who is referrable.
* @return A {@link Boolean} value that returns <i>false</i> if unsuccessful.
*/
public boolean initSession(BranchUniversalReferralInitListener callback, boolean isReferrable) {
return initSession(callback, isReferrable, (Activity) null);
}
/**
* <p>Initialises a session with the Branch API.</p>
*
* @param callback A {@link BranchReferralInitListener} instance that will be called
* following successful (or unsuccessful) initialisation of the session
* with the Branch API.
* @param isReferrable A {@link Boolean} value indicating whether this initialisation
* session should be considered as potentially referrable or not.
* By default, a user is only referrable if initSession results in a
* fresh install. Overriding this gives you control of who is referrable.
* @return A {@link Boolean} value that returns <i>false</i> if unsuccessful.
*/
public boolean initSession(BranchReferralInitListener callback, boolean isReferrable) {
return initSession(callback, isReferrable, (Activity) null);
}
/**
* <p>Initialises a session with the Branch API.</p>
*
* @param callback A {@link BranchUniversalReferralInitListener} instance that will be called
* following successful (or unsuccessful) initialisation of the session
* with the Branch API.
* @param isReferrable A {@link Boolean} value indicating whether this initialisation
* session should be considered as potentially referrable or not.
* By default, a user is only referrable if initSession results in a
* fresh install. Overriding this gives you control of who is referrable.
* @param activity The calling {@link Activity} for context.
* @return A {@link Boolean} value that returns <i>false</i> if unsuccessful.
*/
public boolean initSession(BranchUniversalReferralInitListener callback, boolean isReferrable, Activity activity) {
initUserSessionInternal(callback, activity, isReferrable);
return true;
}
/**
* <p>Initialises a session with the Branch API.</p>
*
* @param callback A {@link BranchReferralInitListener} instance that will be called
* following successful (or unsuccessful) initialisation of the session
* with the Branch API.
* @param isReferrable A {@link Boolean} value indicating whether this initialisation
* session should be considered as potentially referrable or not.
* By default, a user is only referrable if initSession results in a
* fresh install. Overriding this gives you control of who is referrable.
* @param activity The calling {@link Activity} for context.
* @return A {@link Boolean} value that returns <i>false</i> if unsuccessful.
*/
public boolean initSession(BranchReferralInitListener callback, boolean isReferrable, Activity activity) {
initUserSessionInternal(callback, activity, isReferrable);
return true;
}
private void initUserSessionInternal(BranchUniversalReferralInitListener callback, Activity activity, boolean isReferrable) {
BranchUniversalReferralInitWrapper branchUniversalReferralInitWrapper = new BranchUniversalReferralInitWrapper(callback);
initUserSessionInternal(branchUniversalReferralInitWrapper, activity, isReferrable);
}
    /**
     * Core session initialiser. Behaviour depends on the current {@code initState_}:
     * already-initialised sessions just notify the callback; otherwise the referrable flag
     * is recorded and a session open/install request is queued (or the callback is attached
     * to an initialisation already in flight).
     *
     * @param callback     listener to notify with the session's referring params (may be null).
     * @param activity     calling activity; held only via a {@link java.lang.ref.WeakReference}.
     * @param isReferrable whether this initialisation should count as referrable.
     */
    private void initUserSessionInternal(BranchReferralInitListener callback, Activity activity, boolean isReferrable) {
        // Keep a weak reference so Branch never leaks the Activity.
        if (activity != null) {
            currentActivityReference_ = new WeakReference<>(activity);
        }
        //If already initialised
        if (hasUser() && hasSession() && initState_ == SESSION_STATE.INITIALISED) {
            if (callback != null) {
                if (isAutoSessionMode_) {
                    // Since Auto session mode initialise the session by itself on starting the first activity, we need to provide user
                    // the referring params if they call init session after init is completed. Note that user wont do InitSession per activity in auto session mode.
                    if (!isInitReportedThroughCallBack) { //Check if session params are reported already in case user call initsession form a different activity(not a noraml case)
                        callback.onInitFinished(getLatestReferringParams(), null);
                        isInitReportedThroughCallBack = true;
                    } else {
                        // Params already delivered once this session: hand back an empty object.
                        callback.onInitFinished(new JSONObject(), null);
                    }
                } else {
                    // Since user will do init session per activity in non auto session mode , we don't want to repeat the referring params with each initSession()call.
                    callback.onInitFinished(new JSONObject(), null);
                }
            }
        }
        //If uninitialised or initialising
        else {
            // In case of Auto session init will be called from Branch before user. So initialising
            // State also need to look for isReferrable value
            if (isReferrable) {
                this.prefHelper_.setIsReferrable();
            } else {
                this.prefHelper_.clearIsReferrable();
            }
            //If initialising ,then set new callbacks.
            if (initState_ == SESSION_STATE.INITIALISING) {
                if (callback != null) {
                    requestQueue_.setInstallOrOpenCallback(callback);
                }
            }
            //if Uninitialised move request to the front if there is an existing request or create a new request.
            else {
                initState_ = SESSION_STATE.INITIALISING;
                initializeSession(callback);
            }
        }
    }
/**
* <p>Closes the current session, dependent on the state of the
* PrefHelper#getSmartSession() {@link Boolean} value. If <i>true</i>, take no action.
* If false, close the session via the {@link #executeClose()} method.</p>
* <p>Note that if smartSession is enabled, closeSession cannot be called within
* a 2 second time span of another Branch action. This has to do with the method that
* Branch uses to keep a session alive during Activity transitions</p>
*
* @deprecated This method is deprecated from SDK v1.14.6. Session Start and close are automatically handled by Branch.
* In case you need to handle sessions manually inorder to support minimum sdk version less than 14 please consider using
* SDK version 1.14.5
*/
public void closeSession() {
Log.w("BranchSDK", "closeSession() method is deprecated from SDK v1.14.6.Session is automatically handled by Branch." +
"In case you need to handle sessions manually inorder to support minimum sdk version less than 14 please consider using " +
" SDK version 1.14.5");
}
/*
* <p>Closes the current session. Should be called by on getting the last actvity onStop() event.
* </p>
*/
private void closeSessionInternal() {
executeClose();
sessionReferredLink_ = null;
if (prefHelper_.getExternAppListing()) {
if (appListingSchedule_ == null) {
scheduleListOfApps();
}
}
}
    /**
     * <p>
     * Enabled Strong matching check using chrome cookies. This method should be called before
     * Branch#getAutoInstance(Context).</p>
     *
     * @param cookieMatchDomain The domain for the url used to match the cookie (eg. example.app.link)
     */
    public static void enableCookieBasedMatching(String cookieMatchDomain) {
        // Stored statically; read later when deciding whether to perform a strong match.
        cookieBasedMatchDomain_ = cookieMatchDomain;
    }
    /**
     * <p>
     * Enabled Strong matching check using chrome cookies. This method should be called before
     * Branch#getAutoInstance(Context).</p>
     *
     * @param cookieMatchDomain The domain for the url used to match the cookie (eg. example.app.link)
     * @param delay             Time in millisecond to wait for the strong match to check to finish before Branch init session is called.
     *                          Default time is 750 msec.
     */
    public static void enableCookieBasedMatching(String cookieMatchDomain, int delay) {
        cookieBasedMatchDomain_ = cookieMatchDomain;
        // Configure how long session init waits for the strong-match URL hit.
        BranchStrongMatchHelper.getInstance().setStrongMatchUrlHitDelay(delay);
    }
/**
* <p>Perform the state-safe actions required to terminate any open session, and report the
* closed application event to the Branch API.</p>
*/
private void executeClose() {
if (initState_ != SESSION_STATE.UNINITIALISED) {
if (!hasNetwork_) {
// if there's no network connectivity, purge the old install/open
ServerRequest req = requestQueue_.peek();
if (req != null && (req instanceof ServerRequestRegisterInstall) || (req instanceof ServerRequestRegisterOpen)) {
requestQueue_.dequeue();
}
} else {
if (!requestQueue_.containsClose()) {
ServerRequest req = new ServerRequestRegisterClose(context_);
handleNewRequest(req);
}
}
initState_ = SESSION_STATE.UNINITIALISED;
}
}
    /**
     * Inspects the launching {@link Uri} and {@link Activity} intent for Branch data and strips
     * what it consumes so the same data is not re-processed on relaunch. Three passes:
     * (1) capture the external intent URI/extras for analytics (subject to the scheme white list
     * and host skip list); (2) detect a push-notification launch and record its identifier;
     * (3) detect a Branch link-click id or App Link and record it.
     *
     * @param data     the launching URI; may be null.
     * @param activity the launching activity; may be null.
     * @return true only when a link_click_id was found and stripped from the intent data;
     *         false otherwise (including the push-notification and App Link cases).
     */
    private boolean readAndStripParam(Uri data, Activity activity) {
        // Only act once the intent state is ready; otherwise fall through and return false.
        if (intentState_ == INTENT_STATE.READY) {
            // Capture the intent URI and extra for analytics in case started by external intents such as google app search
            try {
                if (data != null) {
                    boolean foundSchemeMatch;
                    boolean skipThisHost = false;
                    // An empty white list means "accept every scheme".
                    if (externalUriWhiteList_.size() > 0) {
                        foundSchemeMatch = externalUriWhiteList_.contains(data.getScheme());
                    } else {
                        foundSchemeMatch = true;
                    }
                    if (skipExternalUriHosts_.size() > 0) {
                        for (String host : skipExternalUriHosts_) {
                            String externalHost = data.getHost();
                            if (externalHost != null && externalHost.equals(host)) {
                                skipThisHost = true;
                                break;
                            }
                        }
                    }
                    if (foundSchemeMatch && !skipThisHost) {
                        sessionReferredLink_ = data.toString();
                        prefHelper_.setExternalIntentUri(data.toString());
                        if (activity != null && activity.getIntent() != null && activity.getIntent().getExtras() != null) {
                            Bundle bundle = activity.getIntent().getExtras();
                            Set<String> extraKeys = bundle.keySet();
                            if (extraKeys.size() > 0) {
                                // Copy only white-listed extras into the stored JSON.
                                JSONObject extrasJson = new JSONObject();
                                for (String key : EXTERNAL_INTENT_EXTRA_KEY_WHITE_LIST) {
                                    if (extraKeys.contains(key)) {
                                        extrasJson.put(key, bundle.get(key));
                                    }
                                }
                                if (extrasJson.length() > 0) {
                                    prefHelper_.setExternalIntentExtra(extrasJson.toString());
                                }
                            }
                        }
                    }
                }
            } catch (Exception ignore) {
                // Best-effort analytics capture; failures here must never break session init.
            }
            //Check for any push identifier in case app is launched by a push notification
            try {
                if (activity != null && activity.getIntent() != null && activity.getIntent().getExtras() != null) {
                    if (activity.getIntent().getExtras().getBoolean(Defines.Jsonkey.BranchLinkUsed.getKey()) == false) {
                        String pushIdentifier = activity.getIntent().getExtras().getString(Defines.Jsonkey.AndroidPushNotificationKey.getKey()); // This seems producing unmarshalling errors in some corner cases
                        if (pushIdentifier != null && pushIdentifier.length() > 0) {
                            prefHelper_.setPushIdentifier(pushIdentifier);
                            // Mark the intent so the same push launch is not consumed twice.
                            Intent thisIntent = activity.getIntent();
                            thisIntent.putExtra(Defines.Jsonkey.BranchLinkUsed.getKey(), true);
                            activity.setIntent(thisIntent);
                            return false;
                        }
                    }
                }
            } catch (Exception ignore) {
                // Defensive: extras access can throw on malformed bundles (see note above).
            }
            //Check for link click id or app link
            if (data != null && data.isHierarchical() && activity != null) {
                try {
                    if (data.getQueryParameter(Defines.Jsonkey.LinkClickID.getKey()) != null) {
                        prefHelper_.setLinkClickIdentifier(data.getQueryParameter(Defines.Jsonkey.LinkClickID.getKey()));
                        // Remove the link_click_id parameter from the intent's data string,
                        // matching it together with its adjacent '?' or '&' separator.
                        String paramString = "link_click_id=" + data.getQueryParameter(Defines.Jsonkey.LinkClickID.getKey());
                        String uriString = null;
                        if (activity.getIntent() != null) {
                            uriString = activity.getIntent().getDataString();
                        }
                        if (data.getQuery().length() == paramString.length()) {
                            paramString = "\\?" + paramString;
                        } else if (uriString != null && (uriString.length() - paramString.length()) == uriString.indexOf(paramString)) {
                            paramString = "&" + paramString;
                        } else {
                            paramString = paramString + "&";
                        }
                        if (uriString != null) {
                            Uri newData = Uri.parse(uriString.replaceFirst(paramString, ""));
                            activity.getIntent().setData(newData);
                        } else {
                            Log.w(TAG, "Branch Warning. URI for the launcher activity is null. Please make sure that intent data is not set to null before calling Branch#InitSession ");
                        }
                        return true;
                    } else {
                        // Check if the clicked url is an app link pointing to this app
                        String scheme = data.getScheme();
                        Intent intent = activity.getIntent();
                        if (scheme != null && intent != null) {
                            // On Launching app from the recent apps, Android Start the app with the original intent data. So up in opening app from recent list
                            // Intent will have App link in data and lead to issue of getting wrong parameters. (In case of link click id since we are looking for actual link click on back end this case will never happen)
                            if ((intent.getFlags() & Intent.FLAG_ACTIVITY_LAUNCHED_FROM_HISTORY) == 0) {
                                if ((scheme.equalsIgnoreCase("http") || scheme.equalsIgnoreCase("https"))
                                        && data.getHost() != null && data.getHost().length() > 0 && !intent.getBooleanExtra(Defines.Jsonkey.BranchLinkUsed.getKey(), false)) {
                                    prefHelper_.setAppLink(data.toString());
                                    intent.putExtra(Defines.Jsonkey.BranchLinkUsed.getKey(), true);
                                    activity.setIntent(intent);
                                    return false;
                                }
                            }
                        }
                    }
                } catch (Exception ignore) {
                    // NOTE(review): data.getQuery() could presumably be null for a hierarchical
                    // URI with no query; this catch also covers that case — confirm.
                }
            }
        }
        return false;
    }
@Override
public void onGAdsFetchFinished() {
isGAParamsFetchInProgress_ = false;
requestQueue_.unlockProcessWait(ServerRequest.PROCESS_WAIT_LOCK.GAID_FETCH_WAIT_LOCK);
if (performCookieBasedStrongMatchingOnGAIDAvailable) {
performCookieBasedStrongMatch();
performCookieBasedStrongMatchingOnGAIDAvailable = false;
} else {
processNextQueueItem();
}
}
    /**
     * Called when install-referrer capture completes: releases the corresponding wait lock
     * and resumes processing of the request queue.
     */
    @Override
    public void onInstallReferrerEventsFinished() {
        requestQueue_.unlockProcessWait(ServerRequest.PROCESS_WAIT_LOCK.INSTALL_REFERRER_FETCH_WAIT_LOCK);
        processNextQueueItem();
    }
/**
* Add the given URI Scheme to the external Uri white list. Branch will collect
* external intent uri only if white list matches with the app opened URL properties
* If no URI is added to the white list branch will collect all external intent uris.
* White list schemes should be added immediately after calling {@link Branch#getAutoInstance(Context)}
*
* @param uriScheme {@link String} Case sensitive Uri scheme to be added to the external intent uri white list.(eg. "my_scheme://")
* @return {@link Branch} instance for successive method calls
*/
public Branch addWhiteListedScheme(String uriScheme) {
if (uriScheme == null) {
return this;
}
uriScheme = uriScheme.replace("://", "");
externalUriWhiteList_.add(uriScheme);
return this;
}
    /**
     * <p>Set the given list of URI Scheme as the external Uri white list. Branch will collect
     * external intent uri only for Uris in white list.
     * </p>
     * If no URI is added to the white list branch will collect all external intent uris
     * White list should be set immediately after calling {@link Branch#getAutoInstance(Context)}
     * <!-- @param uriSchemes {@link List<String>} List of case sensitive Uri schemes to set as the white list -->
     *
     * @return {@link Branch} instance for successive method calls
     */
    public Branch setWhiteListedSchemes(List<String> uriSchemes) {
        // NOTE(review): stores the caller's list reference directly (no defensive copy) and a
        // null argument would NPE later when the white list is sized — confirm callers never pass null.
        externalUriWhiteList_ = uriSchemes;
        return this;
    }
/**
* <p>
* Add the given URI host to the external Uri skip list. Branch will not collect
* external intent uri if skip list contains with the app opened URL.
* If no host is added to the skip list, Branch will collect all external Intent uris.
* Skip list hosts should be added immediately after calling {@link Branch#getAutoInstance(Context)}.
* </p>
*
* @param hostName {@link String} Case sensitive Uri path to be added to the external Intent uri skip list. (e.g. "product" to skip my-scheme://product/*)
* @return {@link Branch} instance for successive method calls
*/
public Branch addUriHostsToSkip(String hostName) {
if ((hostName != null) && (!hostName.equals("")))
skipExternalUriHosts_.add(hostName);
return this;
}
/**
* <p>Identifies the current user to the Branch API by supplying a unique identifier as a
* {@link String} value. No callback.</p>
*
* @param userId A {@link String} value containing the unique identifier of the user.
*/
public void setIdentity(@NonNull String userId) {
setIdentity(userId, null);
}
/**
* <p>Identifies the current user to the Branch API by supplying a unique identifier as a
* {@link String} value, with a callback specified to perform a defined action upon successful
* response to request.</p>
*
* @param userId A {@link String} value containing the unique identifier of the user.
* @param callback A {@link BranchReferralInitListener} callback instance that will return
* the data associated with the user id being assigned, if available.
*/
public void setIdentity(@NonNull String userId, @Nullable BranchReferralInitListener
callback) {
ServerRequest req = new ServerRequestIdentifyUserRequest(context_, callback, userId);
if (!req.constructError_ && !req.handleErrors(context_)) {
handleNewRequest(req);
} else {
if (((ServerRequestIdentifyUserRequest) req).isExistingID()) {
((ServerRequestIdentifyUserRequest) req).handleUserExist(branchReferral_);
}
}
}
/**
* Indicates whether or not this user has a custom identity specified for them. Note that this is independent of installs.
* If you call setIdentity, this device will have that identity associated with this user until logout is called.
* This includes persisting through uninstalls, as we track device id.
*
* @return A {@link Boolean} value that will return <i>true</i> only if user already has an identity.
*/
public boolean isUserIdentified() {
return !prefHelper_.getIdentity().equals(PrefHelper.NO_STRING_VALUE);
}
/**
* <p>This method should be called if you know that a different person is about to use the app. For example,
* if you allow users to log out and let their friend use the app, you should call this to notify Branch
* to create a new user for this device. This will clear the first and latest params, as a new session is created.</p>
*/
public void logout() {
logout(null);
}
/**
* <p>This method should be called if you know that a different person is about to use the app. For example,
* if you allow users to log out and let their friend use the app, you should call this to notify Branch
* to create a new user for this device. This will clear the first and latest params, as a new session is created.</p>
*
* @param callback An instance of {@link io.branch.referral.Branch.LogoutStatusListener} to callback with the logout operation status.
*/
public void logout(LogoutStatusListener callback) {
ServerRequest req = new ServerRequestLogout(context_, callback);
if (!req.constructError_ && !req.handleErrors(context_)) {
handleNewRequest(req);
}
}
/**
* <p>Fire-and-forget retrieval of rewards for the current session. Without a callback.</p>
*/
public void loadRewards() {
loadRewards(null);
}
/**
* <p>Retrieves rewards for the current session, with a callback to perform a predefined
* action following successful report of state change. You'll then need to call getCredits
* in the callback to update the credit totals in your UX.</p>
*
* @param callback A {@link BranchReferralStateChangedListener} callback instance that will
* trigger actions defined therein upon a referral state change.
*/
public void loadRewards(BranchReferralStateChangedListener callback) {
ServerRequest req = new ServerRequestGetRewards(context_, callback);
if (!req.constructError_ && !req.handleErrors(context_)) {
handleNewRequest(req);
}
}
    /**
     * <p>Retrieve the number of credits available for the "default" bucket.</p>
     *
     * @return An {@link Integer} value of the number credits available in the "default" bucket.
     */
    public int getCredits() {
        // Credit counts are served from locally cached preferences, not a network call.
        return prefHelper_.getCreditCount();
    }
    /**
     * Returns an {@link Integer} of the number of credits available for use within the supplied
     * bucket name.
     *
     * @param bucket A {@link String} value indicating the name of the bucket to get credits for.
     * @return An {@link Integer} value of the number credits available in the specified
     * bucket.
     */
    public int getCreditsForBucket(String bucket) {
        // Credit counts are served from locally cached preferences, not a network call.
        return prefHelper_.getCreditCount(bucket);
    }
/**
* <p>Redeems the specified number of credits from the "default" bucket, if there are sufficient
* credits within it. If the number to redeem exceeds the number available in the bucket, all of
* the available credits will be redeemed instead.</p>
*
* @param count A {@link Integer} specifying the number of credits to attempt to redeem from
* the bucket.
*/
public void redeemRewards(int count) {
redeemRewards(Defines.Jsonkey.DefaultBucket.getKey(), count, null);
}
/**
* <p>Redeems the specified number of credits from the "default" bucket, if there are sufficient
* credits within it. If the number to redeem exceeds the number available in the bucket, all of
* the available credits will be redeemed instead.</p>
*
* @param count A {@link Integer} specifying the number of credits to attempt to redeem from
* the bucket.
* @param callback A {@link BranchReferralStateChangedListener} callback instance that will
* trigger actions defined therein upon a executing redeem rewards.
*/
public void redeemRewards(int count, BranchReferralStateChangedListener callback) {
redeemRewards(Defines.Jsonkey.DefaultBucket.getKey(), count, callback);
}
/**
* <p>Redeems the specified number of credits from the named bucket, if there are sufficient
* credits within it. If the number to redeem exceeds the number available in the bucket, all of
* the available credits will be redeemed instead.</p>
*
* @param bucket A {@link String} value containing the name of the referral bucket to attempt
* to redeem credits from.
* @param count A {@link Integer} specifying the number of credits to attempt to redeem from
* the specified bucket.
*/
public void redeemRewards(@NonNull final String bucket, final int count) {
redeemRewards(bucket, count, null);
}
/**
* <p>Redeems the specified number of credits from the named bucket, if there are sufficient
* credits within it. If the number to redeem exceeds the number available in the bucket, all of
* the available credits will be redeemed instead.</p>
*
* @param bucket A {@link String} value containing the name of the referral bucket to attempt
* to redeem credits from.
* @param count A {@link Integer} specifying the number of credits to attempt to redeem from
* the specified bucket.
* @param callback A {@link BranchReferralStateChangedListener} callback instance that will
* trigger actions defined therein upon a executing redeem rewards.
*/
public void redeemRewards(@NonNull final String bucket,
final int count, BranchReferralStateChangedListener callback) {
ServerRequestRedeemRewards req = new ServerRequestRedeemRewards(context_, bucket, count, callback);
if (!req.constructError_ && !req.handleErrors(context_)) {
handleNewRequest(req);
}
}
/**
* <p>Gets the credit history of the specified bucket and triggers a callback to handle the
* response.</p>
*
* @param callback A {@link BranchListResponseListener} callback instance that will trigger
* actions defined therein upon receipt of a response to a create link request.
*/
public void getCreditHistory(BranchListResponseListener callback) {
getCreditHistory(null, null, 100, CreditHistoryOrder.kMostRecentFirst, callback);
}
/**
* <p>Gets the credit history of the specified bucket and triggers a callback to handle the
* response.</p>
*
* @param bucket A {@link String} value containing the name of the referral bucket that the
* code will belong to.
* @param callback A {@link BranchListResponseListener} callback instance that will trigger
* actions defined therein upon receipt of a response to a create link request.
*/
public void getCreditHistory(@NonNull final String bucket, BranchListResponseListener
callback) {
getCreditHistory(bucket, null, 100, CreditHistoryOrder.kMostRecentFirst, callback);
}
/**
* <p>Gets the credit history of the specified bucket and triggers a callback to handle the
* response.</p>
*
* @param afterId A {@link String} value containing the ID of the history record to begin after.
* This allows for a partial history to be retrieved, rather than the entire
* credit history of the bucket.
* @param length A {@link Integer} value containing the number of credit history records to
* return.
* @param order A {@link CreditHistoryOrder} object indicating which order the results should
* be returned in.
* <p>Valid choices:</p>
* <ul>
* <li>{@link CreditHistoryOrder#kMostRecentFirst}</li>
* <li>{@link CreditHistoryOrder#kLeastRecentFirst}</li>
* </ul>
* @param callback A {@link BranchListResponseListener} callback instance that will trigger
* actions defined therein upon receipt of a response to a create link request.
*/
public void getCreditHistory(@NonNull final String afterId, final int length,
@NonNull final CreditHistoryOrder order, BranchListResponseListener callback) {
getCreditHistory(null, afterId, length, order, callback);
}
/**
* <p>Gets the credit history of the specified bucket and triggers a callback to handle the
* response.</p>
*
* @param bucket A {@link String} value containing the name of the referral bucket that the
* code will belong to.
* @param afterId A {@link String} value containing the ID of the history record to begin after.
* This allows for a partial history to be retrieved, rather than the entire
* credit history of the bucket.
* @param length A {@link Integer} value containing the number of credit history records to
* return.
* @param order A {@link CreditHistoryOrder} object indicating which order the results should
* be returned in.
* <p>Valid choices:</p>
* <ul>
* <li>{@link CreditHistoryOrder#kMostRecentFirst}</li>
* <li>{@link CreditHistoryOrder#kLeastRecentFirst}</li>
* </ul>
* @param callback A {@link BranchListResponseListener} callback instance that will trigger
* actions defined therein upon receipt of a response to a create link request.
*/
public void getCreditHistory(final String bucket, final String afterId, final int length,
@NonNull final CreditHistoryOrder order, BranchListResponseListener callback) {
ServerRequest req = new ServerRequestGetRewardHistory(context_, bucket, afterId, length, order, callback);
if (!req.constructError_ && !req.handleErrors(context_)) {
handleNewRequest(req);
}
}
/**
* <p>A void call to indicate that the user has performed a specific action and for that to be
* reported to the Branch API, with additional app-defined meta data to go along with that action.</p>
*
* @param action A {@link String} value to be passed as an action that the user has carried
* out. For example "registered" or "logged in".
* @param metadata A {@link JSONObject} containing app-defined meta-data to be attached to a
* user action that has just been completed.
*/
public void userCompletedAction(@NonNull final String action, JSONObject metadata) {
userCompletedAction(action, metadata, null);
}
/**
* <p>A void call to indicate that the user has performed a specific action and for that to be
* reported to the Branch API.</p>
*
* @param action A {@link String} value to be passed as an action that the user has carried
* out. For example "registered" or "logged in".
*/
public void userCompletedAction(final String action) {
userCompletedAction(action, null, null);
}
/**
* <p>A void call to indicate that the user has performed a specific action and for that to be
* reported to the Branch API.</p>
*
* @param action A {@link String} value to be passed as an action that the user has carried
* out. For example "registered" or "logged in".
* @param callback instance of {@link BranchViewHandler.IBranchViewEvents} to listen Branch view events
*/
public void userCompletedAction(final String action, BranchViewHandler.
IBranchViewEvents callback) {
userCompletedAction(action, null, callback);
}
/**
* <p>A void call to indicate that the user has performed a specific action and for that to be
* reported to the Branch API, with additional app-defined meta data to go along with that action.</p>
*
* @param action A {@link String} value to be passed as an action that the user has carried
* out. For example "registered" or "logged in".
* @param metadata A {@link JSONObject} containing app-defined meta-data to be attached to a
* user action that has just been completed.
* @param callback instance of {@link BranchViewHandler.IBranchViewEvents} to listen Branch view events
*/
public void userCompletedAction(@NonNull final String action, JSONObject
metadata, BranchViewHandler.IBranchViewEvents callback) {
if (metadata != null) {
metadata = BranchUtil.filterOutBadCharacters(metadata);
}
ServerRequest req = new ServerRequestActionCompleted(context_, action, metadata, callback);
if (!req.constructError_ && !req.handleErrors(context_)) {
handleNewRequest(req);
}
}
public void sendCommerceEvent(@NonNull CommerceEvent commerceEvent, JSONObject
metadata, BranchViewHandler.IBranchViewEvents callback) {
if (metadata != null) {
metadata = BranchUtil.filterOutBadCharacters(metadata);
}
ServerRequest req = new ServerRequestRActionCompleted(context_, commerceEvent, metadata, callback);
if (!req.constructError_ && !req.handleErrors(context_)) {
handleNewRequest(req);
}
}
    /**
     * Reports a commerce event to the Branch API with no additional metadata or callback.
     *
     * @param commerceEvent the {@link CommerceEvent} to report.
     */
    public void sendCommerceEvent(@NonNull CommerceEvent commerceEvent) {
        sendCommerceEvent(commerceEvent, null, null);
    }
/**
* <p>Returns the parameters associated with the link that referred the user. This is only set once,
* the first time the user is referred by a link. Think of this as the user referral parameters.
* It is also only set if isReferrable is equal to true, which by default is only true
* on a fresh install (not upgrade or reinstall). This will change on setIdentity (if the
* user already exists from a previous device) and logout.</p>
*
* @return A {@link JSONObject} containing the install-time parameters as configured
* locally.
*/
public JSONObject getFirstReferringParams() {
String storedParam = prefHelper_.getInstallParams();
JSONObject firstReferringParams = convertParamsStringToDictionary(storedParam);
firstReferringParams = appendDebugParams(firstReferringParams);
return firstReferringParams;
}
/**
* <p>This function must be called from a non-UI thread! If Branch has no install link data,
* and this func is called, it will return data upon initializing, or until LATCH_WAIT_UNTIL.
* Returns the parameters associated with the link that referred the user. This is only set once,
* the first time the user is referred by a link. Think of this as the user referral parameters.
* It is also only set if isReferrable is equal to true, which by default is only true
* on a fresh install (not upgrade or reinstall). This will change on setIdentity (if the
* user already exists from a previous device) and logout.</p>
*
* @return A {@link JSONObject} containing the install-time parameters as configured
* locally.
*/
public JSONObject getFirstReferringParamsSync() {
getFirstReferringParamsLatch = new CountDownLatch(1);
if (prefHelper_.getInstallParams().equals(PrefHelper.NO_STRING_VALUE)) {
try {
getFirstReferringParamsLatch.await(LATCH_WAIT_UNTIL, TimeUnit.MILLISECONDS);
} catch (InterruptedException e) {
}
}
String storedParam = prefHelper_.getInstallParams();
JSONObject firstReferringParams = convertParamsStringToDictionary(storedParam);
firstReferringParams = appendDebugParams(firstReferringParams);
getFirstReferringParamsLatch = null;
return firstReferringParams;
}
/**
* <p>Returns the parameters associated with the link that referred the session. If a user
* clicks a link, and then opens the app, initSession will return the parameters of the link
* and then set them in as the latest parameters to be retrieved by this method. By default,
* sessions persist for the duration of time that the app is in focus. For example, if you
* minimize the app, these parameters will be cleared when closeSession is called.</p>
*
* @return A {@link JSONObject} containing the latest referring parameters as
* configured locally.
*/
public JSONObject getLatestReferringParams() {
String storedParam = prefHelper_.getSessionParams();
JSONObject latestParams = convertParamsStringToDictionary(storedParam);
latestParams = appendDebugParams(latestParams);
return latestParams;
}
/**
* <p>This function must be called from a non-UI thread! If Branch has not been initialized
* and this func is called, it will return data upon initialization, or until LATCH_WAIT_UNTIL.
* Returns the parameters associated with the link that referred the session. If a user
* clicks a link, and then opens the app, initSession will return the parameters of the link
* and then set them in as the latest parameters to be retrieved by this method. By default,
* sessions persist for the duration of time that the app is in focus. For example, if you
* minimize the app, these parameters will be cleared when closeSession is called.</p>
*
* @return A {@link JSONObject} containing the latest referring parameters as
* configured locally.
*/
public JSONObject getLatestReferringParamsSync() {
getLatestReferringParamsLatch = new CountDownLatch(1);
try {
if (initState_ != SESSION_STATE.INITIALISED) {
getLatestReferringParamsLatch.await(LATCH_WAIT_UNTIL, TimeUnit.MILLISECONDS);
}
} catch (InterruptedException e) {
}
String storedParam = prefHelper_.getSessionParams();
JSONObject latestParams = convertParamsStringToDictionary(storedParam);
latestParams = appendDebugParams(latestParams);
getLatestReferringParamsLatch = null;
return latestParams;
}
/**
* Append the deep link debug params to the original params
*
* @param originalParams A {@link JSONObject} original referrer parameters
* @return A new {@link JSONObject} with debug params appended.
*/
private JSONObject appendDebugParams(JSONObject originalParams) {
try {
if (originalParams != null && deeplinkDebugParams_ != null) {
if (deeplinkDebugParams_.length() > 0) {
Log.w(TAG, "You're currently in deep link debug mode. Please comment out 'setDeepLinkDebugMode' to receive the deep link parameters from a real Branch link");
}
Iterator<String> keys = deeplinkDebugParams_.keys();
while (keys.hasNext()) {
String key = keys.next();
originalParams.put(key, deeplinkDebugParams_.get(key));
}
}
} catch (Exception ignore) {
}
return originalParams;
}
public JSONObject getDeeplinkDebugParams() {
if (deeplinkDebugParams_ != null && deeplinkDebugParams_.length() > 0) {
Log.w(TAG, "You're currently in deep link debug mode. Please comment out 'setDeepLinkDebugMode' to receive the deep link parameters from a real Branch link");
}
return deeplinkDebugParams_;
}
//-----------------Generate Short URL -------------------------------------------//
/**
* <p> Generates a shorl url for the given {@link ServerRequestCreateUrl} object </p>
*
* @param req An instance of {@link ServerRequestCreateUrl} with parameters create the short link.
* @return A url created with the given request if the request is synchronous else null.
* Note : This method can be used only internally. Use {@link BranchUrlBuilder} for creating short urls.
*/
String generateShortLinkInternal(ServerRequestCreateUrl req) {
if (!req.constructError_ && !req.handleErrors(context_)) {
if (linkCache_.containsKey(req.getLinkPost())) {
String url = linkCache_.get(req.getLinkPost());
req.onUrlAvailable(url);
return url;
} else {
if (req.isAsync()) {
generateShortLinkAsync(req);
} else {
return generateShortLinkSync(req);
}
}
}
return null;
}
/**
* <p>Creates options for sharing a link with other Applications. Creates a link with given attributes and shares with the
* user selected clients.</p>
*
* @param builder A {@link io.branch.referral.Branch.ShareLinkBuilder} instance to build share link.
*/
private void shareLink(ShareLinkBuilder builder) {
//Cancel any existing sharing in progress.
if (shareLinkManager_ != null) {
shareLinkManager_.cancelShareLinkDialog(true);
}
shareLinkManager_ = new ShareLinkManager();
shareLinkManager_.shareLink(builder);
}
/**
* <p>Cancel current share link operation and Application selector dialog. If your app is not using auto session management, make sure you are
* calling this method before your activity finishes inorder to prevent any window leak. </p>
*
* @param animateClose A {@link Boolean} to specify whether to close the dialog with an animation.
* A value of true will close the dialog with an animation. Setting this value
* to false will close the Dialog immediately.
*/
public void cancelShareLinkDialog(boolean animateClose) {
if (shareLinkManager_ != null) {
shareLinkManager_.cancelShareLinkDialog(animateClose);
}
}
// PRIVATE FUNCTIONS
private String convertDate(Date date) {
return android.text.format.DateFormat.format("yyyy-MM-dd", date).toString();
}
private String generateShortLinkSync(ServerRequestCreateUrl req) {
if (initState_ == SESSION_STATE.INITIALISED) {
ServerResponse response = null;
try {
int timeOut = prefHelper_.getTimeout() + 2000; // Time out is set to slightly more than link creation time to prevent any edge case
response = new getShortLinkTask().execute(req).get(timeOut, TimeUnit.MILLISECONDS);
} catch (InterruptedException | ExecutionException | TimeoutException ignore) {
}
String url = null;
if (req.isDefaultToLongUrl()) {
url = req.getLongUrl();
}
if (response != null && response.getStatusCode() == HttpURLConnection.HTTP_OK) {
try {
url = response.getObject().getString("url");
if (req.getLinkPost() != null) {
linkCache_.put(req.getLinkPost(), url);
}
} catch (JSONException e) {
e.printStackTrace();
}
}
return url;
} else {
Log.i("BranchSDK", "Branch Warning: User session has not been initialized");
}
return null;
}
    // Enqueues the URL-creation request for asynchronous processing; the result is
    // delivered to the caller through the request's own callback, not a return value.
    private void generateShortLinkAsync(final ServerRequest req) {
        handleNewRequest(req);
    }
private JSONObject convertParamsStringToDictionary(String paramString) {
if (paramString.equals(PrefHelper.NO_STRING_VALUE)) {
return new JSONObject();
} else {
try {
return new JSONObject(paramString);
} catch (JSONException e) {
byte[] encodedArray = Base64.decode(paramString.getBytes(), Base64.NO_WRAP);
try {
return new JSONObject(new String(encodedArray));
} catch (JSONException ex) {
ex.printStackTrace();
return new JSONObject();
}
}
}
}
/**
* <p>Schedules a repeating threaded task to get the following details and report them to the
* Branch API <b>once a week</b>:</p>
* <p/>
* <pre style="background:#fff;padding:10px;border:2px solid silver;">
* int interval = 7 * 24 * 60 * 60;
* appListingSchedule_ = scheduler.scheduleAtFixedRate(
* periodicTask, (days * 24 + hours) * 60 * 60, interval, TimeUnit.SECONDS);</pre>
* <p/>
* <ul>
* <li>{@link SystemObserver#getOS()}</li>
* <li>{@link SystemObserver#getListOfApps()}</li>
* </ul>
*
* @see {@link SystemObserver}
* @see {@link PrefHelper}
*/
private void scheduleListOfApps() {
ScheduledThreadPoolExecutor scheduler = (ScheduledThreadPoolExecutor) Executors.newScheduledThreadPool(1);
Runnable periodicTask = new Runnable() {
@Override
public void run() {
ServerRequest req = new ServerRequestSendAppList(context_);
if (!req.constructError_ && !req.handleErrors(context_)) {
handleNewRequest(req);
}
}
};
Date date = new Date();
Calendar calendar = GregorianCalendar.getInstance();
calendar.setTime(date);
int days = Calendar.SATURDAY - calendar.get(Calendar.DAY_OF_WEEK); // days to Saturday
int hours = 2 - calendar.get(Calendar.HOUR_OF_DAY); // hours to 2am, can be negative
if (days == 0 && hours < 0) {
days = 7;
}
int interval = 7 * 24 * 60 * 60;
appListingSchedule_ = scheduler.scheduleAtFixedRate(periodicTask, (days * 24 + hours) * 60 * 60, interval, TimeUnit.SECONDS);
}
    /**
     * Attempts to execute the next pending server request. Access to the single network
     * slot ({@code networkCount_}) is guarded by {@code serverSema_}; the actual network
     * work runs on a background {@code BranchPostTask}. Requests that need a session or
     * identity which is not yet available are failed with {@code ERR_NO_SESSION}.
     */
    private void processNextQueueItem() {
        try {
            serverSema_.acquire();
            if (networkCount_ == 0 && requestQueue_.getSize() > 0) {
                // Claim the network slot before releasing the semaphore so no other
                // caller can start a second concurrent request.
                networkCount_ = 1;
                ServerRequest req = requestQueue_.peek();
                serverSema_.release();
                if (req != null) {
                    if (!req.isWaitingOnProcessToFinish()) {
                        // All request except Install request need a valid IdentityID
                        if (!(req instanceof ServerRequestRegisterInstall) && !hasUser()) {
                            Log.i("BranchSDK", "Branch Error: User session has not been initialized!");
                            networkCount_ = 0;
                            handleFailure(requestQueue_.getSize() - 1, BranchError.ERR_NO_SESSION);
                        }
                        //All request except open and install need a session to execute
                        else if (!(req instanceof ServerRequestInitSession) && (!hasSession() || !hasDeviceFingerPrint())) {
                            networkCount_ = 0;
                            handleFailure(requestQueue_.getSize() - 1, BranchError.ERR_NO_SESSION);
                        } else {
                            BranchPostTask postTask = new BranchPostTask(req);
                            postTask.executeTask();
                        }
                    } else {
                        // Request is blocked on a process wait lock; free the slot and retry later.
                        networkCount_ = 0;
                    }
                } else {
                    requestQueue_.remove(null); //In case there is any request nullified remove it.
                }
            } else {
                serverSema_.release();
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
private void handleFailure(int index, int statusCode) {
ServerRequest req;
if (index >= requestQueue_.getSize()) {
req = requestQueue_.peekAt(requestQueue_.getSize() - 1);
} else {
req = requestQueue_.peekAt(index);
}
handleFailure(req, statusCode);
}
private void handleFailure(final ServerRequest req, int statusCode) {
if (req == null)
return;
req.handleFailure(statusCode, "");
}
private void updateAllRequestsInQueue() {
try {
for (int i = 0; i < requestQueue_.getSize(); i++) {
ServerRequest req = requestQueue_.peekAt(i);
if (req != null) {
JSONObject reqJson = req.getPost();
if (reqJson != null) {
if (reqJson.has(Defines.Jsonkey.SessionID.getKey())) {
req.getPost().put(Defines.Jsonkey.SessionID.getKey(), prefHelper_.getSessionID());
}
if (reqJson.has(Defines.Jsonkey.IdentityID.getKey())) {
req.getPost().put(Defines.Jsonkey.IdentityID.getKey(), prefHelper_.getIdentityID());
}
if (reqJson.has(Defines.Jsonkey.DeviceFingerprintID.getKey())) {
req.getPost().put(Defines.Jsonkey.DeviceFingerprintID.getKey(), prefHelper_.getDeviceFingerPrintID());
}
}
}
}
} catch (JSONException e) {
e.printStackTrace();
}
}
    // True once the server has issued a session ID for the current session.
    private boolean hasSession() {
        return !prefHelper_.getSessionID().equals(PrefHelper.NO_STRING_VALUE);
    }
    // True once the server has issued a device fingerprint ID for this device.
    private boolean hasDeviceFingerPrint() {
        return !prefHelper_.getDeviceFingerPrintID().equals(PrefHelper.NO_STRING_VALUE);
    }
    // True once the server has issued an identity ID for this user (i.e. the install was registered).
    private boolean hasUser() {
        return !prefHelper_.getIdentityID().equals(PrefHelper.NO_STRING_VALUE);
    }
private void insertRequestAtFront(ServerRequest req) {
if (networkCount_ == 0) {
requestQueue_.insert(req, 0);
} else {
requestQueue_.insert(req, 1);
}
}
private void registerInstallOrOpen(ServerRequest req, BranchReferralInitListener callback) {
// If there isn't already an Open / Install request, add one to the queue
if (!requestQueue_.containsInstallOrOpen()) {
insertRequestAtFront(req);
}
// If there is already one in the queue, make sure it's in the front.
// Make sure a callback is associated with this request. This callback can
// be cleared if the app is terminated while an Open/Install is pending.
else {
// Update the callback to the latest one in initsession call
if (callback != null) {
requestQueue_.setInstallOrOpenCallback(callback);
}
requestQueue_.moveInstallOrOpenToFront(req, networkCount_, callback);
}
processNextQueueItem();
}
    /**
     * Validates the Branch key and starts session initialisation. When the app may have been
     * opened via a Facebook deferred app link (and no external intent URI is present), the
     * init request is gated behind an FB app-link wait lock until that fetch completes.
     *
     * @param callback listener notified of init success or failure; may be null
     */
    private void initializeSession(final BranchReferralInitListener callback) {
        if ((prefHelper_.getBranchKey() == null || prefHelper_.getBranchKey().equalsIgnoreCase(PrefHelper.NO_STRING_VALUE))) {
            initState_ = SESSION_STATE.UNINITIALISED;
            //Report Key error on callback
            if (callback != null) {
                callback.onInitFinished(null, new BranchError("Trouble initializing Branch.", RemoteInterface.NO_BRANCH_KEY_STATUS));
            }
            Log.i("BranchSDK", "Branch Warning: Please enter your branch_key in your project's res/values/strings.xml!");
            return;
        } else if (prefHelper_.getBranchKey() != null && prefHelper_.getBranchKey().startsWith("key_test_")) {
            Log.i("BranchSDK", "Branch Warning: You are using your test app's Branch Key. Remember to change it to live Branch Key during deployment.");
        }
        if (!prefHelper_.getExternalIntentUri().equals(PrefHelper.NO_STRING_VALUE) || !enableFacebookAppLinkCheck_) {
            // Either a link already triggered this launch, or FB app-link checking is
            // disabled: initialise immediately without a wait lock.
            registerAppInit(callback, null);
        } else {
            // Check if opened by facebook with deferred install data
            boolean appLinkRqSucceeded;
            appLinkRqSucceeded = DeferredAppLinkDataHandler.fetchDeferredAppLinkData(context_, new DeferredAppLinkDataHandler.AppLinkFetchEvents() {
                @Override
                public void onAppLinkFetchFinished(String nativeAppLinkUrl) {
                    prefHelper_.setIsAppLinkTriggeredInit(true); // callback returns when app link fetch finishes with success or failure. Report app link checked in both cases
                    if (nativeAppLinkUrl != null) {
                        Uri appLinkUri = Uri.parse(nativeAppLinkUrl);
                        String bncLinkClickId = appLinkUri.getQueryParameter(Defines.Jsonkey.LinkClickID.getKey());
                        if (!TextUtils.isEmpty(bncLinkClickId)) {
                            prefHelper_.setLinkClickIdentifier(bncLinkClickId);
                        }
                    }
                    // Fetch done (success or failure): release the init request and resume the queue.
                    requestQueue_.unlockProcessWait(ServerRequest.PROCESS_WAIT_LOCK.FB_APP_LINK_WAIT_LOCK);
                    processNextQueueItem();
                }
            });
            if (appLinkRqSucceeded) {
                registerAppInit(callback, ServerRequest.PROCESS_WAIT_LOCK.FB_APP_LINK_WAIT_LOCK);
            } else {
                registerAppInit(callback, null);
            }
        }
    }
    /**
     * Builds and enqueues the session-initialisation request: an Open when an identity
     * already exists for this user, otherwise an Install. Wait locks are attached for any
     * asynchronous data (GAID fetch, pending intent, install referrer) that the request
     * must not be sent without.
     *
     * @param callback listener notified when init finishes; may be null
     * @param lock     an optional extra process wait lock to attach; may be null
     */
    private void registerAppInit(BranchReferralInitListener
                                         callback, ServerRequest.PROCESS_WAIT_LOCK lock) {
        ServerRequest request;
        if (hasUser()) {
            // If there is user this is open
            request = new ServerRequestRegisterOpen(context_, callback, kRemoteInterface_.getSystemObserver());
        } else {
            // If no user this is an Install
            request = new ServerRequestRegisterInstall(context_, callback, kRemoteInterface_.getSystemObserver(), InstallListener.getInstallationID());
        }
        request.addProcessWaitLock(lock);
        // Hold the request until the Google Advertising ID fetch finishes, if one is running.
        if (isGAParamsFetchInProgress_) {
            request.addProcessWaitLock(ServerRequest.PROCESS_WAIT_LOCK.GAID_FETCH_WAIT_LOCK);
        }
        // Hold the request until the launching intent has been processed.
        if (intentState_ != INTENT_STATE.READY) {
            request.addProcessWaitLock(ServerRequest.PROCESS_WAIT_LOCK.INTENT_PENDING_WAIT_LOCK);
        }
        // For fresh installs, optionally wait (bounded) for the Play Store install referrer.
        if (checkPlayStoreReferrer() && request instanceof ServerRequestRegisterInstall) {
            request.addProcessWaitLock(ServerRequest.PROCESS_WAIT_LOCK.INSTALL_REFERRER_FETCH_WAIT_LOCK);
            InstallListener.startInstallReferrerTime(PLAYSTORE_REFERRAL_FETCH_WAIT_FOR);
        }
        registerInstallOrOpen(request, callback);
    }
    /**
     * Called once the launching intent is safe to read. Releases the intent wait lock,
     * extracts any deep-link data from the intent, and then either starts cookie-based
     * strong matching (when configured) or resumes queue processing.
     */
    private void onIntentReady(Activity activity) {
        requestQueue_.unlockProcessWait(ServerRequest.PROCESS_WAIT_LOCK.INTENT_PENDING_WAIT_LOCK);
        if (activity.getIntent() != null) {
            Uri intentData = activity.getIntent().getData();
            // Presumably reads link params off the intent and strips them so they are not
            // re-processed — defined elsewhere in this class; confirm against its body.
            readAndStripParam(intentData, activity);
            if (cookieBasedMatchDomain_ != null && prefHelper_.getBranchKey() != null && !prefHelper_.getBranchKey().equalsIgnoreCase(PrefHelper.NO_STRING_VALUE)) {
                if (isGAParamsFetchInProgress_) {
                    // Wait for GAID to Available
                    performCookieBasedStrongMatchingOnGAIDAvailable = true;
                } else {
                    performCookieBasedStrongMatch();
                }
            } else {
                processNextQueueItem();
            }
        } else {
            processNextQueueItem();
        }
    }
    /**
     * Starts a cookie-based strong-match check against {@code cookieBasedMatchDomain_}.
     * Queue processing is paused via a strong-match wait lock until the check reports back;
     * the check is skipped entirely when no application context is available.
     */
    private void performCookieBasedStrongMatch() {
        boolean simulateInstall = (prefHelper_.getExternDebug() || isSimulatingInstalls());
        DeviceInfo deviceInfo = DeviceInfo.getInstance(simulateInstall, systemObserver_, disableDeviceIDFetch_);
        Activity currentActivity = null;
        if (currentActivityReference_ != null) {
            currentActivity = currentActivityReference_.get();
        }
        // An application context is required for the match check; bail out silently otherwise.
        Context context = (currentActivity != null) ? currentActivity.getApplicationContext() : null;
        if (context != null) {
            requestQueue_.setStrongMatchWaitLock();
            BranchStrongMatchHelper.getInstance().checkForStrongMatch(context, cookieBasedMatchDomain_, deviceInfo, prefHelper_, systemObserver_, new BranchStrongMatchHelper.StrongMatchCheckEvents() {
                @Override
                public void onStrongMatchCheckFinished() {
                    // Release the queue regardless of the match result.
                    requestQueue_.unlockProcessWait(ServerRequest.PROCESS_WAIT_LOCK.STRONG_MATCH_PENDING_WAIT_LOCK);
                    processNextQueueItem();
                }
            });
        }
    }
    /**
     * Handles execution of a new request other than open or install.
     * Checks for session initialisation and adds an Install/Open request in front of this
     * request if the request needs a session to execute. Logout and close-session requests
     * are rejected outright when no session exists, since initialising one just to tear it
     * down would be pointless.
     *
     * @param req The {@link ServerRequest} to execute
     */
    public void handleNewRequest(ServerRequest req) {
        //If not initialised put an open or install request in front of this request(only if this needs session)
        if (initState_ != SESSION_STATE.INITIALISED && !(req instanceof ServerRequestInitSession)) {
            if ((req instanceof ServerRequestLogout)) {
                req.handleFailure(BranchError.ERR_NO_SESSION, "");
                Log.i(TAG, "Branch is not initialized, cannot logout");
                return;
            }
            if ((req instanceof ServerRequestRegisterClose)) {
                Log.i(TAG, "Branch is not initialized, cannot close session");
                return;
            } else {
                Activity currentActivity = null;
                if (currentActivityReference_ != null) {
                    currentActivity = currentActivityReference_.get();
                }
                // Kick off session init first; the new request is enqueued behind it below.
                if (customReferrableSettings_ == CUSTOM_REFERRABLE_SETTINGS.USE_DEFAULT) {
                    initUserSessionInternal((BranchReferralInitListener) null, currentActivity, true);
                } else {
                    boolean isReferrable = customReferrableSettings_ == CUSTOM_REFERRABLE_SETTINGS.REFERRABLE;
                    initUserSessionInternal((BranchReferralInitListener) null, currentActivity, isReferrable);
                }
            }
        }
        requestQueue_.enqueue(req);
        req.onRequestQueued();
        processNextQueueItem();
    }
@TargetApi(Build.VERSION_CODES.ICE_CREAM_SANDWICH)
private void setActivityLifeCycleObserver(Application application) {
try {
BranchActivityLifeCycleObserver activityLifeCycleObserver = new BranchActivityLifeCycleObserver();
/* Set an observer for activity life cycle events. */
application.unregisterActivityLifecycleCallbacks(activityLifeCycleObserver);
application.registerActivityLifecycleCallbacks(activityLifeCycleObserver);
isActivityLifeCycleCallbackRegistered_ = true;
} catch (NoSuchMethodError | NoClassDefFoundError Ex) {
isActivityLifeCycleCallbackRegistered_ = false;
isAutoSessionMode_ = false;
/* LifeCycleEvents are available only from API level 14. */
Log.w(TAG, new BranchError("", BranchError.ERR_API_LVL_14_NEEDED).getMessage());
}
}
    /**
     * <p>Class that observes activity life cycle events and determines when to start and stop
     * session.</p>
     * A session is started when the first activity starts (or when a forced session restart is
     * requested via intent), and closed when the last activity stops.
     */
    @TargetApi(Build.VERSION_CODES.ICE_CREAM_SANDWICH)
    private class BranchActivityLifeCycleObserver implements Application.ActivityLifecycleCallbacks {
        private int activityCnt_ = 0; //Keep the count of live activities.
        @Override
        public void onActivityCreated(Activity activity, Bundle bundle) {
            // Intent data is only safe to read immediately unless delayed-intent handling is on.
            intentState_ = handleDelayedNewIntents_ ? INTENT_STATE.PENDING : INTENT_STATE.READY;
            if (BranchViewHandler.getInstance().isInstallOrOpenBranchViewPending(activity.getApplicationContext())) {
                BranchViewHandler.getInstance().showPendingBranchView(activity);
            }
        }
        @Override
        public void onActivityStarted(Activity activity) {
            intentState_ = handleDelayedNewIntents_ ? INTENT_STATE.PENDING : INTENT_STATE.READY;
            // If configured on dashboard, trigger content discovery runnable
            if (initState_ == SESSION_STATE.INITIALISED) {
                try {
                    ContentDiscoverer.getInstance().discoverContent(activity, sessionReferredLink_);
                } catch (Exception ignore) {
                }
            }
            if (activityCnt_ < 1) { // Check if this is the first Activity.If so start a session.
                if (initState_ == SESSION_STATE.INITIALISED) {
                    // Handling case : init session completed previously when app was in background.
                    initState_ = SESSION_STATE.UNINITIALISED;
                }
                // Check if debug mode is set in manifest. If so enable debug.
                if (BranchUtil.isTestModeEnabled(context_)) {
                    prefHelper_.setExternDebug();
                }
                prefHelper_.setLogging(getIsLogging());
                startSession(activity);
            } else if (checkIntentForSessionRestart(activity.getIntent())) { // Case of opening the app by clicking a push notification while app is in foreground
                initState_ = SESSION_STATE.UNINITIALISED;
                // no need call close here since it is session forced restart. Don't want to wait till close finish
                startSession(activity);
            }
            activityCnt_++;
        }
        @Override
        public void onActivityResumed(Activity activity) {
            // Need to check here again for session restart request in case the intent is created while the activity is already running
            if (checkIntentForSessionRestart(activity.getIntent())) {
                initState_ = SESSION_STATE.UNINITIALISED;
                startSession(activity);
            }
            // Track the foreground activity so other SDK components can reach it.
            currentActivityReference_ = new WeakReference<>(activity);
            if (handleDelayedNewIntents_) {
                intentState_ = INTENT_STATE.READY;
                onIntentReady(activity);
            }
        }
        @Override
        public void onActivityPaused(Activity activity) {
            /* Close any opened sharing dialog.*/
            if (shareLinkManager_ != null) {
                shareLinkManager_.cancelShareLinkDialog(true);
            }
        }
        @Override
        public void onActivityStopped(Activity activity) {
            ContentDiscoverer.getInstance().onActivityStopped(activity);
            activityCnt_--; // Check if this is the last activity. If so, stop the session.
            if (activityCnt_ < 1) {
                closeSessionInternal();
            }
        }
        @Override
        public void onActivitySaveInstanceState(Activity activity, Bundle bundle) {
        }
        @Override
        public void onActivityDestroyed(Activity activity) {
            // Drop the foreground-activity reference only if it still points at this activity.
            if (currentActivityReference_ != null && currentActivityReference_.get() == activity) {
                currentActivityReference_.clear();
            }
            BranchViewHandler.getInstance().onCurrentActivityDestroyed(activity);
        }
    }
private void startSession(Activity activity) {
Uri intentData = null;
if (activity.getIntent() != null) {
intentData = activity.getIntent().getData();
}
initSessionWithData(intentData, activity); // indicate starting of session.
}
/*
* Check for forced session restart. The Branch session is restarted if the incoming intent has branch_force_new_session set to true.
* This is for supporting opening a deep link path while app is already running in the foreground. Such as clicking push notification while app in foreground.
*
*/
private boolean checkIntentForSessionRestart(Intent intent) {
boolean isRestartSessionRequested = false;
if (intent != null) {
isRestartSessionRequested = intent.getBooleanExtra(Defines.Jsonkey.ForceNewBranchSession.getKey(), false);
if (isRestartSessionRequested) {
intent.putExtra(Defines.Jsonkey.ForceNewBranchSession.getKey(), false);
}
}
return isRestartSessionRequested;
}
    /**
     * <p>An Interface class that is implemented by all classes that make use of
     * {@link BranchReferralInitListener}, defining a single method that takes a list of params in
     * {@link JSONObject} format, and an error message of {@link BranchError} format that will be
     * returned on failure of the request response.</p>
     *
     * @see JSONObject
     * @see BranchError
     */
    public interface BranchReferralInitListener {
        // Called once per init attempt; error is null on success.
        void onInitFinished(JSONObject referringParams, BranchError error);
    }
    /**
     * <p>An Interface class that is implemented by all classes that make use of
     * {@link BranchUniversalReferralInitListener}, defining a single method that provides
     * {@link BranchUniversalObject}, {@link LinkProperties} and an error message of {@link BranchError} format that will be
     * returned on failure of the request response.
     * In case of an error the value for {@link BranchUniversalObject} and {@link LinkProperties} are set to null.</p>
     *
     * @see BranchUniversalObject
     * @see LinkProperties
     * @see BranchError
     */
    public interface BranchUniversalReferralInitListener {
        // Called once per init attempt; both objects are null when error is non-null.
        void onInitFinished(BranchUniversalObject branchUniversalObject, LinkProperties linkProperties, BranchError error);
    }
    /**
     * <p>An Interface class that is implemented by all classes that make use of
     * {@link BranchReferralStateChangedListener}, defining a single method that takes a value of
     * {@link Boolean} format, and an error message of {@link BranchError} format that will be
     * returned on failure of the request response.</p>
     *
     * @see Boolean
     * @see BranchError
     */
    public interface BranchReferralStateChangedListener {
        // changed indicates whether the referral state was actually modified; error is null on success.
        void onStateChanged(boolean changed, BranchError error);
    }
    /**
     * <p>An Interface class that is implemented by all classes that make use of
     * {@link BranchLinkCreateListener}, defining a single method that takes a URL
     * {@link String} format, and an error message of {@link BranchError} format that will be
     * returned on failure of the request response.</p>
     *
     * @see String
     * @see BranchError
     */
    public interface BranchLinkCreateListener {
        // url is the created link on success; error is null on success.
        void onLinkCreate(String url, BranchError error);
    }
    /**
     * <p>An Interface class that is implemented by all classes that make use of
     * {@link BranchLinkShareListener}, defining methods to listen for link sharing status.</p>
     */
    public interface BranchLinkShareListener {
        /**
         * <p> Callback method to update when share link dialog is launched.</p>
         */
        void onShareLinkDialogLaunched();
        /**
         * <p> Callback method to update when sharing dialog is dismissed.</p>
         */
        void onShareLinkDialogDismissed();
        /**
         * <p> Callback method to update the sharing status. Called on sharing completed or on error.</p>
         *
         * @param sharedLink    The link shared to the channel.
         * @param sharedChannel Channel selected for sharing.
         * @param error         A {@link BranchError} to update errors, if there is any; null on success.
         */
        void onLinkShareResponse(String sharedLink, String sharedChannel, BranchError error);
        /**
         * <p>Called when user select a channel for sharing a deep link.
         * Branch will create a deep link for the selected channel and share with it after calling this
         * method. On sharing complete, status is updated by onLinkShareResponse() callback. Consider
         * having a sharing in progress UI if you wish to prevent user activity in the window between selecting a channel
         * and sharing complete.</p>
         *
         * @param channelName Name of the selected application to share the link. An empty string is returned if unable to resolve selected client name.
         */
        void onChannelSelected(String channelName);
    }
    /**
     * <p>An interface class for customizing sharing properties with selected channel.</p>
     */
    public interface IChannelProperties {
        /**
         * @param channel The name of the channel selected for sharing.
         * @return {@link String} with value for the message title for sharing the link with the selected channel
         */
        String getSharingTitleForChannel(String channel);
        /**
         * @param channel The name of the channel selected for sharing.
         * @return {@link String} with value for the message body for sharing the link with the selected channel
         */
        String getSharingMessageForChannel(String channel);
    }
    /**
     * <p>An Interface class that is implemented by all classes that make use of
     * {@link BranchListResponseListener}, defining a single method that takes a list of
     * {@link JSONArray} format, and an error message of {@link BranchError} format that will be
     * returned on failure of the request response.</p>
     *
     * @see JSONArray
     * @see BranchError
     */
    public interface BranchListResponseListener {
        // list carries the response payload on success; error is null on success.
        void onReceivingResponse(JSONArray list, BranchError error);
    }
    /**
     * <p>
     * Callback interface for listening logout status
     * </p>
     */
    public interface LogoutStatusListener {
        /**
         * Called on finishing the logout process
         *
         * @param loggedOut A {@link Boolean} which is set to true if logout succeeded
         * @param error     An instance of {@link BranchError} to notify any error occurred during logout.
         *                  A null value is set if logout succeeded.
         */
        void onLogoutFinished(boolean loggedOut, BranchError error);
    }
    /**
     * <p>enum containing the sort options for return of credit history.</p>
     */
    public enum CreditHistoryOrder {
        kMostRecentFirst, kLeastRecentFirst // ordering applied to the returned credit-history list
    }
    /**
     * Async Task to create a short link for synchronous methods.
     * The caller blocks on {@code execute(req).get(timeout, ...)} — see generateShortLinkSync.
     * NOTE(review): the class name violates Java naming conventions (should be UpperCamelCase),
     * but renaming would touch call sites, so it is left as-is here.
     */
    private class getShortLinkTask extends AsyncTask<ServerRequest, Void, ServerResponse> {
        @Override
        protected ServerResponse doInBackground(ServerRequest... serverRequests) {
            // Runs the URL-creation call synchronously on the background thread.
            return kRemoteInterface_.createCustomUrlSync(serverRequests[0].getPost());
        }
    }
/**
* Asynchronous task handling execution of server requests. Execute the network task on background
* thread and request are executed in sequential manner. Handles the request execution in
* Synchronous-Asynchronous pattern. Should be invoked only form main thread and the results are
* published in the main thread.
*/
private class BranchPostTask extends BranchAsyncTask<Void, Void, ServerResponse> {
int timeOut_ = 0;
ServerRequest thisReq_;
public BranchPostTask(ServerRequest request) {
thisReq_ = request;
timeOut_ = prefHelper_.getTimeout();
}
@Override
protected void onPreExecute() {
super.onPreExecute();
thisReq_.onPreExecute();
}
@Override
protected ServerResponse doInBackground(Void... voids) {
if (thisReq_ instanceof ServerRequestInitSession) {
((ServerRequestInitSession) thisReq_).updateLinkReferrerParams();
}
//Update queue wait time
addExtraInstrumentationData(thisReq_.getRequestPath() + "-" + Defines.Jsonkey.Queue_Wait_Time.getKey(), String.valueOf(thisReq_.getQueueWaitTime()));
//Google ADs ID and LAT value are updated using reflection. These method need background thread
//So updating them for install and open on background thread.
if (thisReq_.isGAdsParamsRequired() && !BranchUtil.isTestModeEnabled(context_)) {
thisReq_.updateGAdsParams(systemObserver_);
}
if (thisReq_.isGetRequest()) {
return kRemoteInterface_.make_restful_get(thisReq_.getRequestUrl(), thisReq_.getGetParams(), thisReq_.getRequestPath(), timeOut_);
} else {
return kRemoteInterface_.make_restful_post(thisReq_.getPostWithInstrumentationValues(instrumentationExtraData_), thisReq_.getRequestUrl(), thisReq_.getRequestPath(), timeOut_);
}
}
@Override
protected void onPostExecute(ServerResponse serverResponse) {
super.onPostExecute(serverResponse);
if (serverResponse != null) {
try {
int status = serverResponse.getStatusCode();
hasNetwork_ = true;
//If the request is not succeeded
if (status != 200) {
//If failed request is an initialisation request then mark session not initialised
if (thisReq_ instanceof ServerRequestInitSession) {
initState_ = SESSION_STATE.UNINITIALISED;
}
// On a bad request notify with call back and remove the request.
if (status == 409) {
requestQueue_.remove(thisReq_);
if (thisReq_ instanceof ServerRequestCreateUrl) {
((ServerRequestCreateUrl) thisReq_).handleDuplicateURLError();
} else {
Log.i("BranchSDK", "Branch API Error: Conflicting resource error code from API");
handleFailure(0, status);
}
}
//On Network error or Branch is down fail all the pending requests in the queue except
//for request which need to be replayed on failure.
else {
hasNetwork_ = false;
//Collect all request from the queue which need to be failed.
ArrayList<ServerRequest> requestToFail = new ArrayList<>();
for (int i = 0; i < requestQueue_.getSize(); i++) {
requestToFail.add(requestQueue_.peekAt(i));
}
//Remove the requests from the request queue first
for (ServerRequest req : requestToFail) {
if (req == null || !req.shouldRetryOnFail()) { // Should remove any nullified request object also from queque
requestQueue_.remove(req);
}
}
// Then, set the network count to zero, indicating that requests can be started again.
networkCount_ = 0;
//Finally call the request callback with the error.
for (ServerRequest req : requestToFail) {
if (req != null) {
req.handleFailure(status, serverResponse.getFailReason());
//If request need to be replayed, no need for the callbacks
if (req.shouldRetryOnFail())
req.clearCallbacks();
}
}
}
}
//If the request succeeded
else {
hasNetwork_ = true;
//On create new url cache the url.
if (thisReq_ instanceof ServerRequestCreateUrl) {
if (serverResponse.getObject() != null) {
final String url = serverResponse.getObject().getString("url");
// cache the link
linkCache_.put(((ServerRequestCreateUrl) thisReq_).getLinkPost(), url);
}
}
//On Logout clear the link cache and all pending requests
else if (thisReq_ instanceof ServerRequestLogout) {
linkCache_.clear();
requestQueue_.clear();
}
requestQueue_.dequeue();
// If this request changes a session update the session-id to queued requests.
if (thisReq_ instanceof ServerRequestInitSession
|| thisReq_ instanceof ServerRequestIdentifyUserRequest) {
// Immediately set session and Identity and update the pending request with the params
JSONObject respJson = serverResponse.getObject();
if (respJson != null) {
boolean updateRequestsInQueue = false;
if (respJson.has(Defines.Jsonkey.SessionID.getKey())) {
prefHelper_.setSessionID(respJson.getString(Defines.Jsonkey.SessionID.getKey()));
updateRequestsInQueue = true;
}
if (respJson.has(Defines.Jsonkey.IdentityID.getKey())) {
String new_Identity_Id = respJson.getString(Defines.Jsonkey.IdentityID.getKey());
if (!prefHelper_.getIdentityID().equals(new_Identity_Id)) {
//On setting a new identity Id clear the link cache
linkCache_.clear();
prefHelper_.setIdentityID(respJson.getString(Defines.Jsonkey.IdentityID.getKey()));
updateRequestsInQueue = true;
}
}
if (respJson.has(Defines.Jsonkey.DeviceFingerprintID.getKey())) {
prefHelper_.setDeviceFingerPrintID(respJson.getString(Defines.Jsonkey.DeviceFingerprintID.getKey()));
updateRequestsInQueue = true;
}
if (updateRequestsInQueue) {
updateAllRequestsInQueue();
}
if (thisReq_ instanceof ServerRequestInitSession) {
initState_ = SESSION_STATE.INITIALISED;
thisReq_.onRequestSucceeded(serverResponse, branchReferral_);
// Publish success to listeners
isInitReportedThroughCallBack = ((ServerRequestInitSession) thisReq_).hasCallBack();
if (!((ServerRequestInitSession) thisReq_).handleBranchViewIfAvailable((serverResponse))) {
checkForAutoDeepLinkConfiguration();
}
// Count down the latch holding getLatestReferringParamsSync
if (getLatestReferringParamsLatch != null) {
getLatestReferringParamsLatch.countDown();
}
// Count down the latch holding getFirstReferringParamsSync
if (getFirstReferringParamsLatch != null) {
getFirstReferringParamsLatch.countDown();
}
} else {
// For setting identity just call only request succeeded
thisReq_.onRequestSucceeded(serverResponse, branchReferral_);
}
}
} else {
//Publish success to listeners
thisReq_.onRequestSucceeded(serverResponse, branchReferral_);
}
}
networkCount_ = 0;
if (hasNetwork_ && initState_ != SESSION_STATE.UNINITIALISED) {
processNextQueueItem();
}
} catch (JSONException ex) {
ex.printStackTrace();
}
}
}
}
//-------------------Auto deep link feature-------------------------------------------//
/**
 * <p>Checks whether the given activity was launched by the Branch auto deep link feature.
 * Branch launches activities configured for auto deep linking when it sees matching keys.
 * Keys for auto deep linking should be specified per activity as meta-data in the manifest.</p>
 * Configure your activity in your manifest to enable auto deep linking as follows
 * <!--
 * <activity android:name=".YourActivity">
 * <meta-data android:name="io.branch.sdk.auto_link" android:value="DeepLinkKey1","DeepLinkKey2" />
 * </activity>
 * -->
 *
 * @param activity Instance of activity to check if launched on auto deep link.
 * @return A {Boolean} value whose value is true if this activity is launched by Branch auto deeplink feature.
 */
public static boolean isAutoDeepLinkLaunch(Activity activity) {
    // Branch stamps this extra onto the launch intent when it auto deep links.
    String autoLinkMarker = activity.getIntent().getStringExtra(AUTO_DEEP_LINKED);
    return autoLinkMarker != null;
}
/**
 * Launches an activity configured for Branch auto deep linking, when applicable.
 * Reads the latest referring params; if the app was opened by clicking a Branch link
 * and a manifest-declared activity has matching auto-deep-link keys or path, starts
 * that activity (first match wins) with the referring params attached as extras.
 * All failures are logged or ignored; this method never throws to the caller.
 */
private void checkForAutoDeepLinkConfiguration() {
    JSONObject latestParams = getLatestReferringParams();
    String deepLinkActivity = null;
    try {
        //Check if the application is launched by clicking a Branch link.
        if (!latestParams.has(Defines.Jsonkey.Clicked_Branch_Link.getKey())
                || !latestParams.getBoolean(Defines.Jsonkey.Clicked_Branch_Link.getKey())) {
            return;
        }
        if (latestParams.length() > 0) {
            // Check if auto deep link is disabled (app-level manifest meta-data switch).
            ApplicationInfo appInfo = context_.getPackageManager().getApplicationInfo(context_.getPackageName(), PackageManager.GET_META_DATA);
            if (appInfo.metaData != null && appInfo.metaData.getBoolean(AUTO_DEEP_LINK_DISABLE, false)) {
                return;
            }
            // Scan every declared activity for auto-deep-link meta-data.
            PackageInfo info = context_.getPackageManager().getPackageInfo(context_.getPackageName(), PackageManager.GET_ACTIVITIES | PackageManager.GET_META_DATA);
            ActivityInfo[] activityInfos = info.activities;
            int deepLinkActivityReqCode = DEF_AUTO_DEEP_LINK_REQ_CODE;
            if (activityInfos != null) {
                for (ActivityInfo activityInfo : activityInfos) {
                    if (activityInfo != null && activityInfo.metaData != null && (activityInfo.metaData.getString(AUTO_DEEP_LINK_KEY) != null || activityInfo.metaData.getString(AUTO_DEEP_LINK_PATH) != null)) {
                        if (checkForAutoDeepLinkKeys(latestParams, activityInfo) || checkForAutoDeepLinkPath(latestParams, activityInfo)) {
                            // First matching activity wins; its request code may be customized.
                            deepLinkActivity = activityInfo.name;
                            deepLinkActivityReqCode = activityInfo.metaData.getInt(AUTO_DEEP_LINK_REQ_CODE, DEF_AUTO_DEEP_LINK_REQ_CODE);
                            break;
                        }
                    }
                }
            }
            if (deepLinkActivity != null && currentActivityReference_ != null) {
                Activity currentActivity = currentActivityReference_.get();
                if (currentActivity != null) {
                    Intent intent = new Intent(currentActivity, Class.forName(deepLinkActivity));
                    intent.putExtra(AUTO_DEEP_LINKED, "true");
                    // Put the raw JSON params as extra in case need to get the deep link params as JSON String
                    intent.putExtra(Defines.Jsonkey.ReferringData.getKey(), latestParams.toString());
                    // Add individual parameters in the data
                    Iterator<?> keys = latestParams.keys();
                    while (keys.hasNext()) {
                        String key = (String) keys.next();
                        intent.putExtra(key, latestParams.getString(key));
                    }
                    currentActivity.startActivityForResult(intent, deepLinkActivityReqCode);
                } else {
                    // This case should not happen. Adding a safe handling for any corner case
                    Log.w(TAG, "No activity reference to launch deep linked activity");
                }
            }
        }
    } catch (final PackageManager.NameNotFoundException e) {
        Log.i("BranchSDK", "Branch Warning: Please make sure Activity names set for auto deep link are correct!");
    } catch (ClassNotFoundException e) {
        Log.i("BranchSDK", "Branch Warning: Please make sure Activity names set for auto deep link are correct! Error while looking for activity " + deepLinkActivity);
    } catch (Exception ignore) {
        // Can get TransactionTooLarge Exception here if the Application info exceeds 1mb binder data limit. Usually results with manifest merge from SDKs
    }
}
/**
 * Returns {@code true} when any of the comma-separated auto-deep-link keys declared
 * in the activity's meta-data is present in the referring params.
 */
private boolean checkForAutoDeepLinkKeys(JSONObject params, ActivityInfo activityInfo) {
    String configuredKeys = activityInfo.metaData.getString(AUTO_DEEP_LINK_KEY);
    if (configuredKeys == null) {
        return false;
    }
    for (String candidateKey : configuredKeys.split(",")) {
        if (params.has(candidateKey)) {
            return true;
        }
    }
    return false;
}
/**
 * Returns {@code true} when the deep link path from the referring params matches one
 * of the comma-separated path templates declared in the activity's meta-data.
 * The Android-specific path key takes precedence over the generic one.
 */
private boolean checkForAutoDeepLinkPath(JSONObject params, ActivityInfo activityInfo) {
    String deepLinkPath = null;
    try {
        String androidPathKey = Defines.Jsonkey.AndroidDeepLinkPath.getKey();
        String genericPathKey = Defines.Jsonkey.DeepLinkPath.getKey();
        if (params.has(androidPathKey)) {
            deepLinkPath = params.getString(androidPathKey);
        } else if (params.has(genericPathKey)) {
            deepLinkPath = params.getString(genericPathKey);
        }
    } catch (JSONException ignored) {
        // No readable path in the referring params; nothing to match against.
    }
    String configuredPaths = activityInfo.metaData.getString(AUTO_DEEP_LINK_PATH);
    if (configuredPaths == null || deepLinkPath == null) {
        return false;
    }
    for (String templatePath : configuredPaths.split(",")) {
        if (pathMatch(templatePath.trim(), deepLinkPath)) {
            return true;
        }
    }
    return false;
}
/**
 * Segment-wise comparison of a deep link path against a template path.
 * Query strings (everything after '?') are stripped from both sides first.
 * A template segment containing '*' matches any target segment; otherwise the
 * segments must be equal. Paths with a different segment count never match.
 */
private boolean pathMatch(String templatePath, String path) {
    String[] templateSegments = templatePath.split("\\?")[0].split("/");
    String[] targetSegments = path.split("\\?")[0].split("/");
    if (templateSegments.length != targetSegments.length) {
        return false;
    }
    for (int i = 0; i < templateSegments.length; i++) {
        String expectedSegment = templateSegments[i];
        if (!expectedSegment.equals(targetSegments[i]) && !expectedSegment.contains("*")) {
            return false;
        }
    }
    return true;
}
/** Turns on install-simulation mode by raising the static flag. */
public static void enableSimulateInstalls() {
    isSimulatingInstalls_ = true;
}
/** Turns off install-simulation mode by clearing the static flag. */
public static void disableSimulateInstalls() {
    isSimulatingInstalls_ = false;
}
/** @return {@code true} if install-simulation mode is currently enabled. */
public static boolean isSimulatingInstalls() {
    return isSimulatingInstalls_;
}
/** Enables SDK logging by raising the static flag. */
public static void enableLogging() {
    isLogging_ = true;
}
/** Disables SDK logging by clearing the static flag. */
public static void disableLogging() {
    isLogging_ = false;
}
/** @return {@code true} if SDK logging is currently enabled. */
public static boolean getIsLogging() {
    return isLogging_;
}
//-------------------------- Branch Builders--------------------------------------//
/**
* <p> Class for building a share link dialog.This creates a chooser for selecting application for
* sharing a link created with given parameters. </p>
*/
public static class ShareLinkBuilder {
    // Host activity used to display the chooser dialog and to resolve resources.
    private final Activity activity_;
    // Branch singleton captured at construction time.
    private final Branch branch_;
    // Message and optional subject shared alongside the link.
    private String shareMsg_;
    private String shareSub_;
    private Branch.BranchLinkShareListener callback_ = null;
    private Branch.IChannelProperties channelPropertiesCallback_ = null;
    // Applications surfaced first in the chooser; others sit behind "More".
    private ArrayList<SharingHelper.SHARE_WITH> preferredOptions_;
    // Fallback URL shared when deep link creation fails.
    private String defaultURL_;
    //Customise more and copy url option
    private Drawable moreOptionIcon_;
    private String moreOptionText_;
    private Drawable copyUrlIcon_;
    private String copyURlText_;
    private String urlCopiedMessage_;
    private int styleResourceID_;
    private boolean setFullWidthStyle_;
    // -1 means "use the default divider height".
    private int dividerHeight = -1;
    private String sharingTitle = null;
    private View sharingTitleView = null;
    // Builder used to create the short link that gets shared.
    BranchShortLinkBuilder shortLinkBuilder_;
    // Package-name filters applied to the share sheet contents.
    private List<String> includeInShareSheet = new ArrayList<>();
    private List<String> excludeFromShareSheet = new ArrayList<>();

    /**
     * <p>Creates options for sharing a link with other Applications. Creates a builder for sharing the link with
     * user selected clients</p>
     *
     * @param activity   The {@link Activity} to show the dialog for choosing sharing application.
     * @param parameters A {@link JSONObject} value containing the deep link params.
     */
    public ShareLinkBuilder(Activity activity, JSONObject parameters) {
        this.activity_ = activity;
        this.branch_ = branchReferral_;
        shortLinkBuilder_ = new BranchShortLinkBuilder(activity);
        try {
            Iterator<String> keys = parameters.keys();
            while (keys.hasNext()) {
                String key = keys.next();
                // NOTE(review): a non-String value makes this cast throw; the exception is
                // swallowed below, silently dropping that and all remaining params — confirm intended.
                shortLinkBuilder_.addParameters(key, (String) parameters.get(key));
            }
        } catch (Exception ignore) {
        }
        // Chooser defaults; each can be overridden through the setters below.
        shareMsg_ = "";
        callback_ = null;
        channelPropertiesCallback_ = null;
        preferredOptions_ = new ArrayList<>();
        defaultURL_ = null;
        moreOptionIcon_ = BranchUtil.getDrawable(activity.getApplicationContext(), android.R.drawable.ic_menu_more);
        moreOptionText_ = "More...";
        copyUrlIcon_ = BranchUtil.getDrawable(activity.getApplicationContext(), android.R.drawable.ic_menu_save);
        copyURlText_ = "Copy link";
        urlCopiedMessage_ = "Copied link to clipboard!";
    }

    /**
     * <p>Creates options for sharing a link with other Applications. Creates a builder for sharing the link with
     * user selected clients</p>
     *
     * @param activity         The {@link Activity} to show the dialog for choosing sharing application.
     * @param shortLinkBuilder An instance of {@link BranchShortLinkBuilder} to create link to be shared
     */
    public ShareLinkBuilder(Activity activity, BranchShortLinkBuilder shortLinkBuilder) {
        this(activity, new JSONObject());
        shortLinkBuilder_ = shortLinkBuilder;
    }

    /**
     * <p>Sets the message to be shared with the link.</p>
     *
     * @param message A {@link String} to be shared with the link
     * @return A {@link io.branch.referral.Branch.ShareLinkBuilder} instance.
     */
    public ShareLinkBuilder setMessage(String message) {
        this.shareMsg_ = message;
        return this;
    }

    /**
     * <p>Sets the subject of this message. This will be added to Email and SMS Application capable of handling subject in the message.</p>
     *
     * @param subject A {@link String} subject of this message.
     * @return A {@link io.branch.referral.Branch.ShareLinkBuilder} instance.
     */
    public ShareLinkBuilder setSubject(String subject) {
        this.shareSub_ = subject;
        return this;
    }

    /**
     * <p>Adds the given tag to the iterable {@link Collection} of {@link String} tags associated with a deep
     * link.</p>
     *
     * @param tag A {@link String} to be added to the iterable {@link Collection} of {@link String} tags associated with a deep
     *            link.
     * @return A {@link io.branch.referral.Branch.ShareLinkBuilder} instance.
     */
    public ShareLinkBuilder addTag(String tag) {
        this.shortLinkBuilder_.addTag(tag);
        return this;
    }

    /**
     * <p>Adds the given tags to the iterable {@link Collection} of {@link String} tags associated with a deep
     * link.</p>
     *
     * @param tags A {@link java.util.List} of tags to be added to the iterable {@link Collection} of {@link String} tags associated with a deep
     *             link.
     * @return A {@link io.branch.referral.Branch.ShareLinkBuilder} instance.
     */
    public ShareLinkBuilder addTags(ArrayList<String> tags) {
        this.shortLinkBuilder_.addTags(tags);
        return this;
    }

    /**
     * <p>Adds a feature that make use of the link.</p>
     *
     * @param feature A {@link String} value identifying the feature that the link makes use of.
     *                Should not exceed 128 characters.
     * @return A {@link io.branch.referral.Branch.ShareLinkBuilder} instance.
     */
    public ShareLinkBuilder setFeature(String feature) {
        this.shortLinkBuilder_.setFeature(feature);
        return this;
    }

    /**
     * <p>Adds a stage application or user flow associated with this link.</p>
     *
     * @param stage A {@link String} value identifying the stage in an application or user flow
     *              process. Should not exceed 128 characters.
     * @return A {@link io.branch.referral.Branch.ShareLinkBuilder} instance.
     */
    public ShareLinkBuilder setStage(String stage) {
        this.shortLinkBuilder_.setStage(stage);
        return this;
    }

    /**
     * <p>Adds a callback to get the sharing status.</p>
     *
     * @param callback A {@link BranchLinkShareListener} instance for getting sharing status.
     * @return A {@link io.branch.referral.Branch.ShareLinkBuilder} instance.
     */
    public ShareLinkBuilder setCallback(BranchLinkShareListener callback) {
        this.callback_ = callback;
        return this;
    }

    /**
     * @param channelPropertiesCallback A {@link io.branch.referral.Branch.IChannelProperties} instance for customizing sharing properties for channels.
     * @return A {@link io.branch.referral.Branch.ShareLinkBuilder} instance.
     */
    public ShareLinkBuilder setChannelProperties(IChannelProperties channelPropertiesCallback) {
        this.channelPropertiesCallback_ = channelPropertiesCallback;
        return this;
    }

    /**
     * <p>Adds application to the preferred list of applications which are shown on share dialog.
     * Only these options will be visible when the application selector dialog launches. Other options can be
     * accessed by clicking "More"</p>
     *
     * @param preferredOption A list of applications to be added as preferred options on the app chooser.
     *                        Preferred applications are defined in {@link io.branch.referral.SharingHelper.SHARE_WITH}.
     * @return A {@link io.branch.referral.Branch.ShareLinkBuilder} instance.
     */
    public ShareLinkBuilder addPreferredSharingOption(SharingHelper.SHARE_WITH preferredOption) {
        this.preferredOptions_.add(preferredOption);
        return this;
    }

    /**
     * <p>Adds applications to the preferred list of applications which are shown on share dialog.
     * Only these options will be visible when the application selector dialog launches. Other options can be
     * accessed by clicking "More"</p>
     *
     * @param preferredOptions A list of applications to be added as preferred options on the app chooser.
     *                         Preferred applications are defined in {@link io.branch.referral.SharingHelper.SHARE_WITH}.
     * @return A {@link io.branch.referral.Branch.ShareLinkBuilder} instance.
     */
    public ShareLinkBuilder addPreferredSharingOptions(ArrayList<SharingHelper.SHARE_WITH> preferredOptions) {
        this.preferredOptions_.addAll(preferredOptions);
        return this;
    }

    /**
     * Add the given key value to the deep link parameters
     *
     * @param key   A {@link String} with value for the key for the deep link params
     * @param value A {@link String} with deep link parameters value
     * @return A {@link io.branch.referral.Branch.ShareLinkBuilder} instance.
     */
    public ShareLinkBuilder addParam(String key, String value) {
        try {
            this.shortLinkBuilder_.addParameters(key, value);
        } catch (Exception ignore) {
            // Best-effort: a parameter that cannot be added is silently skipped.
        }
        return this;
    }

    /**
     * <p> Set a default url to share in case there is any error creating the deep link </p>
     *
     * @param url A {@link String} with value of default url to be shared with the selected application in case deep link creation fails.
     * @return A {@link io.branch.referral.Branch.ShareLinkBuilder} instance.
     */
    public ShareLinkBuilder setDefaultURL(String url) {
        defaultURL_ = url;
        return this;
    }

    /**
     * <p> Set the icon and label for the option to expand the application list to see more options.
     * Default label is set to "More" </p>
     *
     * @param icon  Drawable to set as the icon for more option. Default icon is system menu_more icon.
     * @param label A {@link String} with value for the more option label. Default label is "More"
     * @return A {@link io.branch.referral.Branch.ShareLinkBuilder} instance.
     */
    public ShareLinkBuilder setMoreOptionStyle(Drawable icon, String label) {
        moreOptionIcon_ = icon;
        moreOptionText_ = label;
        return this;
    }

    /**
     * <p> Set the icon and label for the option to expand the application list to see more options.
     * Default label is set to "More" </p>
     *
     * @param drawableIconID Resource ID for the drawable to set as the icon for more option. Default icon is system menu_more icon.
     * @param stringLabelID  Resource ID for String label for the more option. Default label is "More"
     * @return A {@link io.branch.referral.Branch.ShareLinkBuilder} instance.
     */
    public ShareLinkBuilder setMoreOptionStyle(int drawableIconID, int stringLabelID) {
        moreOptionIcon_ = BranchUtil.getDrawable(activity_.getApplicationContext(), drawableIconID);
        moreOptionText_ = activity_.getResources().getString(stringLabelID);
        return this;
    }

    /**
     * <p> Set the icon, label and success message for copy url option. Default label is "Copy link".</p>
     *
     * @param icon    Drawable to set as the icon for copy url option. Default icon is system menu_save icon
     * @param label   A {@link String} with value for the copy url option label. Default label is "Copy link"
     * @param message A {@link String} with value for a toast message displayed on copying a url.
     *                Default message is "Copied link to clipboard!"
     * @return A {@link io.branch.referral.Branch.ShareLinkBuilder} instance.
     */
    public ShareLinkBuilder setCopyUrlStyle(Drawable icon, String label, String message) {
        copyUrlIcon_ = icon;
        copyURlText_ = label;
        urlCopiedMessage_ = message;
        return this;
    }

    /**
     * <p> Set the icon, label and success message for copy url option. Default label is "Copy link".</p>
     *
     * @param drawableIconID  Resource ID for the drawable to set as the icon for copy url option. Default icon is system menu_save icon
     * @param stringLabelID   Resource ID for the string label the copy url option. Default label is "Copy link"
     * @param stringMessageID Resource ID for the string message to show toast message displayed on copying a url
     * @return A {@link io.branch.referral.Branch.ShareLinkBuilder} instance.
     */
    public ShareLinkBuilder setCopyUrlStyle(int drawableIconID, int stringLabelID, int stringMessageID) {
        copyUrlIcon_ = BranchUtil.getDrawable(activity_.getApplicationContext(), drawableIconID);
        copyURlText_ = activity_.getResources().getString(stringLabelID);
        urlCopiedMessage_ = activity_.getResources().getString(stringMessageID);
        return this;
    }

    /**
     * <p> Sets the alias for this link. </p>
     *
     * @param alias Link 'alias' can be used to label the endpoint on the link.
     *              <p>
     *              For example:
     *              http://bnc.lt/AUSTIN28.
     *              Should not exceed 128 characters
     *              </p>
     * @return This Builder object to allow for chaining of calls to set methods.
     */
    public ShareLinkBuilder setAlias(String alias) {
        this.shortLinkBuilder_.setAlias(alias);
        return this;
    }

    /**
     * <p> Sets the amount of time that Branch allows a click to remain outstanding.</p>
     *
     * @param matchDuration A {@link Integer} value specifying the time that Branch allows a click to
     *                      remain outstanding and be eligible to be matched with a new app session.
     * @return This Builder object to allow for chaining of calls to set methods.
     */
    public ShareLinkBuilder setMatchDuration(int matchDuration) {
        this.shortLinkBuilder_.setDuration(matchDuration);
        return this;
    }

    /**
     * <p>
     * Sets the share dialog to full width mode. Full width mode will show a non modal sheet with entire screen width.
     * </p>
     *
     * @param setFullWidthStyle {@link Boolean} With value true if a full width style share sheet is desired.
     * @return This Builder object to allow for chaining of calls to set methods.
     */
    public ShareLinkBuilder setAsFullWidthStyle(boolean setFullWidthStyle) {
        this.setFullWidthStyle_ = setFullWidthStyle;
        return this;
    }

    /**
     * Set the height for the divider for the sharing channels in the list. Set this to zero to remove the dividers
     *
     * @param height The new height of the divider in pixels.
     * @return this Builder object to allow for chaining of calls to set methods.
     */
    public ShareLinkBuilder setDividerHeight(int height) {
        this.dividerHeight = height;
        return this;
    }

    /**
     * Set the title for the sharing dialog
     *
     * @param title {@link String} containing the value for the title text.
     * @return this Builder object to allow for chaining of calls to set methods.
     */
    public ShareLinkBuilder setSharingTitle(String title) {
        this.sharingTitle = title;
        return this;
    }

    /**
     * Set the title for the sharing dialog
     *
     * @param titleView {@link View} for setting the title.
     * @return this Builder object to allow for chaining of calls to set methods.
     */
    public ShareLinkBuilder setSharingTitle(View titleView) {
        this.sharingTitleView = titleView;
        return this;
    }

    /**
     * Exclude items from the ShareSheet by package name String.
     *
     * @param packageName {@link String} package name to be excluded.
     * @return this Builder object to allow for chaining of calls to set methods.
     */
    public ShareLinkBuilder excludeFromShareSheet(@NonNull String packageName) {
        this.excludeFromShareSheet.add(packageName);
        return this;
    }

    /**
     * Exclude items from the ShareSheet by package name array.
     *
     * @param packageName {@link String[]} package name to be excluded.
     * @return this Builder object to allow for chaining of calls to set methods.
     */
    public ShareLinkBuilder excludeFromShareSheet(@NonNull String[] packageName) {
        this.excludeFromShareSheet.addAll(Arrays.asList(packageName));
        return this;
    }

    /**
     * Exclude items from the ShareSheet by package name List.
     *
     * @param packageNames {@link List} package name to be excluded.
     * @return this Builder object to allow for chaining of calls to set methods.
     */
    public ShareLinkBuilder excludeFromShareSheet(@NonNull List<String> packageNames) {
        this.excludeFromShareSheet.addAll(packageNames);
        return this;
    }

    /**
     * Include items from the ShareSheet by package name String. If only "com.Slack"
     * is included, then only preferred sharing options + Slack
     * will be displayed, for example.
     *
     * @param packageName {@link String} package name to be included.
     * @return this Builder object to allow for chaining of calls to set methods.
     */
    public ShareLinkBuilder includeInShareSheet(@NonNull String packageName) {
        this.includeInShareSheet.add(packageName);
        return this;
    }

    /**
     * Include items from the ShareSheet by package name Array. If only "com.Slack"
     * is included, then only preferred sharing options + Slack
     * will be displayed, for example.
     *
     * @param packageName {@link String[]} package name to be included.
     * @return this Builder object to allow for chaining of calls to set methods.
     */
    public ShareLinkBuilder includeInShareSheet(@NonNull String[] packageName) {
        this.includeInShareSheet.addAll(Arrays.asList(packageName));
        return this;
    }

    /**
     * Include items from the ShareSheet by package name List. If only "com.Slack"
     * is included, then only preferred sharing options + Slack
     * will be displayed, for example.
     *
     * @param packageNames {@link List} package name to be included.
     * @return this Builder object to allow for chaining of calls to set methods.
     */
    public ShareLinkBuilder includeInShareSheet(@NonNull List<String> packageNames) {
        this.includeInShareSheet.addAll(packageNames);
        return this;
    }

    /**
     * <p> Set the given style to the List View showing the share sheet</p>
     *
     * @param resourceID A Styleable resource to be applied to the share sheet list view
     */
    public void setStyleResourceID(@StyleRes int resourceID) {
        styleResourceID_ = resourceID;
    }

    /** Internal hook: replaces the underlying short link builder wholesale. */
    public void setShortLinkBuilderInternal(BranchShortLinkBuilder shortLinkBuilder) {
        this.shortLinkBuilder_ = shortLinkBuilder;
    }

    /**
     * <p>Creates an application selector dialog and share a link with user selected sharing option.
     * The link is created with the parameters provided to the builder. </p>
     */
    public void shareLink() {
        branchReferral_.shareLink(this);
    }

    /** @return the {@link Activity} hosting the share dialog. */
    public Activity getActivity() {
        return activity_;
    }

    /** @return the preferred sharing options shown first in the chooser. */
    public ArrayList<SharingHelper.SHARE_WITH> getPreferredOptions() {
        return preferredOptions_;
    }

    /** @return package names excluded from the share sheet. */
    List<String> getExcludedFromShareSheet() {
        return excludeFromShareSheet;
    }

    /** @return package names explicitly included in the share sheet. */
    List<String> getIncludedInShareSheet() {
        return includeInShareSheet;
    }

    /** @return the {@link Branch} instance captured at construction. */
    public Branch getBranch() {
        return branch_;
    }

    /** @return the message shared alongside the link. */
    public String getShareMsg() {
        return shareMsg_;
    }

    /** @return the subject shared alongside the link, or {@code null}. */
    public String getShareSub() {
        return shareSub_;
    }

    /** @return the share-status callback, or {@code null}. */
    public BranchLinkShareListener getCallback() {
        return callback_;
    }

    /** @return the per-channel properties callback, or {@code null}. */
    public IChannelProperties getChannelPropertiesCallback() {
        return channelPropertiesCallback_;
    }

    /** @return the fallback URL used when link creation fails, or {@code null}. */
    public String getDefaultURL() {
        return defaultURL_;
    }

    /** @return icon for the "More..." chooser option. */
    public Drawable getMoreOptionIcon() {
        return moreOptionIcon_;
    }

    /** @return label for the "More..." chooser option. */
    public String getMoreOptionText() {
        return moreOptionText_;
    }

    /** @return icon for the "Copy link" chooser option. */
    public Drawable getCopyUrlIcon() {
        return copyUrlIcon_;
    }

    /** @return label for the "Copy link" chooser option. */
    public String getCopyURlText() {
        return copyURlText_;
    }

    /** @return toast message shown after copying the URL. */
    public String getUrlCopiedMessage() {
        return urlCopiedMessage_;
    }

    /** @return the underlying short link builder. */
    public BranchShortLinkBuilder getShortLinkBuilder() {
        return shortLinkBuilder_;
    }

    /** @return whether the share sheet uses the full-width style. */
    public boolean getIsFullWidthStyle() {
        return setFullWidthStyle_;
    }

    /** @return divider height in pixels, or -1 for the default. */
    public int getDividerHeight() {
        return dividerHeight;
    }

    /** @return the sharing dialog title text, or {@code null}. */
    public String getSharingTitle() {
        return sharingTitle;
    }

    /** @return the sharing dialog title view, or {@code null}. */
    public View getSharingTitleView() {
        return sharingTitleView;
    }

    /** @return the style resource applied to the share sheet list view. */
    public int getStyleResourceID() {
        return styleResourceID_;
    }
}
//------------------------ Content Indexing methods----------------------//
/**
 * Registers a content-view event for the given universal object with the Branch
 * servers, provided the SDK has a context and the request constructs without error.
 */
public void registerView(BranchUniversalObject branchUniversalObject, BranchUniversalObject.RegisterViewStatusListener callback) {
    if (context_ == null) {
        return;
    }
    ServerRequest registerViewRequest = new ServerRequestRegisterView(context_, branchUniversalObject, systemObserver_, callback);
    if (!registerViewRequest.constructError_ && !registerViewRequest.handleErrors(context_)) {
        handleNewRequest(registerViewRequest);
    }
}
///-------Instrumentation additional data---------------///
/**
 * Update the extra instrumentation data provided to Branch
 *
 * @param instrumentationData A {@link HashMap} with key value pairs for instrumentation data.
 */
public void addExtraInstrumentationData(HashMap<String, String> instrumentationData) {
    // Merge all entries; values for already-present keys are overwritten.
    instrumentationExtraData_.putAll(instrumentationData);
}
/**
 * Update the extra instrumentation data provided to Branch
 *
 * @param key   A {@link String} Value for instrumentation data key
 * @param value A {@link String} Value for instrumentation data value
 */
public void addExtraInstrumentationData(String key, String value) {
    // Single-entry variant; overwrites any existing value for the key.
    instrumentationExtraData_.put(key, value);
}
//-------------------- Branch view handling--------------------//
/** Branch view lifecycle callback; intentionally a no-op. */
@Override
public void onBranchViewVisible(String action, String branchViewID) {
    //No Implementation on purpose
}
/** Branch view accepted: re-run auto deep link handling if the view belonged to session init. */
@Override
public void onBranchViewAccepted(String action, String branchViewID) {
    if (ServerRequestInitSession.isInitSessionAction(action)) {
        checkForAutoDeepLinkConfiguration();
    }
}
/** Branch view cancelled: still re-run auto deep link handling for session-init actions. */
@Override
public void onBranchViewCancelled(String action, String branchViewID) {
    if (ServerRequestInitSession.isInitSessionAction(action)) {
        checkForAutoDeepLinkConfiguration();
    }
}
/** Branch view error: auto deep link handling proceeds regardless for session-init actions. */
@Override
public void onBranchViewError(int errorCode, String errorMsg, String action) {
    if (ServerRequestInitSession.isInitSessionAction(action)) {
        checkForAutoDeepLinkConfiguration();
    }
}
/**
 * Interface for defining optional Branch view behaviour for Activities
 */
public interface IBranchViewControl {
    /**
     * Defines whether an activity is interested in showing Branch views.
     * By default activities are considered Branch-view enabled. Activities that do not want to
     * show a Branch view (a splash screen, for example) should implement this and return true.
     * The pending Branch view will then be shown with the very next Branch-view-enabled activity.
     *
     * @return A {@link Boolean} whose value is true if the activity doesn't want to show any Branch view.
     */
    boolean skipBranchViewsOnThisActivity();
}
///----------------- Instant App support--------------------------//
// Application context for which the instant-app probe below was last run.
private static Context lastApplicationContext = null;
// Tri-state cache: null = not probed yet; otherwise the memoized probe result.
private static Boolean isInstantApp = null;
/**
 * Checks if this is an Instant app instance.
 *
 * @param context Current {@link Context}
 * @return {@code true} if current application is an instance of instant app
 */
public static boolean isInstantApp(@NonNull Context context) {
    try {
        Context applicationContext = context.getApplicationContext();
        boolean cacheHit = isInstantApp != null && applicationContext.equals(lastApplicationContext);
        if (cacheHit) {
            return isInstantApp;
        }
        // Re-probe: the app is an instant app iff the instant-apps runtime class is loadable.
        isInstantApp = null;
        lastApplicationContext = applicationContext;
        applicationContext.getClassLoader().loadClass("com.google.android.instantapps.supervisor.InstantAppsRuntime");
        isInstantApp = true;
    } catch (Exception ex) {
        // Class not found (or any other failure) means this is not an instant app.
        isInstantApp = false;
    }
    return isInstantApp;
}
/**
 * Method shows play store install prompt for the full app. This passes the referrer to the installed application. The same deep link params as the instant app are provided to the
 * full app up on Branch#initSession()
 *
 * @param activity    Current activity
 * @param requestCode Request code for the activity to receive the result
 * @return {@code true} if install prompt is shown to user
 */
public static boolean showInstallPrompt(@NonNull Activity activity, int requestCode) {
    String installReferrerString = "";
    if (Branch.getInstance() != null) {
        JSONObject latestReferringParams = Branch.getInstance().getLatestReferringParams();
        // Forward the referring link (if any) so the full app can resume the deep link on install.
        String referringLinkKey = "~" + Defines.Jsonkey.ReferringLink.getKey();
        if (latestReferringParams != null && latestReferringParams.has(referringLinkKey)) {
            try {
                String referringLink = latestReferringParams.getString(referringLinkKey);
                installReferrerString = Defines.Jsonkey.IsFullAppConv.getKey() + "=true&" + Defines.Jsonkey.ReferringLink.getKey() + "=" + referringLink;
            } catch (JSONException e) {
                e.printStackTrace();
            }
        }
    }
    return showInstallPrompt(activity, requestCode, installReferrerString);
}
/**
 * Method shows play store install prompt for the full app. Use this method only if you have custom parameters to pass to the full app using referrer else use
 * {@link #showInstallPrompt(Activity, int)}
 *
 * @param activity    Current activity
 * @param requestCode Request code for the activity to receive the result
 * @param referrer    Any custom referrer string to pass to full app (must be of format "referrer_key1=referrer_value1%26referrer_key2=referrer_value2")
 * @return {@code true} if install prompt is shown to user
 */
public static boolean showInstallPrompt(@NonNull Activity activity, int requestCode, @Nullable String referrer) {
    String installReferrerString = Defines.Jsonkey.IsFullAppConv.getKey() + "=true&" + referrer;
    // BUG FIX: this previously called showInstallPrompt(activity, requestCode, installReferrerString),
    // i.e. this very overload, causing unconditional infinite recursion (StackOverflowError).
    // Delegate to the private implementation that actually launches the Play Store prompt.
    return doShowInstallPrompt(activity, requestCode, installReferrerString);
}
/**
 * Method shows play store install prompt for the full app. Use this method only if you want the full app to receive a custom {@link BranchUniversalObject} to do deferred deep link.
 * Please see {@link #showInstallPrompt(Activity, int)}
 * NOTE :
 * This method will do a synchronous generation of Branch short link for the BUO. So please consider calling this method on non UI thread
 * Please make sure your instant app and full ap are using same Branch key in order for the deferred deep link working
 *
 * @param activity    Current activity
 * @param requestCode Request code for the activity to receive the result
 * @param buo         {@link BranchUniversalObject} to pass to the full app up on install
 * @return {@code true} if install prompt is shown to user
 */
public static boolean showInstallPrompt(@NonNull Activity activity, int requestCode, @NonNull BranchUniversalObject buo) {
    // NOTE(review): defensive null check despite the @NonNull contract — kept as-is.
    if (buo != null) {
        // Synchronous short link generation; see the UI-thread warning in the Javadoc above.
        String shortUrl = buo.getShortUrl(activity, new LinkProperties());
        String installReferrerString = Defines.Jsonkey.ReferringLink.getKey() + "=" + shortUrl;
        // NOTE(review): installReferrerString always contains the key prefix, so the else
        // branch below looks unreachable; confirm whether a null/empty shortUrl was meant
        // to be checked here instead.
        if (!TextUtils.isEmpty(installReferrerString)) {
            return showInstallPrompt(activity, requestCode, installReferrerString);
        } else {
            return showInstallPrompt(activity, requestCode, "");
        }
    }
    return false;
}
/**
 * Launches the Play Store "details" page for this package so the user can install the
 * full app, optionally attaching an install referrer query parameter.
 *
 * @param activity    Current activity used to launch the Play Store intent
 * @param requestCode Request code for the activity to receive the result
 * @param referrer    Optional referrer string appended as the "referrer" query parameter
 * @return {@code true} if the install prompt was shown, {@code false} on precondition failure
 */
private static boolean doShowInstallPrompt(@NonNull Activity activity, int requestCode, @Nullable String referrer) {
    // Guard clauses: need a live activity and must be running as an instant app.
    if (activity == null) {
        Log.e("BranchSDK", "Unable to show install prompt. Activity is null");
        return false;
    }
    if (!isInstantApp(activity)) {
        Log.e("BranchSDK", "Unable to show install prompt. Application is not an instant app");
        return false;
    }
    // market://details?id=<package>[&referrer=<referrer>]
    Uri.Builder marketUri = new Uri.Builder()
            .scheme("market")
            .authority("details")
            .appendQueryParameter("id", activity.getPackageName());
    if (!TextUtils.isEmpty(referrer)) {
        marketUri.appendQueryParameter("referrer", referrer);
    }
    Intent playStoreIntent = new Intent("android.intent.action.VIEW");
    playStoreIntent.setPackage("com.android.vending");
    playStoreIntent.addCategory("android.intent.category.DEFAULT");
    playStoreIntent.putExtra("callerId", activity.getPackageName());
    playStoreIntent.putExtra("overlay", true);
    playStoreIntent.setData(marketUri.build());
    activity.startActivityForResult(playStoreIntent, requestCode);
    return true;
}
}
|
Branch-SDK/src/io/branch/referral/Branch.java
|
package io.branch.referral;
import android.annotation.TargetApi;
import android.app.Activity;
import android.app.Application;
import android.content.Context;
import android.content.Intent;
import android.content.pm.ActivityInfo;
import android.content.pm.ApplicationInfo;
import android.content.pm.PackageInfo;
import android.content.pm.PackageManager;
import android.content.res.Resources;
import android.graphics.drawable.Drawable;
import android.net.Uri;
import android.os.AsyncTask;
import android.os.Build;
import android.os.Bundle;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.annotation.StyleRes;
import android.text.TextUtils;
import android.util.Log;
import android.view.View;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.lang.ref.WeakReference;
import java.net.HttpURLConnection;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Collection;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import io.branch.indexing.BranchUniversalObject;
import io.branch.indexing.ContentDiscoverer;
import io.branch.referral.util.CommerceEvent;
import io.branch.referral.util.LinkProperties;
/**
* <p>
* The core object required when using Branch SDK. You should declare an object of this type at
* the class-level of each Activity or Fragment that you wish to use Branch functionality within.
* </p>
* <p>
* Normal instantiation of this object would look like this:
* </p>
* <!--
* <pre style="background:#fff;padding:10px;border:2px solid silver;">
* Branch.getInstance(this.getApplicationContext()) // from an Activity
* Branch.getInstance(getActivity().getApplicationContext()) // from a Fragment
* </pre>
* -->
*/
public class Branch implements BranchViewHandler.IBranchViewEvents, SystemObserver.GAdsParamsFetchEvents, InstallListener.IInstallReferrerEvents {
private static final String TAG = "BranchSDK";
/**
* Hard-coded {@link String} that denotes a {@link BranchLinkData#tags}; applies to links that
* are shared with others directly as a user action, via social media for instance.
*/
public static final String FEATURE_TAG_SHARE = "share";
/**
* Hard-coded {@link String} that denotes a 'referral' tag; applies to links that are associated
* with a referral program, incentivized or not.
*/
public static final String FEATURE_TAG_REFERRAL = "referral";
/**
* Hard-coded {@link String} that denotes a 'referral' tag; applies to links that are sent as
* referral actions by users of an app using an 'invite contacts' feature for instance.
*/
public static final String FEATURE_TAG_INVITE = "invite";
/**
* Hard-coded {@link String} that denotes a link that is part of a commercial 'deal' or offer.
*/
public static final String FEATURE_TAG_DEAL = "deal";
/**
* Hard-coded {@link String} that denotes a link tagged as a gift action within a service or
* product.
*/
public static final String FEATURE_TAG_GIFT = "gift";
/**
* The code to be passed as part of a deal or gift; retrieved from the Branch object as a
* tag upon initialisation. Of {@link String} format.
*/
public static final String REDEEM_CODE = "$redeem_code";
/**
* <p>Default value of referral bucket; referral buckets contain credits that are used when users
* are referred to your apps. These can be viewed in the Branch dashboard under Referrals.</p>
*/
public static final String REFERRAL_BUCKET_DEFAULT = "default";
/**
* <p>Hard-coded value for referral code type. Referral codes will always result on "credit" actions.
* Even if they are of 0 value.</p>
*/
public static final String REFERRAL_CODE_TYPE = "credit";
/**
* Branch SDK version for the current release of the Branch SDK.
*/
public static final int REFERRAL_CREATION_SOURCE_SDK = 2;
/**
* Key value for referral code as a parameter.
*/
public static final String REFERRAL_CODE = "referral_code";
/**
* The redirect URL provided when the link is handled by a desktop client.
*/
public static final String REDIRECT_DESKTOP_URL = "$desktop_url";
/**
* The redirect URL provided when the link is handled by an Android device.
*/
public static final String REDIRECT_ANDROID_URL = "$android_url";
/**
* The redirect URL provided when the link is handled by an iOS device.
*/
public static final String REDIRECT_IOS_URL = "$ios_url";
/**
* The redirect URL provided when the link is handled by a large form-factor iOS device such as
* an iPad.
*/
public static final String REDIRECT_IPAD_URL = "$ipad_url";
/**
* The redirect URL provided when the link is handled by an Amazon Fire device.
*/
public static final String REDIRECT_FIRE_URL = "$fire_url";
/**
* The redirect URL provided when the link is handled by a Blackberry device.
*/
public static final String REDIRECT_BLACKBERRY_URL = "$blackberry_url";
/**
* The redirect URL provided when the link is handled by a Windows Phone device.
*/
public static final String REDIRECT_WINDOWS_PHONE_URL = "$windows_phone_url";
/**
* Open Graph: The title of your object as it should appear within the graph, e.g., "The Rock".
*
* @see <a href="http://ogp.me/#metadata">Open Graph - Basic Metadata</a>
*/
public static final String OG_TITLE = "$og_title";
/**
* The description of the object to appear in social media feeds that use
* Facebook's Open Graph specification.
*
* @see <a href="http://ogp.me/#metadata">Open Graph - Basic Metadata</a>
*/
public static final String OG_DESC = "$og_description";
/**
* An image URL which should represent your object to appear in social media feeds that use
* Facebook's Open Graph specification.
*
* @see <a href="http://ogp.me/#metadata">Open Graph - Basic Metadata</a>
*/
public static final String OG_IMAGE_URL = "$og_image_url";
/**
* A URL to a video file that complements this object.
*
* @see <a href="http://ogp.me/#metadata">Open Graph - Basic Metadata</a>
*/
public static final String OG_VIDEO = "$og_video";
/**
* The canonical URL of your object that will be used as its permanent ID in the graph.
*
* @see <a href="http://ogp.me/#metadata">Open Graph - Basic Metadata</a>
*/
public static final String OG_URL = "$og_url";
/**
* Unique identifier for the app in use.
*/
public static final String OG_APP_ID = "$og_app_id";
/**
* {@link String} value denoting the deep link path to override Branch's default one. By
* default, Branch will use yourapp://open?link_click_id=12345. If you specify this key/value,
* Branch will use yourapp://'$deeplink_path'?link_click_id=12345
*/
public static final String DEEPLINK_PATH = "$deeplink_path";
/**
* {@link String} value indicating whether the link should always initiate a deep link action.
* By default, unless overridden on the dashboard, Branch will only open the app if they are
* 100% sure the app is installed. This setting will cause the link to always open the app.
* Possible values are "true" or "false"
*/
public static final String ALWAYS_DEEPLINK = "$always_deeplink";
/**
* An {@link Integer} value indicating the user to reward for applying a referral code. In this
* case, the user applying the referral code receives credit.
*/
public static final int REFERRAL_CODE_LOCATION_REFERREE = 0;
/**
* An {@link Integer} value indicating the user to reward for applying a referral code. In this
* case, the user who created the referral code receives credit.
*/
public static final int REFERRAL_CODE_LOCATION_REFERRING_USER = 2;
/**
* An {@link Integer} value indicating the user to reward for applying a referral code. In this
* case, both the creator and applicant receive credit
*/
public static final int REFERRAL_CODE_LOCATION_BOTH = 3;
/**
* An {@link Integer} value indicating the calculation type of the referral code. In this case,
* the referral code can be applied continually.
*/
public static final int REFERRAL_CODE_AWARD_UNLIMITED = 1;
/**
* An {@link Integer} value indicating the calculation type of the referral code. In this case,
* a user can only apply a specific referral code once.
*/
public static final int REFERRAL_CODE_AWARD_UNIQUE = 0;
/**
* An {@link Integer} value indicating the link type. In this case, the link can be used an
* unlimited number of times.
*/
public static final int LINK_TYPE_UNLIMITED_USE = 0;
/**
* An {@link Integer} value indicating the link type. In this case, the link can be used only
* once. After initial use, subsequent attempts will not validate.
*/
public static final int LINK_TYPE_ONE_TIME_USE = 1;
/**
* <p>An {@link Integer} variable specifying the amount of time in milliseconds to keep a
* connection alive before assuming a timeout condition.</p>
*
* @see <a href="http://developer.android.com/reference/java/util/Timer.html#schedule(java.util.TimerTask, long)">
* Timer.schedule (TimerTask task, long delay)</a>
*/
private static final int SESSION_KEEPALIVE = 2000;
/**
* <p>An {@link Integer} value defining the timeout period in milliseconds to wait during a
* looping task before triggering an actual connection close during a session close action.</p>
*/
private static final int PREVENT_CLOSE_TIMEOUT = 500;
/* Json object containing key-value pairs for debugging deep linking */
private JSONObject deeplinkDebugParams_;
/* When true, reading the device Android ID is skipped; toggled via disableDeviceIDFetch(Boolean). */
private static boolean disableDeviceIDFetch_;
/* Enables the Facebook App Links check during init; set by enableFacebookAppLinkCheck(). */
private boolean enableFacebookAppLinkCheck_ = false;
/* NOTE(review): usage not visible in this chunk — presumably toggles simulated-install mode; confirm. */
private static boolean isSimulatingInstalls_;
/* NOTE(review): usage not visible in this chunk — presumably enables SDK logging; confirm. */
private static boolean isLogging_ = false;
/* When true, Branch init waits for the Play Store install referrer broadcast; set by enablePlayStoreReferrer(long). */
private static boolean checkInstallReferrer_ = false;
/* Maximum time (ms) to wait for the Play Store referrer broadcast; overridden by enablePlayStoreReferrer(long). */
private static long PLAYSTORE_REFERRAL_FETCH_WAIT_FOR = 5000;
/**
* <p>A {@link Branch} object that is instantiated on init and holds the singleton instance of
* the class during application runtime.</p>
*/
private static Branch branchReferral_;
private BranchRemoteInterface kRemoteInterface_;
private PrefHelper prefHelper_;
private final SystemObserver systemObserver_;
private Context context_;
final Object lock;
private Semaphore serverSema_;
private ServerRequestQueue requestQueue_;
private int networkCount_;
private boolean hasNetwork_;
private Map<BranchLinkData, String> linkCache_;
private ScheduledFuture<?> appListingSchedule_;
/* Set to true when application is instantiating {@BranchApp} by extending or adding manifest entry. */
private static boolean isAutoSessionMode_ = false;
/* Set to true when {@link Activity} life cycle callbacks are registered. */
private static boolean isActivityLifeCycleCallbackRegistered_ = false;
/* Enumeration for defining session initialisation state. */
private enum SESSION_STATE {
INITIALISED, INITIALISING, UNINITIALISED
}
private enum INTENT_STATE {
PENDING,
READY
}
private INTENT_STATE intentState_ = INTENT_STATE.PENDING;
private boolean handleDelayedNewIntents_ = false;
/* Holds the current Session state. Default is set to UNINITIALISED. */
private SESSION_STATE initState_ = SESSION_STATE.UNINITIALISED;
/* Instance of share link manager to share links automatically with third party applications. */
private ShareLinkManager shareLinkManager_;
/* The current activity instance for the application.*/
WeakReference<Activity> currentActivityReference_;
/* Specifies the choice of user for isReferrable setting. used to determine the link click is referrable or not. See getAutoSession for usage */
private enum CUSTOM_REFERRABLE_SETTINGS {
USE_DEFAULT, REFERRABLE, NON_REFERRABLE
}
/* By default assume user want to use the default settings. Update this option when user specify custom referrable settings */
private static CUSTOM_REFERRABLE_SETTINGS customReferrableSettings_ = CUSTOM_REFERRABLE_SETTINGS.USE_DEFAULT;
/* Key to indicate whether the Activity was launched by Branch or not. */
private static final String AUTO_DEEP_LINKED = "io.branch.sdk.auto_linked";
/* Key for Auto Deep link param. The activities which need to automatically deep linked should define in this in the activity metadata. */
private static final String AUTO_DEEP_LINK_KEY = "io.branch.sdk.auto_link_keys";
/* Path for $deeplink_path or $android_deeplink_path to auto deep link. The activities which need to automatically deep linked should define in this in the activity metadata. */
private static final String AUTO_DEEP_LINK_PATH = "io.branch.sdk.auto_link_path";
/* Key for disabling auto deep link feature. Setting this to true in manifest will disable auto deep linking feature. */
private static final String AUTO_DEEP_LINK_DISABLE = "io.branch.sdk.auto_link_disable";
/* Key for defining a request code for an activity. Should be added as metadata for an activity. This is used as a request code for launching an activity on auto deep link. */
private static final String AUTO_DEEP_LINK_REQ_CODE = "io.branch.sdk.auto_link_request_code";
/* Request code used to launch an activity on auto deep linking when AUTO_DEEP_LINK_REQ_CODE is not specified for the activity in the manifest. */
private static final int DEF_AUTO_DEEP_LINK_REQ_CODE = 1501;
/* Sets to true when the init session params are reported to the app though call back.*/
private boolean isInitReportedThroughCallBack = false;
private final ConcurrentHashMap<String, String> instrumentationExtraData_;
/* Name of the key for getting Fabric Branch API key from string resource */
private static final String FABRIC_BRANCH_API_KEY = "io.branch.apiKey";
private boolean isGAParamsFetchInProgress_ = false;
private List<String> externalUriWhiteList_;
private List<String> skipExternalUriHosts_;
String sessionReferredLink_; // Link which opened this application session if opened by a link click.
private static String cookieBasedMatchDomain_ = "app.link"; // Domain name used for cookie based matching.
private static int LATCH_WAIT_UNTIL = 2500; //used for getLatestReferringParamsSync and getFirstReferringParamsSync, fail after this many milliseconds
/* List of keys whose values are collected from the Intent Extra.*/
private static final String[] EXTERNAL_INTENT_EXTRA_KEY_WHITE_LIST = new String[]{
"extra_launch_uri" // Key for embedded uri in FB ads triggered intents
};
private CountDownLatch getFirstReferringParamsLatch = null;
private CountDownLatch getLatestReferringParamsLatch = null;
/* Flag for checking of Strong matching is waiting on GAID fetch */
private boolean performCookieBasedStrongMatchingOnGAIDAvailable = false;
/**
 * <p>The main constructor of the Branch class is private because the class uses the Singleton
 * pattern.</p>
 * <p/>
 * <p>Use {@link #getInstance(Context) getInstance} method when instantiating.</p>
 *
 * @param context A {@link Context} from which this call was made.
 */
private Branch(@NonNull Context context) {
    // Collaborators: preferences, network interface, device/system observer, request queue.
    prefHelper_ = PrefHelper.getInstance(context);
    kRemoteInterface_ = new BranchRemoteInterface(context);
    systemObserver_ = new SystemObserver(context);
    requestQueue_ = ServerRequestQueue.getInstance(context);
    serverSema_ = new Semaphore(1);
    lock = new Object();
    networkCount_ = 0;
    hasNetwork_ = true;
    linkCache_ = new HashMap<>();
    instrumentationExtraData_ = new ConcurrentHashMap<>();
    // Kick off asynchronous Google Ads params fetch; flag records it is in flight.
    isGAParamsFetchInProgress_ = systemObserver_.prefetchGAdsParams(this);
    InstallListener.setListener(this);
    // newIntent() delayed issue is only with Android M+ devices. So need to handle android M and above
    // PRS: Since this seem more reliable and not causing any integration issues adding this to all supported SDK versions
    if (android.os.Build.VERSION.SDK_INT >= 15) {
        handleDelayedNewIntents_ = true;
        intentState_ = INTENT_STATE.PENDING;
    } else {
        handleDelayedNewIntents_ = false;
        intentState_ = INTENT_STATE.READY;
    }
    externalUriWhiteList_ = new ArrayList<>();
    skipExternalUriHosts_ = new ArrayList<>();
}
/**
 * <p>
 * Enables/Disables the test mode for the SDK. This will use the Branch Test Keys.
 * This will also enable debug logs.
 * Note: This is same as setting "io.branch.sdk.TestMode" to "True" in Manifest file
 * </p>
 */
public static void enableTestMode() {
    BranchUtil.isCustomDebugEnabled_ = true;
}
/**
 * <p>Disables the test mode for the SDK, reverting to the live Branch keys and disabling
 * the debug logging turned on by {@link #enableTestMode()}.</p>
 */
public static void disableTestMode() {
    BranchUtil.isCustomDebugEnabled_ = false;
}
/**
 * <p>Convenience alias for {@link #enableTestMode()}.</p>
 * NOTE(review): looks like a legacy entry point kept for backward compatibility — consider
 * deprecating in favor of {@link #enableTestMode()}; confirm against public API policy.
 */
public void setDebug() {
    enableTestMode();
}
/**
 * Since play store referrer broadcast from google play is few millisecond delayed, call this method to delay Branch init for more accurate
 * tracking and attribution. This will delay branch init only the first time user open the app.
 * Note: Recommend 1500 to capture more than 90% of the install referrer cases per our testing as of 4/2017
 *
 * @param delay {@link Long} Maximum wait time for install referrer broadcast in milli seconds
 */
public static void enablePlayStoreReferrer(long delay) {
    checkInstallReferrer_ = true;
    PLAYSTORE_REFERRAL_FETCH_WAIT_FOR = delay;
}
/**
 * @return {@code true} if Branch init should wait for the Play Store install referrer
 * broadcast (see {@link #enablePlayStoreReferrer(long)}).
 */
static boolean checkPlayStoreReferrer() {
    return checkInstallReferrer_;
}
/**
 * @return The maximum time in milliseconds to wait for the Play Store install referrer
 * broadcast before proceeding with init.
 */
public static long getReferralFetchWaitTime() {
    return PLAYSTORE_REFERRAL_FETCH_WAIT_FOR;
}
/**
 * <p>Singleton method to return the pre-initialised object of the type {@link Branch}.
 * Make sure your app is instantiating {@link BranchApp} before calling this method
 * or you have created an instance of Branch already by calling getInstance(Context ctx).</p>
 *
 * @return An initialised singleton {@link Branch} object, or {@code null} if the SDK has not
 * been initialised yet (an error is logged in that case).
 */
@TargetApi(Build.VERSION_CODES.ICE_CREAM_SANDWICH)
public static Branch getInstance() {
    /* Check if BranchApp is instantiated. */
    if (branchReferral_ == null) {
        // Use the class-level TAG constant instead of repeating the literal tag string.
        Log.e(TAG, "Branch instance is not created yet. Make sure you have initialised Branch. [Consider Calling getInstance(Context ctx) if you still have issue.]");
    } else if (isAutoSessionMode_ && !isActivityLifeCycleCallbackRegistered_) {
        /* In auto session mode the Activity life cycle callbacks must have been registered. */
        Log.e(TAG, "Branch instance is not properly initialised. Make sure your Application class is extending BranchApp class. " +
                "If you are not extending BranchApp class make sure you are initialising Branch in your Applications onCreate()");
    }
    return branchReferral_;
}
/**
 * <p>Singleton method to return the pre-initialised, or newly initialise and return, a singleton
 * object of the type {@link Branch}.</p>
 *
 * @param context   A {@link Context} from which this call was made.
 * @param branchKey Your Branch key as a {@link String}; must start with "key_".
 * @return An initialised {@link Branch} object, either fetched from a pre-initialised
 * instance within the singleton class, or a newly instantiated object where
 * one was not already requested during the current app lifecycle.
 * @see <a href="https://github.com/BranchMetrics/Branch-Android-SDK/blob/05e234855f983ae022633eb01989adb05775532e/README.md#add-your-app-key-to-your-project">
 * Adding your app key to your project</a>
 */
public static Branch getInstance(@NonNull Context context, @NonNull String branchKey) {
    if (branchReferral_ == null) {
        branchReferral_ = Branch.initInstance(context);
    }
    branchReferral_.context_ = context.getApplicationContext();
    // Valid Branch keys are prefixed with "key_".
    if (branchKey.startsWith("key_")) {
        boolean isNewBranchKeySet = branchReferral_.prefHelper_.setBranchKey(branchKey);
        // A fresh key invalidates cached links and queued requests built against the old key.
        if (isNewBranchKeySet) {
            branchReferral_.linkCache_.clear();
            branchReferral_.requestQueue_.clear();
        }
    } else {
        // Fixed missing space in the log message ("invalid.Please" -> "invalid. Please");
        // also use the class-level TAG constant for consistency.
        Log.e(TAG, "Branch Key is invalid. Please check your BranchKey");
    }
    return branchReferral_;
}
/**
 * Lazily creates the singleton {@link Branch} instance, resolving the Branch key from
 * preferences (live or test key per {@code isLive}), falling back to a Fabric-provided
 * key resource when none is configured.
 *
 * NOTE(review): no synchronization here — concurrent first calls could race on
 * {@code branchReferral_}; presumably callers are main-thread only — confirm.
 *
 * @param context A {@link Context} from which this call was made.
 * @param isLive  {@code true} to read the live Branch key, {@code false} for the test key.
 * @return The singleton {@link Branch} instance.
 */
private static Branch getBranchInstance(@NonNull Context context, boolean isLive) {
    if (branchReferral_ == null) {
        branchReferral_ = Branch.initInstance(context);
        String branchKey = branchReferral_.prefHelper_.readBranchKey(isLive);
        boolean isNewBranchKeySet;
        if (branchKey == null || branchKey.equalsIgnoreCase(PrefHelper.NO_STRING_VALUE)) {
            // If Branch key is not available check for Fabric provided Branch key
            String fabricBranchApiKey = null;
            try {
                Resources resources = context.getResources();
                fabricBranchApiKey = resources.getString(resources.getIdentifier(FABRIC_BRANCH_API_KEY, "string", context.getPackageName()));
            } catch (Exception ignore) {
                // Resource lookup failure is expected when no Fabric key resource exists.
            }
            if (!TextUtils.isEmpty(fabricBranchApiKey)) {
                isNewBranchKeySet = branchReferral_.prefHelper_.setBranchKey(fabricBranchApiKey);
            } else {
                Log.i("BranchSDK", "Branch Warning: Please enter your branch_key in your project's Manifest file!");
                isNewBranchKeySet = branchReferral_.prefHelper_.setBranchKey(PrefHelper.NO_STRING_VALUE);
            }
        } else {
            isNewBranchKeySet = branchReferral_.prefHelper_.setBranchKey(branchKey);
        }
        //on setting a new key clear link cache and pending requests
        if (isNewBranchKeySet) {
            branchReferral_.linkCache_.clear();
            branchReferral_.requestQueue_.clear();
        }
        branchReferral_.context_ = context.getApplicationContext();
        /* If {@link Application} is instantiated register for activity life cycle events. */
        if (context instanceof Application) {
            isAutoSessionMode_ = true;
            branchReferral_.setActivityLifeCycleObserver((Application) context);
        }
    }
    return branchReferral_;
}
/**
 * <p>Singleton method to return the pre-initialised, or newly initialise and return, a singleton
 * object of the type {@link Branch}.</p>
 * <p>Use this whenever you need to call a method directly on the {@link Branch} object.</p>
 *
 * @param context A {@link Context} from which this call was made.
 * @return An initialised {@link Branch} object, either fetched from a pre-initialised
 * instance within the singleton class, or a newly instantiated object where
 * one was not already requested during the current app lifecycle.
 */
public static Branch getInstance(@NonNull Context context) {
    return getBranchInstance(context, true);
}
/**
 * <p>If you configured your Strings file according to the guide, you'll be able to use
 * the test version of your app by just calling this static method before calling initSession.</p>
 *
 * @param context A {@link Context} from which this call was made.
 * @return An initialised {@link Branch} object using the test Branch key.
 */
public static Branch getTestInstance(@NonNull Context context) {
    return getBranchInstance(context, false);
}
/**
 * <p>Singleton method to return the pre-initialised, or newly initialise and return, a singleton
 * object of the type {@link Branch}, enabling automatic session management.</p>
 * <p>Use this whenever you need to call a method directly on the {@link Branch} object.</p>
 *
 * @param context A {@link Context} from which this call was made.
 * @return An initialised {@link Branch} object, either fetched from a pre-initialised
 * instance within the singleton class, or a newly instantiated object where
 * one was not already requested during the current app lifecycle.
 */
@TargetApi(Build.VERSION_CODES.ICE_CREAM_SANDWICH)
public static Branch getAutoInstance(@NonNull Context context) {
    isAutoSessionMode_ = true;
    customReferrableSettings_ = CUSTOM_REFERRABLE_SETTINGS.USE_DEFAULT;
    // Use the live key unless the app is running in test mode.
    getBranchInstance(context, !BranchUtil.isTestModeEnabled(context));
    return branchReferral_;
}
/**
 * <p>Singleton method to return the pre-initialised, or newly initialise and return, a singleton
 * object of the type {@link Branch}, enabling automatic session management.</p>
 * <p>Use this whenever you need to call a method directly on the {@link Branch} object.</p>
 *
 * @param context      A {@link Context} from which this call was made.
 * @param isReferrable A {@link Boolean} value indicating whether initialising a session on this Branch instance
 *                     should be considered as potentially referrable or not. By default, a user is only referrable
 *                     if initSession results in a fresh install. Overriding this gives you control of who is referrable.
 * @return An initialised {@link Branch} object, either fetched from a pre-initialised
 * instance within the singleton class, or a newly instantiated object where
 * one was not already requested during the current app lifecycle.
 */
@TargetApi(Build.VERSION_CODES.ICE_CREAM_SANDWICH)
public static Branch getAutoInstance(@NonNull Context context, boolean isReferrable) {
    isAutoSessionMode_ = true;
    // Record the caller's explicit referrable preference.
    if (isReferrable) {
        customReferrableSettings_ = CUSTOM_REFERRABLE_SETTINGS.REFERRABLE;
    } else {
        customReferrableSettings_ = CUSTOM_REFERRABLE_SETTINGS.NON_REFERRABLE;
    }
    // Use the live key unless the app is running in test mode.
    getBranchInstance(context, !BranchUtil.isTestModeEnabled(context));
    return branchReferral_;
}
/**
 * <p>Singleton method to return the pre-initialised, or newly initialise and return, a singleton
 * object of the type {@link Branch}, enabling automatic session management.</p>
 * <p>Use this whenever you need to call a method directly on the {@link Branch} object.</p>
 *
 * @param context   A {@link Context} from which this call was made.
 * @param branchKey A {@link String} value used to initialize Branch; must start with "key_".
 * @return An initialised {@link Branch} object, either fetched from a pre-initialised
 * instance within the singleton class, or a newly instantiated object where
 * one was not already requested during the current app lifecycle.
 */
@TargetApi(Build.VERSION_CODES.ICE_CREAM_SANDWICH)
public static Branch getAutoInstance(@NonNull Context context, @NonNull String branchKey) {
    isAutoSessionMode_ = true;
    customReferrableSettings_ = CUSTOM_REFERRABLE_SETTINGS.USE_DEFAULT;
    boolean isLive = !BranchUtil.isTestModeEnabled(context);
    getBranchInstance(context, isLive);
    // Valid Branch keys are prefixed with "key_".
    if (branchKey.startsWith("key_")) {
        boolean isNewBranchKeySet = branchReferral_.prefHelper_.setBranchKey(branchKey);
        // A fresh key invalidates cached links and queued requests built against the old key.
        if (isNewBranchKeySet) {
            branchReferral_.linkCache_.clear();
            branchReferral_.requestQueue_.clear();
        }
    } else {
        // Fixed missing space in the log message ("invalid.Please" -> "invalid. Please");
        // also use the class-level TAG constant for consistency.
        Log.e(TAG, "Branch Key is invalid. Please check your BranchKey");
    }
    return branchReferral_;
}
/**
 * <p>If you configured your Strings file according to the guide, you'll be able to use
 * the test version of your app by just calling this static method before calling initSession.</p>
 *
 * @param context A {@link Context} from which this call was made.
 * @return An initialised {@link Branch} object using the test Branch key.
 */
@TargetApi(Build.VERSION_CODES.ICE_CREAM_SANDWICH)
public static Branch getAutoTestInstance(@NonNull Context context) {
    isAutoSessionMode_ = true;
    customReferrableSettings_ = CUSTOM_REFERRABLE_SETTINGS.USE_DEFAULT;
    // Always initialise with the test key.
    getBranchInstance(context, false);
    return branchReferral_;
}
/**
 * <p>If you configured your Strings file according to the guide, you'll be able to use
 * the test version of your app by just calling this static method before calling initSession.</p>
 *
 * @param context      A {@link Context} from which this call was made.
 * @param isReferrable A {@link Boolean} value indicating whether initialising a session on this Branch instance
 *                     should be considered as potentially referrable or not. By default, a user is only referrable
 *                     if initSession results in a fresh install. Overriding this gives you control of who is referrable.
 * @return An initialised {@link Branch} object using the test Branch key.
 */
@TargetApi(Build.VERSION_CODES.ICE_CREAM_SANDWICH)
public static Branch getAutoTestInstance(@NonNull Context context, boolean isReferrable) {
    isAutoSessionMode_ = true;
    // Record the caller's explicit referrable preference.
    if (isReferrable) {
        customReferrableSettings_ = CUSTOM_REFERRABLE_SETTINGS.REFERRABLE;
    } else {
        customReferrableSettings_ = CUSTOM_REFERRABLE_SETTINGS.NON_REFERRABLE;
    }
    // Always initialise with the test key.
    getBranchInstance(context, false);
    return branchReferral_;
}
/**
 * <p>Initialises an instance of the Branch object.</p>
 *
 * @param context A {@link Context} from which this call was made.
 * @return An initialised {@link Branch} object.
 */
private static Branch initInstance(@NonNull Context context) {
    // Always hold the application context, never an Activity context, to avoid leaks.
    return new Branch(context.getApplicationContext());
}
/**
 * <p>Manually sets the {@link Boolean} value, that indicates that the Branch API connection has
 * been initialised, to false - forcing re-initialisation.</p>
 */
public void resetUserSession() {
    initState_ = SESSION_STATE.UNINITIALISED;
}
/**
 * <p>Sets the number of times to re-attempt a timed-out request to the Branch API, before
 * considering the request to have failed entirely. Default 5.</p>
 *
 * @param retryCount An {@link Integer} specifying the number of times to retry before giving
 *                   up and declaring defeat.
 */
public void setRetryCount(int retryCount) {
    // Silently ignore invalid values or calls before preferences are available.
    if (prefHelper_ == null || retryCount < 0) {
        return;
    }
    prefHelper_.setRetryCount(retryCount);
}
/**
 * <p>Sets the amount of time in milliseconds to wait before re-attempting a timed-out request
 * to the Branch API. Default 3000 ms.</p>
 *
 * @param retryInterval An {@link Integer} value specifying the number of milliseconds to
 *                      wait before re-attempting a timed-out request.
 */
public void setRetryInterval(int retryInterval) {
    // Silently ignore non-positive intervals or calls before preferences are available.
    if (prefHelper_ == null || retryInterval <= 0) {
        return;
    }
    prefHelper_.setRetryInterval(retryInterval);
}
/**
 * <p>Sets the duration in milliseconds that the system should wait for a response before considering
 * any Branch API call to have timed out. Default 3000 ms.</p>
 * <p>Increase this to perform better in low network speed situations, but at the expense of
 * responsiveness to error situation.</p>
 *
 * @param timeout An {@link Integer} value specifying the number of milliseconds to wait before
 *                considering the request to have timed out.
 */
public void setNetworkTimeout(int timeout) {
    // Silently ignore non-positive timeouts or calls before preferences are available.
    if (prefHelper_ == null || timeout <= 0) {
        return;
    }
    prefHelper_.setTimeout(timeout);
}
/**
 * Method to control reading Android ID from device. Set this to true to disable reading the device id.
 * This method should be called from your {@link Application#onCreate()} method before creating Branch auto instance by calling {@link Branch#getAutoInstance(Context)}
 *
 * @param deviceIdFetch {@link Boolean} with value true to disable reading the Android id from device.
 *                      A {@code null} value is treated as {@code false}.
 */
public static void disableDeviceIDFetch(Boolean deviceIdFetch) {
    // Null-safe unboxing: the boxed Boolean parameter previously auto-unboxed directly,
    // throwing NullPointerException if a caller passed null.
    disableDeviceIDFetch_ = Boolean.TRUE.equals(deviceIdFetch);
}
/**
 * Returns true if reading device id is disabled
 *
 * @return {@link Boolean} with value true if reading the Android ID is disabled
 */
public static boolean isDeviceIDFetchDisabled() {
    return disableDeviceIDFetch_;
}
/**
* Sets the key-value pairs for debugging the deep link. The key-value set in debug mode is given back with other deep link data on branch init session.
* This method should be called from onCreate() of activity which listens to Branch Init Session callbacks
*
* @param debugParams A {@link JSONObject} containing key-value pairs for debugging branch deep linking
*/
public void setDeepLinkDebugMode(JSONObject debugParams) {
deeplinkDebugParams_ = debugParams;
}
    /**
     * <p>Calls the {@link PrefHelper#disableExternAppListing()} on the local instance to prevent
     * a list of installed apps from being returned to the Branch API.</p>
     */
    public void disableAppList() {
        // Pure delegation; the preference is persisted by PrefHelper.
        prefHelper_.disableExternAppListing();
    }
    /**
     * <p>
     * Enable Facebook app link check operation during Branch initialisation.
     * </p>
     */
    public void enableFacebookAppLinkCheck() {
        // Flag consumed during session initialisation.
        enableFacebookAppLinkCheck_ = true;
    }
    /**
     * <p>Add key value pairs to all requests</p>
     *
     * @param key   metadata key attached to every outgoing Branch request.
     * @param value metadata value for the given key.
     */
    public void setRequestMetadata(@NonNull String key, @NonNull String value) {
        prefHelper_.setRequestMetadata(key, value);
    }
/**
* <p>Initialises a session with the Branch API, assigning a {@link BranchUniversalReferralInitListener}
* to perform an action upon successful initialisation.</p>
*
* @param callback A {@link BranchUniversalReferralInitListener} instance that will be called following
* successful (or unsuccessful) initialisation of the session with the Branch API.
* @return A {@link Boolean} value, indicating <i>false</i> if initialisation is
* unsuccessful.
*/
public boolean initSession(BranchUniversalReferralInitListener callback) {
return initSession(callback, (Activity) null);
}
/**
* <p>Initialises a session with the Branch API, assigning a {@link BranchReferralInitListener}
* to perform an action upon successful initialisation.</p>
*
* @param callback A {@link BranchReferralInitListener} instance that will be called following
* successful (or unsuccessful) initialisation of the session with the Branch API.
* @return A {@link Boolean} value, indicating <i>false</i> if initialisation is
* unsuccessful.
*/
public boolean initSession(BranchReferralInitListener callback) {
return initSession(callback, (Activity) null);
}
/**
* <p>Initialises a session with the Branch API, passing the {@link Activity} and assigning a
* {@link BranchUniversalReferralInitListener} to perform an action upon successful initialisation.</p>
*
* @param callback A {@link BranchUniversalReferralInitListener} instance that will be called
* following successful (or unsuccessful) initialisation of the session
* with the Branch API.
* @param activity The calling {@link Activity} for context.
* @return A {@link Boolean} value, indicating <i>false</i> if initialisation is
* unsuccessful.
*/
public boolean initSession(BranchUniversalReferralInitListener callback, Activity activity) {
if (customReferrableSettings_ == CUSTOM_REFERRABLE_SETTINGS.USE_DEFAULT) {
initUserSessionInternal(callback, activity, true);
} else {
boolean isReferrable = customReferrableSettings_ == CUSTOM_REFERRABLE_SETTINGS.REFERRABLE;
initUserSessionInternal(callback, activity, isReferrable);
}
return true;
}
/**
* <p>Initialises a session with the Branch API, passing the {@link Activity} and assigning a
* {@link BranchReferralInitListener} to perform an action upon successful initialisation.</p>
*
* @param callback A {@link BranchReferralInitListener} instance that will be called
* following successful (or unsuccessful) initialisation of the session
* with the Branch API.
* @param activity The calling {@link Activity} for context.
* @return A {@link Boolean} value, indicating <i>false</i> if initialisation is
* unsuccessful.
*/
public boolean initSession(BranchReferralInitListener callback, Activity activity) {
if (customReferrableSettings_ == CUSTOM_REFERRABLE_SETTINGS.USE_DEFAULT) {
initUserSessionInternal(callback, activity, true);
} else {
boolean isReferrable = customReferrableSettings_ == CUSTOM_REFERRABLE_SETTINGS.REFERRABLE;
initUserSessionInternal(callback, activity, isReferrable);
}
return true;
}
/**
* <p>Initialises a session with the Branch API.</p>
*
* @param callback A {@link BranchUniversalReferralInitListener} instance that will be called
* following successful (or unsuccessful) initialisation of the session
* with the Branch API.
* @param data A {@link Uri} variable containing the details of the source link that
* led to this initialisation action.
* @return A {@link Boolean} value that will return <i>false</i> if the supplied
* <i>data</i> parameter cannot be handled successfully - i.e. is not of a
* valid URI format.
*/
public boolean initSession(BranchUniversalReferralInitListener callback, @NonNull Uri data) {
return initSession(callback, data, null);
}
/**
* <p>Initialises a session with the Branch API.</p>
*
* @param callback A {@link BranchReferralInitListener} instance that will be called
* following successful (or unsuccessful) initialisation of the session
* with the Branch API.
* @param data A {@link Uri} variable containing the details of the source link that
* led to this initialisation action.
* @return A {@link Boolean} value that will return <i>false</i> if the supplied
* <i>data</i> parameter cannot be handled successfully - i.e. is not of a
* valid URI format.
*/
public boolean initSession(BranchReferralInitListener callback, @NonNull Uri data) {
return initSession(callback, data, null);
}
/**
* <p>Initialises a session with the Branch API.</p>
*
* @param callback A {@link BranchUniversalReferralInitListener} instance that will be called
* following successful (or unsuccessful) initialisation of the session
* with the Branch API.
* @param data A {@link Uri} variable containing the details of the source link that
* led to this initialisation action.
* @param activity The calling {@link Activity} for context.
* @return A {@link Boolean} value that will return <i>false</i> if the supplied
* <i>data</i> parameter cannot be handled successfully - i.e. is not of a
* valid URI format.
*/
public boolean initSession(BranchUniversalReferralInitListener callback, @NonNull Uri data, Activity activity) {
readAndStripParam(data, activity);
initSession(callback, activity);
return true;
}
    /**
     * <p>Initialises a session with the Branch API.</p>
     *
     * @param callback A {@link BranchReferralInitListener} instance that will be called
     *                 following successful (or unsuccessful) initialisation of the session
     *                 with the Branch API.
     * @param data     A {@link Uri} variable containing the details of the source link that
     *                 led to this initialisation action.
     * @param activity The calling {@link Activity} for context.
     * @return A {@link Boolean} value that will return <i>false</i> if the supplied
     *         <i>data</i> parameter cannot be handled successfully - i.e. is not of a
     *         valid URI format.
     */
    public boolean initSession(BranchReferralInitListener callback, @NonNull Uri data, Activity activity) {
        // Strip Branch parameters from the intent data before starting the session.
        readAndStripParam(data, activity);
        return initSession(callback, activity);
    }
/**
* <p>Initialises a session with the Branch API, without a callback or {@link Activity}.</p>
*
* @return A {@link Boolean} value that returns <i>false</i> if unsuccessful.
*/
public boolean initSession() {
return initSession((Activity) null);
}
/**
* <p>Initialises a session with the Branch API, without a callback or {@link Activity}.</p>
*
* @param activity The calling {@link Activity} for context.
* @return A {@link Boolean} value that returns <i>false</i> if unsuccessful.
*/
public boolean initSession(Activity activity) {
return initSession((BranchReferralInitListener) null, activity);
}
/**
* <p>Initialises a session with the Branch API, with associated data from the supplied
* {@link Uri}.</p>
*
* @param data A {@link Uri} variable containing the details of the source link that
* led to this
* initialisation action.
* @return A {@link Boolean} value that returns <i>false</i> if unsuccessful.
*/
public boolean initSessionWithData(@NonNull Uri data) {
return initSessionWithData(data, null);
}
    /**
     * <p>Initialises a session with the Branch API, with associated data from the supplied
     * {@link Uri}.</p>
     *
     * @param data     A {@link Uri} variable containing the details of the source link that led to this
     *                 initialisation action.
     * @param activity The calling {@link Activity} for context.
     * @return A {@link Boolean} value that returns <i>false</i> if unsuccessful.
     */
    public boolean initSessionWithData(Uri data, Activity activity) {
        // Strip Branch parameters from the intent data, then start a callback-less session.
        readAndStripParam(data, activity);
        return initSession((BranchReferralInitListener) null, activity);
    }
/**
* <p>Initialises a session with the Branch API, specifying whether the initialisation can count
* as a referrable action.</p>
*
* @param isReferrable A {@link Boolean} value indicating whether this initialisation
* session should be considered as potentially referrable or not.
* By default, a user is only referrable if initSession results in a
* fresh install. Overriding this gives you control of who is referrable.
* @return A {@link Boolean} value that returns <i>false</i> if unsuccessful.
*/
public boolean initSession(boolean isReferrable) {
return initSession((BranchReferralInitListener) null, isReferrable, (Activity) null);
}
/**
* <p>Initialises a session with the Branch API, specifying whether the initialisation can count
* as a referrable action, and supplying the calling {@link Activity} for context.</p>
*
* @param isReferrable A {@link Boolean} value indicating whether this initialisation
* session should be considered as potentially referrable or not.
* By default, a user is only referrable if initSession results in a
* fresh install. Overriding this gives you control of who is referrable.
* @param activity The calling {@link Activity} for context.
* @return A {@link Boolean} value that returns <i>false</i> if unsuccessful.
*/
public boolean initSession(boolean isReferrable, @NonNull Activity activity) {
return initSession((BranchReferralInitListener) null, isReferrable, activity);
}
/**
* <p>Initialises a session with the Branch API.</p>
*
* @param callback A {@link BranchUniversalReferralInitListener} instance that will be called
* following successful (or unsuccessful) initialisation of the session
* with the Branch API.
* @param isReferrable A {@link Boolean} value indicating whether this initialisation
* session should be considered as potentially referrable or not.
* By default, a user is only referrable if initSession results in a
* fresh install. Overriding this gives you control of who is referrable.
* @param data A {@link Uri} variable containing the details of the source link that
* led to this initialisation action.
* @return A {@link Boolean} value that returns <i>false</i> if unsuccessful.
*/
public boolean initSession(BranchUniversalReferralInitListener callback, boolean isReferrable, Uri data) {
return initSession(callback, isReferrable, data, null);
}
/**
* <p>Initialises a session with the Branch API.</p>
*
* @param callback A {@link BranchReferralInitListener} instance that will be called
* following successful (or unsuccessful) initialisation of the session
* with the Branch API.
* @param isReferrable A {@link Boolean} value indicating whether this initialisation
* session should be considered as potentially referrable or not.
* By default, a user is only referrable if initSession results in a
* fresh install. Overriding this gives you control of who is referrable.
* @param data A {@link Uri} variable containing the details of the source link that
* led to this initialisation action.
* @return A {@link Boolean} value that returns <i>false</i> if unsuccessful.
*/
public boolean initSession(BranchReferralInitListener callback, boolean isReferrable, @NonNull Uri data) {
return initSession(callback, isReferrable, data, null);
}
    /**
     * <p>Initialises a session with the Branch API.</p>
     *
     * @param callback     A {@link BranchUniversalReferralInitListener} instance that will be called
     *                     following successful (or unsuccessful) initialisation of the session
     *                     with the Branch API.
     * @param isReferrable A {@link Boolean} value indicating whether this initialisation
     *                     session should be considered as potentially referrable or not.
     *                     By default, a user is only referrable if initSession results in a
     *                     fresh install. Overriding this gives you control of who is referrable.
     * @param data         A {@link Uri} variable containing the details of the source link that
     *                     led to this initialisation action.
     * @param activity     The calling {@link Activity} for context.
     * @return A {@link Boolean} value that returns <i>false</i> if unsuccessful.
     */
    public boolean initSession(BranchUniversalReferralInitListener callback, boolean isReferrable, @NonNull Uri data, Activity activity) {
        // Strip Branch parameters from the intent data before starting the session.
        readAndStripParam(data, activity);
        return initSession(callback, isReferrable, activity);
    }
    /**
     * <p>Initialises a session with the Branch API.</p>
     *
     * @param callback     A {@link BranchReferralInitListener} instance that will be called
     *                     following successful (or unsuccessful) initialisation of the session
     *                     with the Branch API.
     * @param isReferrable A {@link Boolean} value indicating whether this initialisation
     *                     session should be considered as potentially referrable or not.
     *                     By default, a user is only referrable if initSession results in a
     *                     fresh install. Overriding this gives you control of who is referrable.
     * @param data         A {@link Uri} variable containing the details of the source link that
     *                     led to this initialisation action.
     * @param activity     The calling {@link Activity} for context.
     * @return A {@link Boolean} value that returns <i>false</i> if unsuccessful.
     */
    public boolean initSession(BranchReferralInitListener callback, boolean isReferrable, @NonNull Uri data, Activity activity) {
        // Strip Branch parameters from the intent data before starting the session.
        readAndStripParam(data, activity);
        return initSession(callback, isReferrable, activity);
    }
/**
* <p>Initialises a session with the Branch API.</p>
*
* @param callback A {@link BranchUniversalReferralInitListener} instance that will be called
* following successful (or unsuccessful) initialisation of the session
* with the Branch API.
* @param isReferrable A {@link Boolean} value indicating whether this initialisation
* session should be considered as potentially referrable or not.
* By default, a user is only referrable if initSession results in a
* fresh install. Overriding this gives you control of who is referrable.
* @return A {@link Boolean} value that returns <i>false</i> if unsuccessful.
*/
public boolean initSession(BranchUniversalReferralInitListener callback, boolean isReferrable) {
return initSession(callback, isReferrable, (Activity) null);
}
/**
* <p>Initialises a session with the Branch API.</p>
*
* @param callback A {@link BranchReferralInitListener} instance that will be called
* following successful (or unsuccessful) initialisation of the session
* with the Branch API.
* @param isReferrable A {@link Boolean} value indicating whether this initialisation
* session should be considered as potentially referrable or not.
* By default, a user is only referrable if initSession results in a
* fresh install. Overriding this gives you control of who is referrable.
* @return A {@link Boolean} value that returns <i>false</i> if unsuccessful.
*/
public boolean initSession(BranchReferralInitListener callback, boolean isReferrable) {
return initSession(callback, isReferrable, (Activity) null);
}
    /**
     * <p>Initialises a session with the Branch API.</p>
     *
     * @param callback     A {@link BranchUniversalReferralInitListener} instance that will be called
     *                     following successful (or unsuccessful) initialisation of the session
     *                     with the Branch API.
     * @param isReferrable A {@link Boolean} value indicating whether this initialisation
     *                     session should be considered as potentially referrable or not.
     *                     By default, a user is only referrable if initSession results in a
     *                     fresh install. Overriding this gives you control of who is referrable.
     * @param activity     The calling {@link Activity} for context.
     * @return A {@link Boolean} value; always <i>true</i> — failures are reported via the callback.
     */
    public boolean initSession(BranchUniversalReferralInitListener callback, boolean isReferrable, Activity activity) {
        // Terminal overload of this family: hands off directly to the internal initialiser.
        initUserSessionInternal(callback, activity, isReferrable);
        return true;
    }
    /**
     * <p>Initialises a session with the Branch API.</p>
     *
     * @param callback     A {@link BranchReferralInitListener} instance that will be called
     *                     following successful (or unsuccessful) initialisation of the session
     *                     with the Branch API.
     * @param isReferrable A {@link Boolean} value indicating whether this initialisation
     *                     session should be considered as potentially referrable or not.
     *                     By default, a user is only referrable if initSession results in a
     *                     fresh install. Overriding this gives you control of who is referrable.
     * @param activity     The calling {@link Activity} for context.
     * @return A {@link Boolean} value; always <i>true</i> — failures are reported via the callback.
     */
    public boolean initSession(BranchReferralInitListener callback, boolean isReferrable, Activity activity) {
        // Terminal overload of this family: hands off directly to the internal initialiser.
        initUserSessionInternal(callback, activity, isReferrable);
        return true;
    }
private void initUserSessionInternal(BranchUniversalReferralInitListener callback, Activity activity, boolean isReferrable) {
BranchUniversalReferralInitWrapper branchUniversalReferralInitWrapper = new BranchUniversalReferralInitWrapper(callback);
initUserSessionInternal(branchUniversalReferralInitWrapper, activity, isReferrable);
}
    /**
     * Core session initialiser shared by every public {@code initSession} overload.
     * Behaviour depends on the current {@code initState_}:
     * already-initialised sessions only replay (or suppress) callback data; an in-flight
     * initialisation just swaps in the new callback; otherwise a fresh initialisation starts.
     *
     * @param callback     listener for the init result; may be null.
     * @param activity     calling activity, retained weakly for context; may be null.
     * @param isReferrable whether this session may count as referrable.
     */
    private void initUserSessionInternal(BranchReferralInitListener callback, Activity activity, boolean isReferrable) {
        if (activity != null) {
            // Weak reference: avoid leaking the Activity while a request is in flight.
            currentActivityReference_ = new WeakReference<>(activity);
        }
        //If already initialised
        if (hasUser() && hasSession() && initState_ == SESSION_STATE.INITIALISED) {
            if (callback != null) {
                if (isAutoSessionMode_) {
                    // Since Auto session mode initialises the session by itself on starting the first activity, we need to provide the user
                    // the referring params if they call init session after init is completed. Note that the user won't do InitSession per activity in auto session mode.
                    if (!isInitReportedThroughCallBack) { //Check if session params are reported already in case user calls initSession from a different activity (not a normal case)
                        callback.onInitFinished(getLatestReferringParams(), null);
                        isInitReportedThroughCallBack = true;
                    } else {
                        // Params were already delivered once; hand back an empty object.
                        callback.onInitFinished(new JSONObject(), null);
                    }
                } else {
                    // Since the user will do init session per activity in non auto session mode, we don't want to repeat the referring params with each initSession() call.
                    callback.onInitFinished(new JSONObject(), null);
                }
            }
        }
        //If uninitialised or initialising
        else {
            // In case of Auto session, init will be called from Branch before the user. So the initialising
            // state also needs to honour the isReferrable value.
            if (isReferrable) {
                this.prefHelper_.setIsReferrable();
            } else {
                this.prefHelper_.clearIsReferrable();
            }
            //If initialising, then set new callbacks.
            if (initState_ == SESSION_STATE.INITIALISING) {
                if (callback != null) {
                    requestQueue_.setInstallOrOpenCallback(callback);
                }
            }
            //if Uninitialised move request to the front if there is an existing request or create a new request.
            else {
                initState_ = SESSION_STATE.INITIALISING;
                initializeSession(callback);
            }
        }
    }
/**
* <p>Closes the current session, dependent on the state of the
* PrefHelper#getSmartSession() {@link Boolean} value. If <i>true</i>, take no action.
* If false, close the session via the {@link #executeClose()} method.</p>
* <p>Note that if smartSession is enabled, closeSession cannot be called within
* a 2 second time span of another Branch action. This has to do with the method that
* Branch uses to keep a session alive during Activity transitions</p>
*
* @deprecated This method is deprecated from SDK v1.14.6. Session Start and close are automatically handled by Branch.
* In case you need to handle sessions manually inorder to support minimum sdk version less than 14 please consider using
* SDK version 1.14.5
*/
public void closeSession() {
Log.w("BranchSDK", "closeSession() method is deprecated from SDK v1.14.6.Session is automatically handled by Branch." +
"In case you need to handle sessions manually inorder to support minimum sdk version less than 14 please consider using " +
" SDK version 1.14.5");
}
    /*
     * <p>Closes the current session. Should be called on getting the last activity's onStop() event.
     * </p>
     */
    private void closeSessionInternal() {
        // Report the close to the API first, then drop the referred-link state.
        executeClose();
        sessionReferredLink_ = null;
        // If app listing is enabled and not already scheduled, schedule the installed-apps report.
        if (prefHelper_.getExternAppListing()) {
            if (appListingSchedule_ == null) {
                scheduleListOfApps();
            }
        }
    }
    /**
     * <p>
     * Enabled Strong matching check using chrome cookies. This method should be called before
     * Branch#getAutoInstance(Context).</p>
     *
     * @param cookieMatchDomain The domain for the url used to match the cookie (eg. example.app.link)
     */
    public static void enableCookieBasedMatching(String cookieMatchDomain) {
        // Static: consumed during the next Branch initialisation.
        cookieBasedMatchDomain_ = cookieMatchDomain;
    }
    /**
     * <p>
     * Enabled Strong matching check using chrome cookies. This method should be called before
     * Branch#getAutoInstance(Context).</p>
     *
     * @param cookieMatchDomain The domain for the url used to match the cookie (eg. example.app.link)
     * @param delay             Time in millisecond to wait for the strong match check to finish before Branch init session is called.
     *                          Default time is 750 msec.
     */
    public static void enableCookieBasedMatching(String cookieMatchDomain, int delay) {
        cookieBasedMatchDomain_ = cookieMatchDomain;
        // Configure the strong-match helper singleton with the custom delay.
        BranchStrongMatchHelper.getInstance().setStrongMatchUrlHitDelay(delay);
    }
/**
* <p>Perform the state-safe actions required to terminate any open session, and report the
* closed application event to the Branch API.</p>
*/
private void executeClose() {
if (initState_ != SESSION_STATE.UNINITIALISED) {
if (!hasNetwork_) {
// if there's no network connectivity, purge the old install/open
ServerRequest req = requestQueue_.peek();
if (req != null && (req instanceof ServerRequestRegisterInstall) || (req instanceof ServerRequestRegisterOpen)) {
requestQueue_.dequeue();
}
} else {
if (!requestQueue_.containsClose()) {
ServerRequest req = new ServerRequestRegisterClose(context_);
handleNewRequest(req);
}
}
initState_ = SESSION_STATE.UNINITIALISED;
}
}
    /**
     * Inspects the launch {@link Uri}/{@link Intent}, records Branch-relevant parameters
     * (external intent URI/extras, push identifier, link click id, app link) into prefs,
     * and strips the consumed parameters from the activity's intent so they are not
     * re-processed on relaunch.
     *
     * @param data     the launch Uri; may be null.
     * @param activity the launching activity whose intent is read and possibly rewritten; may be null.
     * @return {@code true} only when a link click id was found and stripped from the intent;
     *         {@code false} otherwise (including the push-notification and app-link paths,
     *         which mark the intent with BranchLinkUsed instead).
     */
    private boolean readAndStripParam(Uri data, Activity activity) {
        // Only act once the intent is ready to be read; otherwise fall through to `return false`.
        if (intentState_ == INTENT_STATE.READY) {
            // Capture the intent URI and extra for analytics in case started by external intents such as google app search
            try {
                if (data != null) {
                    boolean foundSchemeMatch;
                    boolean skipThisHost = false;
                    // Empty white list means "accept every scheme".
                    if (externalUriWhiteList_.size() > 0) {
                        foundSchemeMatch = externalUriWhiteList_.contains(data.getScheme());
                    } else {
                        foundSchemeMatch = true;
                    }
                    if (skipExternalUriHosts_.size() > 0) {
                        for (String host : skipExternalUriHosts_) {
                            String externalHost = data.getHost();
                            if (externalHost != null && externalHost.equals(host)) {
                                skipThisHost = true;
                                break;
                            }
                        }
                    }
                    if (foundSchemeMatch && !skipThisHost) {
                        sessionReferredLink_ = data.toString();
                        prefHelper_.setExternalIntentUri(data.toString());
                        if (activity != null && activity.getIntent() != null && activity.getIntent().getExtras() != null) {
                            Bundle bundle = activity.getIntent().getExtras();
                            Set<String> extraKeys = bundle.keySet();
                            if (extraKeys.size() > 0) {
                                // Persist only white-listed intent extras, serialised as JSON.
                                JSONObject extrasJson = new JSONObject();
                                for (String key : EXTERNAL_INTENT_EXTRA_KEY_WHITE_LIST) {
                                    if (extraKeys.contains(key)) {
                                        extrasJson.put(key, bundle.get(key));
                                    }
                                }
                                if (extrasJson.length() > 0) {
                                    prefHelper_.setExternalIntentExtra(extrasJson.toString());
                                }
                            }
                        }
                    }
                }
            } catch (Exception ignore) {
                // Best-effort analytics capture; failures here must not break session init.
            }
            //Check for any push identifier in case app is launched by a push notification
            try {
                if (activity != null && activity.getIntent() != null && activity.getIntent().getExtras() != null) {
                    // BranchLinkUsed marks an intent already consumed by Branch (e.g. relaunch from recents).
                    if (activity.getIntent().getExtras().getBoolean(Defines.Jsonkey.BranchLinkUsed.getKey()) == false) {
                        String pushIdentifier = activity.getIntent().getExtras().getString(Defines.Jsonkey.AndroidPushNotificationKey.getKey()); // This seems producing unmarshalling errors in some corner cases
                        if (pushIdentifier != null && pushIdentifier.length() > 0) {
                            prefHelper_.setPushIdentifier(pushIdentifier);
                            // Mark the intent so the identifier is not re-read on the next launch.
                            Intent thisIntent = activity.getIntent();
                            thisIntent.putExtra(Defines.Jsonkey.BranchLinkUsed.getKey(), true);
                            activity.setIntent(thisIntent);
                            return false;
                        }
                    }
                }
            } catch (Exception ignore) {
                // NOTE(review): swallows the unmarshalling errors mentioned above — intentional best-effort.
            }
            //Check for link click id or app link
            if (data != null && data.isHierarchical() && activity != null) {
                try {
                    if (data.getQueryParameter(Defines.Jsonkey.LinkClickID.getKey()) != null) {
                        prefHelper_.setLinkClickIdentifier(data.getQueryParameter(Defines.Jsonkey.LinkClickID.getKey()));
                        // Build the exact "link_click_id=..." fragment so it can be removed from the intent's data string.
                        String paramString = "link_click_id=" + data.getQueryParameter(Defines.Jsonkey.LinkClickID.getKey());
                        String uriString = null;
                        if (activity.getIntent() != null) {
                            uriString = activity.getIntent().getDataString();
                        }
                        // Decide whether the param is the whole query, the trailing param, or a leading/middle one,
                        // and adjust the removal pattern ("?", "&" prefix/suffix) accordingly.
                        if (data.getQuery().length() == paramString.length()) {
                            paramString = "\\?" + paramString;
                        } else if (uriString != null && (uriString.length() - paramString.length()) == uriString.indexOf(paramString)) {
                            paramString = "&" + paramString;
                        } else {
                            paramString = paramString + "&";
                        }
                        if (uriString != null) {
                            Uri newData = Uri.parse(uriString.replaceFirst(paramString, ""));
                            activity.getIntent().setData(newData);
                        } else {
                            Log.w(TAG, "Branch Warning. URI for the launcher activity is null. Please make sure that intent data is not set to null before calling Branch#InitSession ");
                        }
                        return true;
                    } else {
                        // Check if the clicked url is an app link pointing to this app
                        String scheme = data.getScheme();
                        Intent intent = activity.getIntent();
                        if (scheme != null && intent != null) {
                            // On Launching app from the recent apps, Android Start the app with the original intent data. So up in opening app from recent list
                            // Intent will have App link in data and lead to issue of getting wrong parameters. (In case of link click id since we are looking for actual link click on back end this case will never happen)
                            if ((intent.getFlags() & Intent.FLAG_ACTIVITY_LAUNCHED_FROM_HISTORY) == 0) {
                                if ((scheme.equalsIgnoreCase("http") || scheme.equalsIgnoreCase("https"))
                                        && data.getHost() != null && data.getHost().length() > 0 && !intent.getBooleanExtra(Defines.Jsonkey.BranchLinkUsed.getKey(), false)) {
                                    prefHelper_.setAppLink(data.toString());
                                    intent.putExtra(Defines.Jsonkey.BranchLinkUsed.getKey(), true);
                                    activity.setIntent(intent);
                                    return false;
                                }
                            }
                        }
                    }
                } catch (Exception ignore) {
                    // Best-effort: malformed URIs/intents must not crash session init.
                }
            }
        }
        return false;
    }
    // Callback fired when the Google Advertising ID fetch completes. Unblocks queued
    // requests waiting on the GAID, then either runs the deferred cookie-based strong
    // match (if one was requested while the fetch was in flight) or resumes the queue.
    @Override
    public void onGAdsFetchFinished() {
        isGAParamsFetchInProgress_ = false;
        requestQueue_.unlockProcessWait(ServerRequest.PROCESS_WAIT_LOCK.GAID_FETCH_WAIT_LOCK);
        if (performCookieBasedStrongMatchingOnGAIDAvailable) {
            performCookieBasedStrongMatch();
            performCookieBasedStrongMatchingOnGAIDAvailable = false;
        } else {
            processNextQueueItem();
        }
    }
    // Callback fired when install-referrer capture completes: releases the corresponding
    // queue wait lock and resumes request processing.
    @Override
    public void onInstallReferrerEventsFinished() {
        requestQueue_.unlockProcessWait(ServerRequest.PROCESS_WAIT_LOCK.INSTALL_REFERRER_FETCH_WAIT_LOCK);
        processNextQueueItem();
    }
/**
* Add the given URI Scheme to the external Uri white list. Branch will collect
* external intent uri only if white list matches with the app opened URL properties
* If no URI is added to the white list branch will collect all external intent uris.
* White list schemes should be added immediately after calling {@link Branch#getAutoInstance(Context)}
*
* @param uriScheme {@link String} Case sensitive Uri scheme to be added to the external intent uri white list.(eg. "my_scheme://")
* @return {@link Branch} instance for successive method calls
*/
public Branch addWhiteListedScheme(String uriScheme) {
if (uriScheme == null) {
return this;
}
uriScheme = uriScheme.replace("://", "");
externalUriWhiteList_.add(uriScheme);
return this;
}
/**
* <p>Set the given list of URI Scheme as the external Uri white list. Branch will collect
* external intent uri only for Uris in white list.
* </p>
* If no URI is added to the white list branch will collect all external intent uris
* White list should be set immediately after calling {@link Branch#getAutoInstance(Context)}
* <!-- @param uriSchemes {@link List<String>} List of case sensitive Uri schemes to set as the white list -->
*
* @return {@link Branch} instance for successive method calls
*/
public Branch setWhiteListedSchemes(List<String> uriSchemes) {
externalUriWhiteList_ = uriSchemes;
return this;
}
/**
* <p>
* Add the given URI host to the external Uri skip list. Branch will not collect
* external intent uri if skip list contains with the app opened URL.
* If no host is added to the skip list, Branch will collect all external Intent uris.
* Skip list hosts should be added immediately after calling {@link Branch#getAutoInstance(Context)}.
* </p>
*
* @param hostName {@link String} Case sensitive Uri path to be added to the external Intent uri skip list. (e.g. "product" to skip my-scheme://product/*)
* @return {@link Branch} instance for successive method calls
*/
public Branch addUriHostsToSkip(String hostName) {
if ((hostName != null) && (!hostName.equals("")))
skipExternalUriHosts_.add(hostName);
return this;
}
/**
* <p>Identifies the current user to the Branch API by supplying a unique identifier as a
* {@link String} value. No callback.</p>
*
* @param userId A {@link String} value containing the unique identifier of the user.
*/
public void setIdentity(@NonNull String userId) {
setIdentity(userId, null);
}
    /**
     * <p>Identifies the current user to the Branch API by supplying a unique identifier as a
     * {@link String} value, with a callback specified to perform a defined action upon successful
     * response to request.</p>
     *
     * @param userId   A {@link String} value containing the unique identifier of the user.
     * @param callback A {@link BranchReferralInitListener} callback instance that will return
     *                 the data associated with the user id being assigned, if available.
     */
    public void setIdentity(@NonNull String userId, @Nullable BranchReferralInitListener
            callback) {
        ServerRequest req = new ServerRequestIdentifyUserRequest(context_, callback, userId);
        if (!req.constructError_ && !req.handleErrors(context_)) {
            handleNewRequest(req);
        } else {
            // Request was not queued: if the id is already the current identity, short-circuit
            // and report success to the caller without a network round trip.
            if (((ServerRequestIdentifyUserRequest) req).isExistingID()) {
                ((ServerRequestIdentifyUserRequest) req).handleUserExist(branchReferral_);
            }
        }
    }
/**
* Indicates whether or not this user has a custom identity specified for them. Note that this is independent of installs.
* If you call setIdentity, this device will have that identity associated with this user until logout is called.
* This includes persisting through uninstalls, as we track device id.
*
* @return A {@link Boolean} value that will return <i>true</i> only if user already has an identity.
*/
public boolean isUserIdentified() {
return !prefHelper_.getIdentity().equals(PrefHelper.NO_STRING_VALUE);
}
/**
* <p>This method should be called if you know that a different person is about to use the app. For example,
* if you allow users to log out and let their friend use the app, you should call this to notify Branch
* to create a new user for this device. This will clear the first and latest params, as a new session is created.</p>
*/
public void logout() {
logout(null);
}
/**
* <p>This method should be called if you know that a different person is about to use the app. For example,
* if you allow users to log out and let their friend use the app, you should call this to notify Branch
* to create a new user for this device. This will clear the first and latest params, as a new session is created.</p>
*
* @param callback An instance of {@link io.branch.referral.Branch.LogoutStatusListener} to callback with the logout operation status.
*/
public void logout(LogoutStatusListener callback) {
ServerRequest req = new ServerRequestLogout(context_, callback);
if (!req.constructError_ && !req.handleErrors(context_)) {
handleNewRequest(req);
}
}
/**
* <p>Fire-and-forget retrieval of rewards for the current session. Without a callback.</p>
*/
public void loadRewards() {
loadRewards(null);
}
/**
* <p>Retrieves rewards for the current session, with a callback to perform a predefined
* action following successful report of state change. You'll then need to call getCredits
* in the callback to update the credit totals in your UX.</p>
*
* @param callback A {@link BranchReferralStateChangedListener} callback instance that will
* trigger actions defined therein upon a referral state change.
*/
public void loadRewards(BranchReferralStateChangedListener callback) {
ServerRequest req = new ServerRequestGetRewards(context_, callback);
if (!req.constructError_ && !req.handleErrors(context_)) {
handleNewRequest(req);
}
}
    /**
     * <p>Retrieve the number of credits available for the "default" bucket.</p>
     *
     * @return An {@link Integer} value of the number credits available in the "default" bucket.
     */
    public int getCredits() {
        // Reads the locally cached count; refresh it first via loadRewards().
        return prefHelper_.getCreditCount();
    }
/**
 * Returns an {@link Integer} of the number of credits available for use within the supplied
 * bucket name.
 *
 * @param bucket A {@link String} value indicating the name of the bucket to get credits for.
 * @return An {@link Integer} value of the number credits available in the specified
 * bucket.
 */
public int getCreditsForBucket(String bucket) {
// Locally cached per-bucket count; loadRewards() refreshes the cache from the server.
return prefHelper_.getCreditCount(bucket);
}
/**
 * <p>Redeems the specified number of credits from the "default" bucket, if there are sufficient
 * credits within it. If the number to redeem exceeds the number available in the bucket, all of
 * the available credits will be redeemed instead.</p>
 *
 * @param count A {@link Integer} specifying the number of credits to attempt to redeem from
 *              the bucket.
 */
public void redeemRewards(int count) {
    // Forward to the full overload: default bucket, no state-change listener.
    redeemRewards(Defines.Jsonkey.DefaultBucket.getKey(), count, (BranchReferralStateChangedListener) null);
}
/**
 * <p>Redeems the specified number of credits from the "default" bucket, if there are sufficient
 * credits within it. If the number to redeem exceeds the number available in the bucket, all of
 * the available credits will be redeemed instead.</p>
 *
 * @param count A {@link Integer} specifying the number of credits to attempt to redeem from
 * the bucket.
 * @param callback A {@link BranchReferralStateChangedListener} callback instance that will
 * trigger actions defined therein upon a executing redeem rewards.
 */
public void redeemRewards(int count, BranchReferralStateChangedListener callback) {
// Delegates to the bucket-aware overload, targeting the "default" bucket.
redeemRewards(Defines.Jsonkey.DefaultBucket.getKey(), count, callback);
}
/**
 * <p>Redeems the specified number of credits from the named bucket, if there are sufficient
 * credits within it. If the number to redeem exceeds the number available in the bucket, all of
 * the available credits will be redeemed instead.</p>
 *
 * @param bucket A {@link String} value containing the name of the referral bucket to attempt
 * to redeem credits from.
 * @param count A {@link Integer} specifying the number of credits to attempt to redeem from
 * the specified bucket.
 */
public void redeemRewards(@NonNull final String bucket, final int count) {
// Delegates to the full overload with no state-change listener.
redeemRewards(bucket, count, null);
}
/**
 * <p>Redeems up to {@code count} credits from the named bucket. If fewer credits are available
 * than requested, all available credits are redeemed instead.</p>
 *
 * @param bucket   A {@link String} value containing the name of the referral bucket to attempt
 *                 to redeem credits from.
 * @param count    A {@link Integer} specifying the number of credits to attempt to redeem from
 *                 the specified bucket.
 * @param callback A {@link BranchReferralStateChangedListener} callback instance that will
 *                 trigger actions defined therein upon executing redeem rewards, or null.
 */
public void redeemRewards(@NonNull final String bucket,
                          final int count, BranchReferralStateChangedListener callback) {
    ServerRequestRedeemRewards redeemRequest = new ServerRequestRedeemRewards(context_, bucket, count, callback);
    boolean requestOk = !redeemRequest.constructError_ && !redeemRequest.handleErrors(context_);
    if (requestOk) {
        handleNewRequest(redeemRequest);
    }
}
/**
 * <p>Gets the credit history of the specified bucket and triggers a callback to handle the
 * response.</p>
 *
 * @param callback A {@link BranchListResponseListener} callback instance that will trigger
 * actions defined therein upon receipt of a response to a create link request.
 */
public void getCreditHistory(BranchListResponseListener callback) {
// null bucket + null afterId: unfiltered history; up to 100 records, most recent first.
getCreditHistory(null, null, 100, CreditHistoryOrder.kMostRecentFirst, callback);
}
/**
 * <p>Gets the credit history of the specified bucket and triggers a callback to handle the
 * response.</p>
 *
 * @param bucket A {@link String} value containing the name of the referral bucket that the
 * code will belong to.
 * @param callback A {@link BranchListResponseListener} callback instance that will trigger
 * actions defined therein upon receipt of a response to a create link request.
 */
public void getCreditHistory(@NonNull final String bucket, BranchListResponseListener
callback) {
// null afterId: start from the beginning; up to 100 records, most recent first.
getCreditHistory(bucket, null, 100, CreditHistoryOrder.kMostRecentFirst, callback);
}
/**
 * <p>Gets the credit history of the specified bucket and triggers a callback to handle the
 * response.</p>
 *
 * @param afterId A {@link String} value containing the ID of the history record to begin after.
 * This allows for a partial history to be retrieved, rather than the entire
 * credit history of the bucket.
 * @param length A {@link Integer} value containing the number of credit history records to
 * return.
 * @param order A {@link CreditHistoryOrder} object indicating which order the results should
 * be returned in.
 * <p>Valid choices:</p>
 * <ul>
 * <li>{@link CreditHistoryOrder#kMostRecentFirst}</li>
 * <li>{@link CreditHistoryOrder#kLeastRecentFirst}</li>
 * </ul>
 * @param callback A {@link BranchListResponseListener} callback instance that will trigger
 * actions defined therein upon receipt of a response to a create link request.
 */
public void getCreditHistory(@NonNull final String afterId, final int length,
@NonNull final CreditHistoryOrder order, BranchListResponseListener callback) {
// null bucket: history is not filtered to a single bucket.
getCreditHistory(null, afterId, length, order, callback);
}
/**
 * <p>Gets the credit history of the specified bucket and triggers a callback to handle the
 * response.</p>
 *
 * @param bucket   A {@link String} value containing the name of the referral bucket that the
 *                 code will belong to, or null for an unfiltered history.
 * @param afterId  A {@link String} value containing the ID of the history record to begin
 *                 after, allowing partial history retrieval; null to start from the beginning.
 * @param length   A {@link Integer} value containing the number of credit history records to
 *                 return.
 * @param order    A {@link CreditHistoryOrder} object indicating which order the results should
 *                 be returned in: {@link CreditHistoryOrder#kMostRecentFirst} or
 *                 {@link CreditHistoryOrder#kLeastRecentFirst}.
 * @param callback A {@link BranchListResponseListener} callback instance that will trigger
 *                 actions defined therein upon receipt of a response.
 */
public void getCreditHistory(final String bucket, final String afterId, final int length,
                             @NonNull final CreditHistoryOrder order, BranchListResponseListener callback) {
    ServerRequest historyRequest = new ServerRequestGetRewardHistory(context_, bucket, afterId, length, order, callback);
    // Bail out on construction or tracking/connectivity errors; otherwise enqueue.
    if (historyRequest.constructError_ || historyRequest.handleErrors(context_)) {
        return;
    }
    handleNewRequest(historyRequest);
}
/**
 * <p>A void call to indicate that the user has performed a specific action and for that to be
 * reported to the Branch API, with additional app-defined meta data to go along with that action.</p>
 *
 * @param action A {@link String} value to be passed as an action that the user has carried
 * out. For example "registered" or "logged in".
 * @param metadata A {@link JSONObject} containing app-defined meta-data to be attached to a
 * user action that has just been completed.
 */
public void userCompletedAction(@NonNull final String action, JSONObject metadata) {
// Delegates with no Branch view event listener.
userCompletedAction(action, metadata, null);
}
/**
 * <p>A void call to indicate that the user has performed a specific action and for that to be
 * reported to the Branch API.</p>
 *
 * @param action A {@link String} value to be passed as an action that the user has carried
 * out. For example "registered" or "logged in".
 */
public void userCompletedAction(final String action) {
// Delegates with no metadata and no Branch view event listener.
userCompletedAction(action, null, null);
}
/**
 * <p>A void call to indicate that the user has performed a specific action and for that to be
 * reported to the Branch API.</p>
 *
 * @param action A {@link String} value to be passed as an action that the user has carried
 * out. For example "registered" or "logged in".
 * @param callback instance of {@link BranchViewHandler.IBranchViewEvents} to listen Branch view events
 */
public void userCompletedAction(final String action, BranchViewHandler.
IBranchViewEvents callback) {
// Delegates with no metadata attached.
userCompletedAction(action, null, callback);
}
/**
 * <p>A void call to indicate that the user has performed a specific action and for that to be
 * reported to the Branch API, with additional app-defined meta data to go along with that
 * action.</p>
 *
 * @param action   A {@link String} value to be passed as an action that the user has carried
 *                 out. For example "registered" or "logged in".
 * @param metadata A {@link JSONObject} containing app-defined meta-data to be attached to the
 *                 completed user action; invalid characters are filtered out before sending.
 * @param callback instance of {@link BranchViewHandler.IBranchViewEvents} to listen Branch
 *                 view events, or null.
 */
public void userCompletedAction(@NonNull final String action, JSONObject
        metadata, BranchViewHandler.IBranchViewEvents callback) {
    JSONObject sanitizedMetadata = (metadata == null) ? null : BranchUtil.filterOutBadCharacters(metadata);
    ServerRequest actionRequest = new ServerRequestActionCompleted(context_, action, sanitizedMetadata, callback);
    boolean requestOk = !actionRequest.constructError_ && !actionRequest.handleErrors(context_);
    if (requestOk) {
        handleNewRequest(actionRequest);
    }
}
/**
 * <p>Reports a commerce event to the Branch API, with optional app-defined metadata attached
 * and a listener for Branch view events.</p>
 *
 * @param commerceEvent A {@link CommerceEvent} describing the transaction to report.
 * @param metadata      A {@link JSONObject} of app-defined metadata, or null. Invalid
 *                      characters are filtered out before sending.
 * @param callback      An instance of {@link BranchViewHandler.IBranchViewEvents} to listen to
 *                      Branch view events, or null.
 */
public void sendCommerceEvent(@NonNull CommerceEvent commerceEvent, JSONObject
metadata, BranchViewHandler.IBranchViewEvents callback) {
if (metadata != null) {
metadata = BranchUtil.filterOutBadCharacters(metadata);
}
ServerRequest req = new ServerRequestRActionCompleted(context_, commerceEvent, metadata, callback);
if (!req.constructError_ && !req.handleErrors(context_)) {
handleNewRequest(req);
}
}
/**
 * <p>Reports a commerce event to the Branch API with no metadata and no Branch view listener.</p>
 *
 * @param commerceEvent A {@link CommerceEvent} describing the transaction to report.
 */
public void sendCommerceEvent(@NonNull CommerceEvent commerceEvent) {
sendCommerceEvent(commerceEvent, null, null);
}
/**
 * <p>Returns the parameters associated with the link that referred the user. This is only set
 * once, the first time the user is referred by a link — think of it as the user's install
 * referral parameters. It is only set if isReferrable is true, which by default is only true
 * on a fresh install (not upgrade or reinstall). This can change on setIdentity (if the user
 * already exists from a previous device) and logout.</p>
 *
 * @return A {@link JSONObject} containing the install-time parameters as configured locally.
 */
public JSONObject getFirstReferringParams() {
    // Install params are persisted as a string; decode, then overlay any deep-link debug params.
    return appendDebugParams(convertParamsStringToDictionary(prefHelper_.getInstallParams()));
}
/**
 * <p>This function must be called from a non-UI thread! If Branch has no install link data
 * when this is called, it blocks until initialization delivers it, or at most
 * LATCH_WAIT_UNTIL milliseconds.
 * Returns the parameters associated with the link that referred the user. This is only set
 * once, the first time the user is referred by a link, and only if isReferrable is true
 * (by default only on a fresh install). This can change on setIdentity and logout.</p>
 *
 * @return A {@link JSONObject} containing the install-time parameters as configured
 * locally. May be empty if no install params arrived within the wait window.
 */
public JSONObject getFirstReferringParamsSync() {
    getFirstReferringParamsLatch = new CountDownLatch(1);
    if (prefHelper_.getInstallParams().equals(PrefHelper.NO_STRING_VALUE)) {
        try {
            getFirstReferringParamsLatch.await(LATCH_WAIT_UNTIL, TimeUnit.MILLISECONDS);
        } catch (InterruptedException e) {
            // Restore the interrupt status so callers can observe the interruption;
            // fall through and return whatever params are currently stored.
            Thread.currentThread().interrupt();
        }
    }
    String storedParam = prefHelper_.getInstallParams();
    JSONObject firstReferringParams = convertParamsStringToDictionary(storedParam);
    firstReferringParams = appendDebugParams(firstReferringParams);
    getFirstReferringParamsLatch = null;
    return firstReferringParams;
}
/**
 * <p>Returns the parameters associated with the link that referred the session. If a user
 * clicks a link and then opens the app, initSession returns the link's parameters and stores
 * them as the latest parameters, retrievable here. By default, sessions persist for the
 * duration that the app is in focus; minimizing the app clears these parameters when
 * closeSession is called.</p>
 *
 * @return A {@link JSONObject} containing the latest referring parameters as configured locally.
 */
public JSONObject getLatestReferringParams() {
    // Session params are persisted as a string; decode, then overlay any deep-link debug params.
    return appendDebugParams(convertParamsStringToDictionary(prefHelper_.getSessionParams()));
}
/**
 * <p>This function must be called from a non-UI thread! If Branch has not been initialized
 * when this is called, it blocks until initialization completes, or at most
 * LATCH_WAIT_UNTIL milliseconds.
 * Returns the parameters associated with the link that referred the session (see
 * {@link #getLatestReferringParams()} for the semantics).</p>
 *
 * @return A {@link JSONObject} containing the latest referring parameters as configured
 * locally. May be empty if initialization did not finish within the wait window.
 */
public JSONObject getLatestReferringParamsSync() {
    getLatestReferringParamsLatch = new CountDownLatch(1);
    try {
        if (initState_ != SESSION_STATE.INITIALISED) {
            getLatestReferringParamsLatch.await(LATCH_WAIT_UNTIL, TimeUnit.MILLISECONDS);
        }
    } catch (InterruptedException e) {
        // Restore the interrupt status so callers can observe the interruption;
        // fall through and return whatever params are currently stored.
        Thread.currentThread().interrupt();
    }
    String storedParam = prefHelper_.getSessionParams();
    JSONObject latestParams = convertParamsStringToDictionary(storedParam);
    latestParams = appendDebugParams(latestParams);
    getLatestReferringParamsLatch = null;
    return latestParams;
}
/**
 * Append the deep link debug params to the original params.
 *
 * @param originalParams A {@link JSONObject} of original referrer parameters; may be null.
 * @return The same {@link JSONObject}, mutated with debug params appended (or null if the
 * input was null).
 */
private JSONObject appendDebugParams(JSONObject originalParams) {
    try {
        if (originalParams == null || deeplinkDebugParams_ == null) {
            return originalParams;
        }
        if (deeplinkDebugParams_.length() > 0) {
            Log.w(TAG, "You're currently in deep link debug mode. Please comment out 'setDeepLinkDebugMode' to receive the deep link parameters from a real Branch link");
        }
        // Debug entries overwrite any real params that share the same key.
        for (Iterator<String> keyIterator = deeplinkDebugParams_.keys(); keyIterator.hasNext(); ) {
            String debugKey = keyIterator.next();
            originalParams.put(debugKey, deeplinkDebugParams_.get(debugKey));
        }
    } catch (Exception ignore) {
        // Best effort: a malformed debug entry must never break param retrieval.
    }
    return originalParams;
}
// Accessor for the deep-link debug params (set via 'setDeepLinkDebugMode' per the warning
// text). Warns when non-empty because debug params shadow real link parameters.
public JSONObject getDeeplinkDebugParams() {
if (deeplinkDebugParams_ != null && deeplinkDebugParams_.length() > 0) {
Log.w(TAG, "You're currently in deep link debug mode. Please comment out 'setDeepLinkDebugMode' to receive the deep link parameters from a real Branch link");
}
return deeplinkDebugParams_;
}
//-----------------Generate Short URL -------------------------------------------//
/**
 * <p>Generates a short url for the given {@link ServerRequestCreateUrl} object.</p>
 *
 * @param req An instance of {@link ServerRequestCreateUrl} with parameters to create the
 *            short link.
 * @return A url created with the given request if the request is synchronous, else null.
 * Note : This method can be used only internally. Use {@link BranchUrlBuilder} for creating
 * short urls.
 */
String generateShortLinkInternal(ServerRequestCreateUrl req) {
    // Construction or tracking errors short-circuit to null.
    if (req.constructError_ || req.handleErrors(context_)) {
        return null;
    }
    // Serve identical link requests from the in-memory cache.
    if (linkCache_.containsKey(req.getLinkPost())) {
        String cachedUrl = linkCache_.get(req.getLinkPost());
        req.onUrlAvailable(cachedUrl);
        return cachedUrl;
    }
    if (req.isAsync()) {
        generateShortLinkAsync(req);
        return null;
    }
    return generateShortLinkSync(req);
}
/**
 * <p>Creates options for sharing a link with other Applications. Creates a link with the given
 * attributes and shares it with the user-selected clients.</p>
 *
 * @param builder A {@link io.branch.referral.Branch.ShareLinkBuilder} instance to build the
 *                share link.
 */
private void shareLink(ShareLinkBuilder builder) {
    // Only one share flow may be active at a time: dismiss any dialog already showing.
    if (shareLinkManager_ != null) {
        shareLinkManager_.cancelShareLinkDialog(true);
    }
    ShareLinkManager freshManager = new ShareLinkManager();
    shareLinkManager_ = freshManager;
    freshManager.shareLink(builder);
}
/**
 * <p>Cancel current share link operation and Application selector dialog. If your app is not using auto session management, make sure you are
 * calling this method before your activity finishes inorder to prevent any window leak. </p>
 *
 * @param animateClose A {@link Boolean} to specify whether to close the dialog with an animation.
 * A value of true will close the dialog with an animation. Setting this value
 * to false will close the Dialog immediately.
 */
public void cancelShareLinkDialog(boolean animateClose) {
// No-op when no share operation is currently in progress.
if (shareLinkManager_ != null) {
shareLinkManager_.cancelShareLinkDialog(animateClose);
}
}
// PRIVATE FUNCTIONS
// Formats a Date as "yyyy-MM-dd" using Android's text DateFormat helper.
private String convertDate(Date date) {
return android.text.format.DateFormat.format("yyyy-MM-dd", date).toString();
}
/**
 * Synchronously creates a short link by running the request on a background task and blocking
 * until it completes or times out.
 *
 * @param req the prepared create-url request.
 * @return the short url on success; the request's long url if defaultToLongUrl is set and the
 * server call failed or timed out; otherwise null. Also null when the session is not
 * initialised.
 */
private String generateShortLinkSync(ServerRequestCreateUrl req) {
if (initState_ == SESSION_STATE.INITIALISED) {
ServerResponse response = null;
try {
int timeOut = prefHelper_.getTimeout() + 2000; // Time out is set to slightly more than link creation time to prevent any edge case
response = new getShortLinkTask().execute(req).get(timeOut, TimeUnit.MILLISECONDS);
} catch (InterruptedException | ExecutionException | TimeoutException ignore) {
// Best effort: on any failure, fall through to the long-url fallback below.
}
String url = null;
if (req.isDefaultToLongUrl()) {
// Fallback value used when the server did not return a short url in time.
url = req.getLongUrl();
}
if (response != null && response.getStatusCode() == HttpURLConnection.HTTP_OK) {
try {
url = response.getObject().getString("url");
if (req.getLinkPost() != null) {
// Cache so that identical link requests are served without a network round trip.
linkCache_.put(req.getLinkPost(), url);
}
} catch (JSONException e) {
e.printStackTrace();
}
}
return url;
} else {
Log.i("BranchSDK", "Branch Warning: User session has not been initialized");
}
return null;
}
// Enqueues the create-url request; the result is delivered via the request's own callback.
private void generateShortLinkAsync(final ServerRequest req) {
handleNewRequest(req);
}
/**
 * Parses a stored parameter string into a {@link JSONObject}. If direct parsing fails, the
 * string is Base64-decoded and parsed again (legacy storage format). Returns an empty object
 * when the value is the "no value" placeholder or cannot be parsed at all.
 */
private JSONObject convertParamsStringToDictionary(String paramString) {
if (paramString.equals(PrefHelper.NO_STRING_VALUE)) {
return new JSONObject();
} else {
try {
return new JSONObject(paramString);
} catch (JSONException e) {
byte[] encodedArray = Base64.decode(paramString.getBytes(), Base64.NO_WRAP);
try {
// NOTE(review): getBytes()/new String(byte[]) use the platform default charset
// (UTF-8 on Android) — confirm if this code ever runs elsewhere.
return new JSONObject(new String(encodedArray));
} catch (JSONException ex) {
ex.printStackTrace();
return new JSONObject();
}
}
}
}
/**
 * <p>Schedules a repeating threaded task to get the following details and report them to the
 * Branch API <b>once a week</b>:</p>
 * <p/>
 * <pre style="background:#fff;padding:10px;border:2px solid silver;">
 * int interval = 7 * 24 * 60 * 60;
 * appListingSchedule_ = scheduler.scheduleAtFixedRate(
 * periodicTask, (days * 24 + hours) * 60 * 60, interval, TimeUnit.SECONDS);</pre>
 * <p/>
 * <ul>
 * <li>{@link SystemObserver#getOS()}</li>
 * <li>{@link SystemObserver#getListOfApps()}</li>
 * </ul>
 *
 * @see {@link SystemObserver}
 * @see {@link PrefHelper}
 */
private void scheduleListOfApps() {
// Executors.newScheduledThreadPool returns a ScheduledThreadPoolExecutor, so this cast is safe.
ScheduledThreadPoolExecutor scheduler = (ScheduledThreadPoolExecutor) Executors.newScheduledThreadPool(1);
Runnable periodicTask = new Runnable() {
@Override
public void run() {
ServerRequest req = new ServerRequestSendAppList(context_);
if (!req.constructError_ && !req.handleErrors(context_)) {
handleNewRequest(req);
}
}
};
// Compute the initial delay so the task first fires at the next Saturday 2am.
Date date = new Date();
Calendar calendar = GregorianCalendar.getInstance();
calendar.setTime(date);
int days = Calendar.SATURDAY - calendar.get(Calendar.DAY_OF_WEEK); // days to Saturday
int hours = 2 - calendar.get(Calendar.HOUR_OF_DAY); // hours to 2am, can be negative
if (days == 0 && hours < 0) {
// Already past 2am on Saturday: defer to next week so the delay stays non-negative.
days = 7;
}
int interval = 7 * 24 * 60 * 60;
appListingSchedule_ = scheduler.scheduleAtFixedRate(periodicTask, (days * 24 + hours) * 60 * 60, interval, TimeUnit.SECONDS);
}
/**
 * Dispatches the next queued server request, at most one at a time.
 * The semaphore serializes queue inspection; networkCount_ acts as the "request in flight"
 * flag and is reset to 0 on the failure paths so the queue is not permanently stalled.
 */
private void processNextQueueItem() {
try {
serverSema_.acquire();
if (networkCount_ == 0 && requestQueue_.getSize() > 0) {
networkCount_ = 1;
ServerRequest req = requestQueue_.peek();
// Release before dispatch: the network work must not run under the semaphore.
serverSema_.release();
if (req != null) {
if (!req.isWaitingOnProcessToFinish()) {
// All request except Install request need a valid IdentityID
if (!(req instanceof ServerRequestRegisterInstall) && !hasUser()) {
Log.i("BranchSDK", "Branch Error: User session has not been initialized!");
networkCount_ = 0;
handleFailure(requestQueue_.getSize() - 1, BranchError.ERR_NO_SESSION);
}
//All request except open and install need a session to execute
else if (!(req instanceof ServerRequestInitSession) && (!hasSession() || !hasDeviceFingerPrint())) {
networkCount_ = 0;
handleFailure(requestQueue_.getSize() - 1, BranchError.ERR_NO_SESSION);
} else {
BranchPostTask postTask = new BranchPostTask(req);
postTask.executeTask();
}
} else {
// Request is blocked on a process wait lock; leave it queued and clear the flag.
networkCount_ = 0;
}
} else {
requestQueue_.remove(null); //In case there is any request nullified remove it.
}
} else {
// Nothing to do (request already in flight or queue empty); just release the semaphore.
serverSema_.release();
}
} catch (Exception e) {
e.printStackTrace();
}
}
// Fails the request at the given queue index; out-of-range indices are clamped to the last
// queued request.
private void handleFailure(int index, int statusCode) {
    int effectiveIndex = (index >= requestQueue_.getSize()) ? requestQueue_.getSize() - 1 : index;
    handleFailure(requestQueue_.peekAt(effectiveIndex), statusCode);
}
// Propagates the failure to the request's own handler; tolerates a null request.
private void handleFailure(final ServerRequest req, int statusCode) {
    if (req == null) {
        return;
    }
    req.handleFailure(statusCode, "");
}
// Refreshes the session ID, identity ID and device fingerprint fields on every queued
// request's post body, so requests enqueued before init carry the current values.
private void updateAllRequestsInQueue() {
    try {
        for (int idx = 0; idx < requestQueue_.getSize(); idx++) {
            ServerRequest queuedRequest = requestQueue_.peekAt(idx);
            if (queuedRequest == null) {
                continue;
            }
            JSONObject postBody = queuedRequest.getPost();
            if (postBody == null) {
                continue;
            }
            // Only overwrite keys the request already carries.
            if (postBody.has(Defines.Jsonkey.SessionID.getKey())) {
                queuedRequest.getPost().put(Defines.Jsonkey.SessionID.getKey(), prefHelper_.getSessionID());
            }
            if (postBody.has(Defines.Jsonkey.IdentityID.getKey())) {
                queuedRequest.getPost().put(Defines.Jsonkey.IdentityID.getKey(), prefHelper_.getIdentityID());
            }
            if (postBody.has(Defines.Jsonkey.DeviceFingerprintID.getKey())) {
                queuedRequest.getPost().put(Defines.Jsonkey.DeviceFingerprintID.getKey(), prefHelper_.getDeviceFingerPrintID());
            }
        }
    } catch (JSONException e) {
        e.printStackTrace();
    }
}
// True once a real (non-placeholder) session ID has been stored.
private boolean hasSession() {
    String sessionId = prefHelper_.getSessionID();
    return !sessionId.equals(PrefHelper.NO_STRING_VALUE);
}
// True once a real (non-placeholder) device fingerprint ID has been stored.
private boolean hasDeviceFingerPrint() {
    String fingerprintId = prefHelper_.getDeviceFingerPrintID();
    return !fingerprintId.equals(PrefHelper.NO_STRING_VALUE);
}
// True once a real (non-placeholder) identity ID has been stored.
private boolean hasUser() {
    String identityId = prefHelper_.getIdentityID();
    return !identityId.equals(PrefHelper.NO_STRING_VALUE);
}
// Puts the request at the head of the queue; while another request is in flight
// (networkCount_ != 0) the in-flight request keeps index 0, so insert right behind it.
private void insertRequestAtFront(ServerRequest req) {
    requestQueue_.insert(req, networkCount_ == 0 ? 0 : 1);
}
/**
 * Ensures exactly one Install/Open request sits at the front of the queue, attaches the latest
 * callback to it, and kicks the queue.
 */
private void registerInstallOrOpen(ServerRequest req, BranchReferralInitListener callback) {
// If there isn't already an Open / Install request, add one to the queue
if (!requestQueue_.containsInstallOrOpen()) {
insertRequestAtFront(req);
}
// If there is already one in the queue, make sure it's in the front.
// Make sure a callback is associated with this request. This callback can
// be cleared if the app is terminated while an Open/Install is pending.
else {
// Update the callback to the latest one in initsession call
if (callback != null) {
requestQueue_.setInstallOrOpenCallback(callback);
}
requestQueue_.moveInstallOrOpenToFront(req, networkCount_, callback);
}
processNextQueueItem();
}
/**
 * Validates the Branch key and starts session registration. When a Facebook deferred app link
 * check is applicable, the Open/Install request is gated on a wait lock until the app link
 * fetch calls back.
 *
 * @param callback listener to notify with the init result (or the key error), may be null.
 */
private void initializeSession(final BranchReferralInitListener callback) {
if ((prefHelper_.getBranchKey() == null || prefHelper_.getBranchKey().equalsIgnoreCase(PrefHelper.NO_STRING_VALUE))) {
initState_ = SESSION_STATE.UNINITIALISED;
//Report Key error on callback
if (callback != null) {
callback.onInitFinished(null, new BranchError("Trouble initializing Branch.", RemoteInterface.NO_BRANCH_KEY_STATUS));
}
Log.i("BranchSDK", "Branch Warning: Please enter your branch_key in your project's res/values/strings.xml!");
return;
} else if (prefHelper_.getBranchKey() != null && prefHelper_.getBranchKey().startsWith("key_test_")) {
Log.i("BranchSDK", "Branch Warning: You are using your test app's Branch Key. Remember to change it to live Branch Key during deployment.");
}
// Skip the Facebook app-link check when an external intent URI already exists or the
// check is disabled.
if (!prefHelper_.getExternalIntentUri().equals(PrefHelper.NO_STRING_VALUE) || !enableFacebookAppLinkCheck_) {
registerAppInit(callback, null);
} else {
// Check if opened by facebook with deferred install data
boolean appLinkRqSucceeded;
appLinkRqSucceeded = DeferredAppLinkDataHandler.fetchDeferredAppLinkData(context_, new DeferredAppLinkDataHandler.AppLinkFetchEvents() {
@Override
public void onAppLinkFetchFinished(String nativeAppLinkUrl) {
prefHelper_.setIsAppLinkTriggeredInit(true); // callback returns when app link fetch finishes with success or failure. Report app link checked in both cases
if (nativeAppLinkUrl != null) {
Uri appLinkUri = Uri.parse(nativeAppLinkUrl);
String bncLinkClickId = appLinkUri.getQueryParameter(Defines.Jsonkey.LinkClickID.getKey());
if (!TextUtils.isEmpty(bncLinkClickId)) {
prefHelper_.setLinkClickIdentifier(bncLinkClickId);
}
}
// Whatever the outcome, lift the app-link wait lock and resume the queue.
requestQueue_.unlockProcessWait(ServerRequest.PROCESS_WAIT_LOCK.FB_APP_LINK_WAIT_LOCK);
processNextQueueItem();
}
});
if (appLinkRqSucceeded) {
registerAppInit(callback, ServerRequest.PROCESS_WAIT_LOCK.FB_APP_LINK_WAIT_LOCK);
} else {
registerAppInit(callback, null);
}
}
}
/**
 * Builds the Open (existing user) or Install (new user) request, attaches whatever wait locks
 * apply (caller-supplied lock, pending GAID fetch, pending intent, Play Store install
 * referrer), and enqueues it at the front of the queue.
 */
private void registerAppInit(BranchReferralInitListener
callback, ServerRequest.PROCESS_WAIT_LOCK lock) {
ServerRequest request;
if (hasUser()) {
// If there is user this is open
request = new ServerRequestRegisterOpen(context_, callback, kRemoteInterface_.getSystemObserver());
} else {
// If no user this is an Install
request = new ServerRequestRegisterInstall(context_, callback, kRemoteInterface_.getSystemObserver(), InstallListener.getInstallationID());
}
request.addProcessWaitLock(lock);
if (isGAParamsFetchInProgress_) {
// Hold until the Google Advertising ID fetch completes.
request.addProcessWaitLock(ServerRequest.PROCESS_WAIT_LOCK.GAID_FETCH_WAIT_LOCK);
}
if (intentState_ != INTENT_STATE.READY) {
// Hold until the launching intent's data has been read.
request.addProcessWaitLock(ServerRequest.PROCESS_WAIT_LOCK.INTENT_PENDING_WAIT_LOCK);
}
if (checkPlayStoreReferrer() && request instanceof ServerRequestRegisterInstall) {
request.addProcessWaitLock(ServerRequest.PROCESS_WAIT_LOCK.INSTALL_REFERRER_FETCH_WAIT_LOCK);
InstallListener.startInstallReferrerTime(PLAYSTORE_REFERRAL_FETCH_WAIT_FOR);
}
registerInstallOrOpen(request, callback);
}
/**
 * Called once the launching intent's data is available: lifts the intent wait lock, extracts
 * link params from the intent, and either performs cookie-based strong matching (when a match
 * domain is configured and the Branch key is valid) or resumes the request queue directly.
 */
private void onIntentReady(Activity activity) {
requestQueue_.unlockProcessWait(ServerRequest.PROCESS_WAIT_LOCK.INTENT_PENDING_WAIT_LOCK);
if (activity.getIntent() != null) {
Uri intentData = activity.getIntent().getData();
readAndStripParam(intentData, activity);
if (cookieBasedMatchDomain_ != null && prefHelper_.getBranchKey() != null && !prefHelper_.getBranchKey().equalsIgnoreCase(PrefHelper.NO_STRING_VALUE)) {
if (isGAParamsFetchInProgress_) {
// Wait for GAID to Available
performCookieBasedStrongMatchingOnGAIDAvailable = true;
} else {
performCookieBasedStrongMatch();
}
} else {
processNextQueueItem();
}
} else {
processNextQueueItem();
}
}
/**
 * Runs the cookie-based strong match check against the configured match domain, gating the
 * request queue on a strong-match wait lock until the check finishes. Silently does nothing
 * when no foreground activity (and hence no Context) is available.
 */
private void performCookieBasedStrongMatch() {
    boolean simulateInstall = (prefHelper_.getExternDebug() || isSimulatingInstalls());
    DeviceInfo deviceInfo = DeviceInfo.getInstance(simulateInstall, systemObserver_, disableDeviceIDFetch_);
    Activity foregroundActivity = (currentActivityReference_ != null) ? currentActivityReference_.get() : null;
    Context appContext = (foregroundActivity != null) ? foregroundActivity.getApplicationContext() : null;
    if (appContext == null) {
        return;
    }
    requestQueue_.setStrongMatchWaitLock();
    BranchStrongMatchHelper.getInstance().checkForStrongMatch(appContext, cookieBasedMatchDomain_, deviceInfo, prefHelper_, systemObserver_, new BranchStrongMatchHelper.StrongMatchCheckEvents() {
        @Override
        public void onStrongMatchCheckFinished() {
            requestQueue_.unlockProcessWait(ServerRequest.PROCESS_WAIT_LOCK.STRONG_MATCH_PENDING_WAIT_LOCK);
            processNextQueueItem();
        }
    });
}
/**
 * Handles execution of a new request other than open or install.
 * Checks for the session initialisation and adds a install/Open request in front of this request
 * if the request need session to execute.
 *
 * @param req The {@link ServerRequest} to execute
 */
public void handleNewRequest(ServerRequest req) {
//If not initialised put an open or install request in front of this request(only if this needs session)
if (initState_ != SESSION_STATE.INITIALISED && !(req instanceof ServerRequestInitSession)) {
if ((req instanceof ServerRequestLogout)) {
// Logout without a session is meaningless; fail it immediately.
req.handleFailure(BranchError.ERR_NO_SESSION, "");
Log.i(TAG, "Branch is not initialized, cannot logout");
return;
}
if ((req instanceof ServerRequestRegisterClose)) {
// Closing a session that was never opened is a silent no-op (request still enqueued below).
Log.i(TAG, "Branch is not initialized, cannot close session");
return;
} else {
Activity currentActivity = null;
if (currentActivityReference_ != null) {
currentActivity = currentActivityReference_.get();
}
// Kick off session init first so this request has a session to run under.
if (customReferrableSettings_ == CUSTOM_REFERRABLE_SETTINGS.USE_DEFAULT) {
initUserSessionInternal((BranchReferralInitListener) null, currentActivity, true);
} else {
boolean isReferrable = customReferrableSettings_ == CUSTOM_REFERRABLE_SETTINGS.REFERRABLE;
initUserSessionInternal((BranchReferralInitListener) null, currentActivity, isReferrable);
}
}
}
requestQueue_.enqueue(req);
req.onRequestQueued();
processNextQueueItem();
}
/**
 * Registers the Branch activity lifecycle observer on the Application (unregistering first to
 * avoid double registration). On platforms below API 14, where lifecycle callbacks do not
 * exist, auto session mode is disabled and a warning is logged.
 */
@TargetApi(Build.VERSION_CODES.ICE_CREAM_SANDWICH)
private void setActivityLifeCycleObserver(Application application) {
try {
BranchActivityLifeCycleObserver activityLifeCycleObserver = new BranchActivityLifeCycleObserver();
/* Set an observer for activity life cycle events. */
application.unregisterActivityLifecycleCallbacks(activityLifeCycleObserver);
application.registerActivityLifecycleCallbacks(activityLifeCycleObserver);
isActivityLifeCycleCallbackRegistered_ = true;
} catch (NoSuchMethodError | NoClassDefFoundError Ex) {
isActivityLifeCycleCallbackRegistered_ = false;
isAutoSessionMode_ = false;
/* LifeCycleEvents are available only from API level 14. */
Log.w(TAG, new BranchError("", BranchError.ERR_API_LVL_14_NEEDED).getMessage());
}
}
/**
* <p>Class that observes activity life cycle events and determines when to start and stop
* session.</p>
*/
@TargetApi(Build.VERSION_CODES.ICE_CREAM_SANDWICH)
private class BranchActivityLifeCycleObserver implements Application.ActivityLifecycleCallbacks {
private int activityCnt_ = 0; //Keep the count of live activities.
@Override
public void onActivityCreated(Activity activity, Bundle bundle) {
intentState_ = handleDelayedNewIntents_ ? INTENT_STATE.PENDING : INTENT_STATE.READY;
if (BranchViewHandler.getInstance().isInstallOrOpenBranchViewPending(activity.getApplicationContext())) {
BranchViewHandler.getInstance().showPendingBranchView(activity);
}
}
@Override
public void onActivityStarted(Activity activity) {
intentState_ = handleDelayedNewIntents_ ? INTENT_STATE.PENDING : INTENT_STATE.READY;
// If configured on dashboard, trigger content discovery runnable
if (initState_ == SESSION_STATE.INITIALISED) {
try {
ContentDiscoverer.getInstance().discoverContent(activity, sessionReferredLink_);
} catch (Exception ignore) {
}
}
if (activityCnt_ < 1) { // Check if this is the first Activity.If so start a session.
if (initState_ == SESSION_STATE.INITIALISED) {
// Handling case : init session completed previously when app was in background.
initState_ = SESSION_STATE.UNINITIALISED;
}
// Check if debug mode is set in manifest. If so enable debug.
if (BranchUtil.isTestModeEnabled(context_)) {
prefHelper_.setExternDebug();
}
prefHelper_.setLogging(getIsLogging());
startSession(activity);
} else if (checkIntentForSessionRestart(activity.getIntent())) { // Case of opening the app by clicking a push notification while app is in foreground
initState_ = SESSION_STATE.UNINITIALISED;
// no need call close here since it is session forced restart. Don't want to wait till close finish
startSession(activity);
}
activityCnt_++;
}
@Override
public void onActivityResumed(Activity activity) {
// Need to check here again for session restart request in case the intent is created while the activity is already running
if (checkIntentForSessionRestart(activity.getIntent())) {
initState_ = SESSION_STATE.UNINITIALISED;
startSession(activity);
}
currentActivityReference_ = new WeakReference<>(activity);
if (handleDelayedNewIntents_) {
intentState_ = INTENT_STATE.READY;
onIntentReady(activity);
}
}
@Override
public void onActivityPaused(Activity activity) {
    // Dismiss any open share sheet so it does not leak the pausing Activity's window.
    ShareLinkManager manager = shareLinkManager_;
    if (manager != null) {
        manager.cancelShareLinkDialog(true);
    }
}
@Override
public void onActivityStopped(Activity activity) {
    ContentDiscoverer.getInstance().onActivityStopped(activity);
    // One fewer live Activity; when none remain the app went to background,
    // so end the Branch session.
    activityCnt_--;
    boolean isLastActivity = activityCnt_ < 1;
    if (isLastActivity) {
        closeSessionInternal();
    }
}
@Override
public void onActivitySaveInstanceState(Activity activity, Bundle bundle) {
    // No-op: Branch keeps no per-Activity state that needs saving.
}
@Override
public void onActivityDestroyed(Activity activity) {
    // Drop our reference only if it still points at the Activity being destroyed;
    // a newer Activity may already have replaced it.
    WeakReference<Activity> ref = currentActivityReference_;
    if (ref != null && ref.get() == activity) {
        ref.clear();
    }
    BranchViewHandler.getInstance().onCurrentActivityDestroyed(activity);
}
}
/** Starts a Branch session for the given Activity, seeding it with any deep-link data on the launch Intent. */
private void startSession(Activity activity) {
    // A missing Intent simply starts a session with no referring link data.
    Intent launchIntent = activity.getIntent();
    Uri intentData = (launchIntent == null) ? null : launchIntent.getData();
    initSessionWithData(intentData, activity); // indicate starting of session.
}
/*
* Check for forced session restart. The Branch session is restarted if the incoming intent has branch_force_new_session set to true.
* This is for supporting opening a deep link path while app is already running in the foreground. Such as clicking push notification while app in foreground.
*
*/
/**
 * Checks for a forced session restart. The Branch session is restarted when the incoming
 * Intent carries {@code branch_force_new_session=true} — e.g. a push-notification click
 * while the app is already in the foreground. The flag is consumed (reset to false) so
 * the same Intent cannot trigger a second restart.
 *
 * @param intent The incoming Intent; may be null.
 * @return true if a forced restart was requested.
 */
private boolean checkIntentForSessionRestart(Intent intent) {
    if (intent == null) {
        return false;
    }
    String forceSessionKey = Defines.Jsonkey.ForceNewBranchSession.getKey();
    boolean restartRequested = intent.getBooleanExtra(forceSessionKey, false);
    if (restartRequested) {
        // Consume the flag so this Intent does not trigger another restart later.
        intent.putExtra(forceSessionKey, false);
    }
    return restartRequested;
}
/**
 * <p>Callback for session initialisation. Receives the referring deep-link params as a
 * {@link JSONObject} on success, or a {@link BranchError} describing the failure.</p>
 *
 * @see JSONObject
 * @see BranchError
 */
public interface BranchReferralInitListener {
    /**
     * Called when session initialisation completes.
     *
     * @param referringParams Referring deep-link parameters.
     * @param error           Error describing the failure, or null on success.
     */
    void onInitFinished(JSONObject referringParams, BranchError error);
}
/**
 * <p>Callback for session initialisation that delivers the referring content as a
 * {@link BranchUniversalObject} plus {@link LinkProperties}, or a {@link BranchError}
 * on failure. On error both the object and link properties are null.</p>
 *
 * @see BranchUniversalObject
 * @see LinkProperties
 * @see BranchError
 */
public interface BranchUniversalReferralInitListener {
    /**
     * Called when session initialisation completes.
     *
     * @param branchUniversalObject Referring content object; null on error.
     * @param linkProperties        Referring link properties; null on error.
     * @param error                 Error describing the failure, or null on success.
     */
    void onInitFinished(BranchUniversalObject branchUniversalObject, LinkProperties linkProperties, BranchError error);
}
/**
 * <p>Callback for state-changing requests (e.g. credit redemption). Receives a boolean
 * indicating whether state changed, and a {@link BranchError} on failure.</p>
 *
 * @see Boolean
 * @see BranchError
 */
public interface BranchReferralStateChangedListener {
    /**
     * Called when the request completes.
     *
     * @param changed true if server-side state changed.
     * @param error   Error describing the failure, or null on success.
     */
    void onStateChanged(boolean changed, BranchError error);
}
/**
 * <p>Callback for asynchronous link creation. Receives the created URL as a
 * {@link String}, or a {@link BranchError} on failure.</p>
 *
 * @see String
 * @see BranchError
 */
public interface BranchLinkCreateListener {
    /**
     * Called when link creation completes.
     *
     * @param url   The created link; may be null on error.
     * @param error Error describing the failure, or null on success.
     */
    void onLinkCreate(String url, BranchError error);
}
/**
 * <p>Listener for link-sharing status, covering the full share-sheet lifecycle:
 * launch, channel selection, share result, and dismissal.</p>
 */
public interface BranchLinkShareListener {
    /**
     * <p>Called when the share-link dialog is launched.</p>
     */
    void onShareLinkDialogLaunched();
    /**
     * <p>Called when the sharing dialog is dismissed.</p>
     */
    void onShareLinkDialogDismissed();
    /**
     * <p>Called on sharing completed or on error.</p>
     *
     * @param sharedLink    The link shared to the channel.
     * @param sharedChannel Channel selected for sharing.
     * @param error         A {@link BranchError} to update errors, if there is any.
     */
    void onLinkShareResponse(String sharedLink, String sharedChannel, BranchError error);
    /**
     * <p>Called when the user selects a channel for sharing a deep link.
     * Branch will create a deep link for the selected channel and share with it after calling this
     * method. On sharing complete, status is updated by the onLinkShareResponse() callback. Consider
     * showing a sharing-in-progress UI if you wish to prevent user activity in the window between
     * selecting a channel and sharing completion.</p>
     *
     * @param channelName Name of the selected application to share the link. An empty string is returned if unable to resolve the selected client name.
     */
    void onChannelSelected(String channelName);
}
/**
 * <p>Interface for customising the shared message per selected channel
 * (e.g. a different title for Email vs. SMS).</p>
 */
public interface IChannelProperties {
    /**
     * @param channel The name of the channel selected for sharing.
     * @return {@link String} with value for the message title for sharing the link with the selected channel
     */
    String getSharingTitleForChannel(String channel);
    /**
     * @param channel The name of the channel selected for sharing.
     * @return {@link String} with value for the message body for sharing the link with the selected channel
     */
    String getSharingMessageForChannel(String channel);
}
/**
 * <p>Callback for requests returning a list (e.g. credit history). Receives a
 * {@link JSONArray} on success, or a {@link BranchError} on failure.</p>
 *
 * @see JSONArray
 * @see BranchError
 */
public interface BranchListResponseListener {
    /**
     * Called when the list response is received.
     *
     * @param list  The response list; may be null on error.
     * @param error Error describing the failure, or null on success.
     */
    void onReceivingResponse(JSONArray list, BranchError error);
}
/**
 * <p>
 * Callback interface for listening to logout status.
 * </p>
 */
public interface LogoutStatusListener {
    /**
     * Called on finishing the logout process.
     *
     * @param loggedOut A {@link Boolean} which is set to true if logout succeeded
     * @param error     An instance of {@link BranchError} to notify any error occurred during logout.
     *                  A null value is set if logout succeeded.
     */
    void onLogoutFinished(boolean loggedOut, BranchError error);
}
/**
 * <p>Sort options for returned credit history.</p>
 */
public enum CreditHistoryOrder {
    kMostRecentFirst,  // newest transactions first
    kLeastRecentFirst  // oldest transactions first
}
/**
 * AsyncTask that creates a short link synchronously on a background thread, backing
 * the blocking "get short URL" API methods.
 * NOTE(review): class name violates UpperCamelCase convention (should be
 * GetShortLinkTask); left as-is to avoid breaking existing references.
 */
private class getShortLinkTask extends AsyncTask<ServerRequest, Void, ServerResponse> {
    @Override
    protected ServerResponse doInBackground(ServerRequest... serverRequests) {
        // Blocking network call; safe here because AsyncTask runs this off the main thread.
        return kRemoteInterface_.createCustomUrlSync(serverRequests[0].getPost());
    }
}
/**
 * Asynchronous task handling execution of server requests. Executes the network task on a
 * background thread; requests are executed in sequential manner. Handles the request execution
 * in a Synchronous-Asynchronous pattern. Should be invoked only from the main thread; results
 * are published on the main thread.
 */
private class BranchPostTask extends BranchAsyncTask<Void, Void, ServerResponse> {
    int timeOut_ = 0;            // network timeout (ms) captured at construction time
    ServerRequest thisReq_;      // the single request this task executes
    public BranchPostTask(ServerRequest request) {
        thisReq_ = request;
        timeOut_ = prefHelper_.getTimeout();
    }
    @Override
    protected void onPreExecute() {
        super.onPreExecute();
        // Runs on the main thread before the network call; lets the request snapshot state.
        thisReq_.onPreExecute();
    }
    @Override
    protected ServerResponse doInBackground(Void... voids) {
        // Init-session requests pick up the latest referrer params just before sending.
        if (thisReq_ instanceof ServerRequestInitSession) {
            ((ServerRequestInitSession) thisReq_).updateLinkReferrerParams();
        }
        //Update queue wait time
        addExtraInstrumentationData(thisReq_.getRequestPath() + "-" + Defines.Jsonkey.Queue_Wait_Time.getKey(), String.valueOf(thisReq_.getQueueWaitTime()));
        //Google ADs ID and LAT value are updated using reflection. These methods need a background
        //thread, so update them for install and open on the background thread.
        if (thisReq_.isGAdsParamsRequired() && !BranchUtil.isTestModeEnabled(context_)) {
            thisReq_.updateGAdsParams(systemObserver_);
        }
        if (thisReq_.isGetRequest()) {
            return kRemoteInterface_.make_restful_get(thisReq_.getRequestUrl(), thisReq_.getGetParams(), thisReq_.getRequestPath(), timeOut_);
        } else {
            return kRemoteInterface_.make_restful_post(thisReq_.getPostWithInstrumentationValues(instrumentationExtraData_), thisReq_.getRequestUrl(), thisReq_.getRequestPath(), timeOut_);
        }
    }
    @Override
    protected void onPostExecute(ServerResponse serverResponse) {
        super.onPostExecute(serverResponse);
        if (serverResponse != null) {
            try {
                int status = serverResponse.getStatusCode();
                hasNetwork_ = true;
                //If the request did not succeed
                if (status != 200) {
                    //If the failed request is an initialisation request, mark the session not initialised
                    if (thisReq_ instanceof ServerRequestInitSession) {
                        initState_ = SESSION_STATE.UNINITIALISED;
                    }
                    // On a bad request (conflict) notify via callback and remove the request.
                    if (status == 409) {
                        requestQueue_.remove(thisReq_);
                        if (thisReq_ instanceof ServerRequestCreateUrl) {
                            ((ServerRequestCreateUrl) thisReq_).handleDuplicateURLError();
                        } else {
                            Log.i("BranchSDK", "Branch API Error: Conflicting resource error code from API");
                            handleFailure(0, status);
                        }
                    }
                    //On network error, or if Branch is down, fail all the pending requests in the queue
                    //except for requests which need to be replayed on failure.
                    else {
                        hasNetwork_ = false;
                        //Collect all requests from the queue which need to be failed.
                        ArrayList<ServerRequest> requestToFail = new ArrayList<>();
                        for (int i = 0; i < requestQueue_.getSize(); i++) {
                            requestToFail.add(requestQueue_.peekAt(i));
                        }
                        //Remove the requests from the request queue first
                        for (ServerRequest req : requestToFail) {
                            if (req == null || !req.shouldRetryOnFail()) { // Should remove any nullified request object also from queue
                                requestQueue_.remove(req);
                            }
                        }
                        // Then, set the network count to zero, indicating that requests can be started again.
                        networkCount_ = 0;
                        //Finally call the request callback with the error.
                        for (ServerRequest req : requestToFail) {
                            if (req != null) {
                                req.handleFailure(status, serverResponse.getFailReason());
                                //If the request needs to be replayed, no need for the callbacks
                                if (req.shouldRetryOnFail())
                                    req.clearCallbacks();
                            }
                        }
                    }
                }
                //If the request succeeded
                else {
                    hasNetwork_ = true;
                    //On create-new-url, cache the url.
                    if (thisReq_ instanceof ServerRequestCreateUrl) {
                        if (serverResponse.getObject() != null) {
                            final String url = serverResponse.getObject().getString("url");
                            // cache the link
                            linkCache_.put(((ServerRequestCreateUrl) thisReq_).getLinkPost(), url);
                        }
                    }
                    //On logout, clear the link cache and all pending requests
                    else if (thisReq_ instanceof ServerRequestLogout) {
                        linkCache_.clear();
                        requestQueue_.clear();
                    }
                    requestQueue_.dequeue();
                    // If this request changes a session, propagate the new session-id to queued requests.
                    if (thisReq_ instanceof ServerRequestInitSession
                            || thisReq_ instanceof ServerRequestIdentifyUserRequest) {
                        // Immediately set session and identity and update the pending requests with the params
                        JSONObject respJson = serverResponse.getObject();
                        if (respJson != null) {
                            boolean updateRequestsInQueue = false;
                            if (respJson.has(Defines.Jsonkey.SessionID.getKey())) {
                                prefHelper_.setSessionID(respJson.getString(Defines.Jsonkey.SessionID.getKey()));
                                updateRequestsInQueue = true;
                            }
                            if (respJson.has(Defines.Jsonkey.IdentityID.getKey())) {
                                String new_Identity_Id = respJson.getString(Defines.Jsonkey.IdentityID.getKey());
                                if (!prefHelper_.getIdentityID().equals(new_Identity_Id)) {
                                    //On setting a new identity id, clear the link cache
                                    linkCache_.clear();
                                    prefHelper_.setIdentityID(respJson.getString(Defines.Jsonkey.IdentityID.getKey()));
                                    updateRequestsInQueue = true;
                                }
                            }
                            if (respJson.has(Defines.Jsonkey.DeviceFingerprintID.getKey())) {
                                prefHelper_.setDeviceFingerPrintID(respJson.getString(Defines.Jsonkey.DeviceFingerprintID.getKey()));
                                updateRequestsInQueue = true;
                            }
                            if (updateRequestsInQueue) {
                                updateAllRequestsInQueue();
                            }
                            if (thisReq_ instanceof ServerRequestInitSession) {
                                initState_ = SESSION_STATE.INITIALISED;
                                thisReq_.onRequestSucceeded(serverResponse, branchReferral_);
                                // Publish success to listeners
                                isInitReportedThroughCallBack = ((ServerRequestInitSession) thisReq_).hasCallBack();
                                // Auto deep-link only when no Branch View claimed this response.
                                if (!((ServerRequestInitSession) thisReq_).handleBranchViewIfAvailable((serverResponse))) {
                                    checkForAutoDeepLinkConfiguration();
                                }
                                // Count down the latch holding getLatestReferringParamsSync
                                if (getLatestReferringParamsLatch != null) {
                                    getLatestReferringParamsLatch.countDown();
                                }
                                // Count down the latch holding getFirstReferringParamsSync
                                if (getFirstReferringParamsLatch != null) {
                                    getFirstReferringParamsLatch.countDown();
                                }
                            } else {
                                // For setting identity, just report request succeeded
                                thisReq_.onRequestSucceeded(serverResponse, branchReferral_);
                            }
                        }
                    } else {
                        //Publish success to listeners
                        thisReq_.onRequestSucceeded(serverResponse, branchReferral_);
                    }
                }
                networkCount_ = 0;
                // Keep draining the queue unless network is down or session was reset.
                if (hasNetwork_ && initState_ != SESSION_STATE.UNINITIALISED) {
                    processNextQueueItem();
                }
            } catch (JSONException ex) {
                ex.printStackTrace();
            }
        }
    }
}
//-------------------Auto deep link feature-------------------------------------------//
/**
 * <p>Checks if an activity is launched by the Branch auto deep link feature. Branch launches Activities configured for auto deep linking on seeing matching keys.
* Keys for auto deep linking should be specified to each activity as a meta data in manifest.</p>
* Configure your activity in your manifest to enable auto deep linking as follows
* <!--
* <activity android:name=".YourActivity">
* <meta-data android:name="io.branch.sdk.auto_link" android:value="DeepLinkKey1","DeepLinkKey2" />
* </activity>
* -->
*
* @param activity Instance of activity to check if launched on auto deep link.
 * @return A {@link Boolean} value which is true if this activity is launched by the Branch auto deep link feature.
*/
public static boolean isAutoDeepLinkLaunch(Activity activity) {
    // Branch marks auto deep-linked Activities by planting this extra on the Intent.
    String autoLinkedFlag = activity.getIntent().getStringExtra(AUTO_DEEP_LINKED);
    return autoLinkedFlag != null;
}
/**
 * Scans the manifest for an Activity configured for auto deep linking whose declared
 * keys or path match the latest referring params, and launches it with the params
 * attached as Intent extras. No-op when the app was not opened via a Branch link,
 * or when auto deep linking is disabled in the application meta-data.
 */
private void checkForAutoDeepLinkConfiguration() {
    JSONObject latestParams = getLatestReferringParams();
    String deepLinkActivity = null;
    try {
        //Check if the application was launched by clicking a Branch link.
        if (!latestParams.has(Defines.Jsonkey.Clicked_Branch_Link.getKey())
                || !latestParams.getBoolean(Defines.Jsonkey.Clicked_Branch_Link.getKey())) {
            return;
        }
        if (latestParams.length() > 0) {
            // Check if auto deep link is disabled via application-level meta-data.
            ApplicationInfo appInfo = context_.getPackageManager().getApplicationInfo(context_.getPackageName(), PackageManager.GET_META_DATA);
            if (appInfo.metaData != null && appInfo.metaData.getBoolean(AUTO_DEEP_LINK_DISABLE, false)) {
                return;
            }
            PackageInfo info = context_.getPackageManager().getPackageInfo(context_.getPackageName(), PackageManager.GET_ACTIVITIES | PackageManager.GET_META_DATA);
            ActivityInfo[] activityInfos = info.activities;
            int deepLinkActivityReqCode = DEF_AUTO_DEEP_LINK_REQ_CODE;
            // Pick the first manifest Activity whose declared deep-link keys or path match.
            if (activityInfos != null) {
                for (ActivityInfo activityInfo : activityInfos) {
                    if (activityInfo != null && activityInfo.metaData != null && (activityInfo.metaData.getString(AUTO_DEEP_LINK_KEY) != null || activityInfo.metaData.getString(AUTO_DEEP_LINK_PATH) != null)) {
                        if (checkForAutoDeepLinkKeys(latestParams, activityInfo) || checkForAutoDeepLinkPath(latestParams, activityInfo)) {
                            deepLinkActivity = activityInfo.name;
                            deepLinkActivityReqCode = activityInfo.metaData.getInt(AUTO_DEEP_LINK_REQ_CODE, DEF_AUTO_DEEP_LINK_REQ_CODE);
                            break;
                        }
                    }
                }
            }
            if (deepLinkActivity != null && currentActivityReference_ != null) {
                Activity currentActivity = currentActivityReference_.get();
                if (currentActivity != null) {
                    Intent intent = new Intent(currentActivity, Class.forName(deepLinkActivity));
                    intent.putExtra(AUTO_DEEP_LINKED, "true");
                    // Put the raw JSON params as an extra in case the deep-link params are needed as a JSON String
                    intent.putExtra(Defines.Jsonkey.ReferringData.getKey(), latestParams.toString());
                    // Also add each individual parameter as its own extra
                    Iterator<?> keys = latestParams.keys();
                    while (keys.hasNext()) {
                        String key = (String) keys.next();
                        intent.putExtra(key, latestParams.getString(key));
                    }
                    currentActivity.startActivityForResult(intent, deepLinkActivityReqCode);
                } else {
                    // This case should not happen. Adding safe handling for any corner case
                    Log.w(TAG, "No activity reference to launch deep linked activity");
                }
            }
        }
    } catch (final PackageManager.NameNotFoundException e) {
        Log.i("BranchSDK", "Branch Warning: Please make sure Activity names set for auto deep link are correct!");
    } catch (ClassNotFoundException e) {
        Log.i("BranchSDK", "Branch Warning: Please make sure Activity names set for auto deep link are correct! Error while looking for activity " + deepLinkActivity);
    } catch (Exception ignore) {
        // Can get TransactionTooLarge Exception here if the Application info exceeds 1mb binder data limit. Usually results with manifest merge from SDKs
    }
}
/**
 * Checks whether any of the comma-separated deep-link keys declared in the
 * Activity's auto-deep-link meta-data is present in the referring link params.
 *
 * @param params       Referring deep-link parameters.
 * @param activityInfo Manifest info of the candidate Activity; caller guarantees metaData is non-null.
 * @return true if at least one declared key is present in {@code params}.
 */
private boolean checkForAutoDeepLinkKeys(JSONObject params, ActivityInfo activityInfo) {
    // Read the meta-data string once instead of fetching it twice.
    String linkKeysCsv = activityInfo.metaData.getString(AUTO_DEEP_LINK_KEY);
    if (linkKeysCsv != null) {
        for (String activityLinkKey : linkKeysCsv.split(",")) {
            if (params.has(activityLinkKey)) {
                return true;
            }
        }
    }
    return false;
}
/**
 * Checks whether the deep-link path in the referring params matches any of the
 * comma-separated path templates declared in the Activity's auto-deep-link meta-data.
 *
 * @param params       Referring deep-link parameters.
 * @param activityInfo Manifest info of the candidate Activity; caller guarantees metaData is non-null.
 * @return true if the referring path matches any declared path template.
 */
private boolean checkForAutoDeepLinkPath(JSONObject params, ActivityInfo activityInfo) {
    String deepLinkPath = null;
    try {
        // Android-specific path takes precedence over the generic one.
        if (params.has(Defines.Jsonkey.AndroidDeepLinkPath.getKey())) {
            deepLinkPath = params.getString(Defines.Jsonkey.AndroidDeepLinkPath.getKey());
        } else if (params.has(Defines.Jsonkey.DeepLinkPath.getKey())) {
            deepLinkPath = params.getString(Defines.Jsonkey.DeepLinkPath.getKey());
        }
    } catch (JSONException ignored) {
        // A missing/invalid path param simply means no path matching is possible.
    }
    // Read the meta-data string once instead of fetching it twice.
    String linkPathsCsv = activityInfo.metaData.getString(AUTO_DEEP_LINK_PATH);
    if (linkPathsCsv != null && deepLinkPath != null) {
        for (String activityLinkPath : linkPathsCsv.split(",")) {
            if (pathMatch(activityLinkPath.trim(), deepLinkPath)) {
                return true;
            }
        }
    }
    return false;
}
private boolean pathMatch(String templatePath, String path) {
boolean matched = true;
String[] pathSegmentsTemplate = templatePath.split("\\?")[0].split("/");
String[] pathSegmentsTarget = path.split("\\?")[0].split("/");
if (pathSegmentsTemplate.length != pathSegmentsTarget.length) {
return false;
}
for (int i = 0; i < pathSegmentsTemplate.length && i < pathSegmentsTarget.length; i++) {
String pathSegmentTemplate = pathSegmentsTemplate[i];
String pathSegmentTarget = pathSegmentsTarget[i];
if (!pathSegmentTemplate.equals(pathSegmentTarget) && !pathSegmentTemplate.contains("*")) {
matched = false;
break;
}
}
return matched;
}
// --- Process-wide debug/test toggles (static flags) ---

/** Test-only: makes every session init behave like a fresh install. */
public static void enableSimulateInstalls() {
    isSimulatingInstalls_ = true;
}
/** Turns install simulation back off. */
public static void disableSimulateInstalls() {
    isSimulatingInstalls_ = false;
}
/** @return true if install simulation is currently enabled. */
public static boolean isSimulatingInstalls() {
    return isSimulatingInstalls_;
}
/** Enables SDK logging; applied to the preference helper on next session start. */
public static void enableLogging() {
    isLogging_ = true;
}
/** Disables SDK logging. */
public static void disableLogging() {
    isLogging_ = false;
}
/** @return true if SDK logging is currently enabled. */
public static boolean getIsLogging() {
    return isLogging_;
}
//-------------------------- Branch Builders--------------------------------------//
/**
 * <p>Builder for the Branch share-link dialog. Creates a chooser for selecting an
 * application with which to share a link created from the given parameters.</p>
 */
public static class ShareLinkBuilder {
    private final Activity activity_;                                   // host Activity for the chooser dialog
    private final Branch branch_;
    private String shareMsg_;                                           // message body shared alongside the link
    private String shareSub_;                                           // subject, used by email/SMS-capable apps
    private Branch.BranchLinkShareListener callback_ = null;
    private Branch.IChannelProperties channelPropertiesCallback_ = null;
    private ArrayList<SharingHelper.SHARE_WITH> preferredOptions_;      // apps pinned to the top of the sheet
    private String defaultURL_;                                         // fallback URL if link creation fails
    //Customise "More..." and "Copy link" options
    private Drawable moreOptionIcon_;
    private String moreOptionText_;
    private Drawable copyUrlIcon_;
    private String copyURlText_;
    private String urlCopiedMessage_;                                   // toast shown after copying
    private int styleResourceID_;
    private boolean setFullWidthStyle_;
    private int dividerHeight = -1;                                     // -1 = platform default divider
    private String sharingTitle = null;
    private View sharingTitleView = null;
    BranchShortLinkBuilder shortLinkBuilder_;
    private List<String> includeInShareSheet = new ArrayList<>();       // package-name whitelist
    private List<String> excludeFromShareSheet = new ArrayList<>();     // package-name blacklist
/**
* <p>Creates options for sharing a link with other Applications. Creates a builder for sharing the link with
* user selected clients</p>
*
* @param activity The {@link Activity} to show the dialog for choosing sharing application.
* @param parameters A {@link JSONObject} value containing the deep link params.
*/
public ShareLinkBuilder(Activity activity, JSONObject parameters) {
this.activity_ = activity;
this.branch_ = branchReferral_;
shortLinkBuilder_ = new BranchShortLinkBuilder(activity);
try {
Iterator<String> keys = parameters.keys();
while (keys.hasNext()) {
String key = keys.next();
shortLinkBuilder_.addParameters(key, (String) parameters.get(key));
}
} catch (Exception ignore) {
}
shareMsg_ = "";
callback_ = null;
channelPropertiesCallback_ = null;
preferredOptions_ = new ArrayList<>();
defaultURL_ = null;
moreOptionIcon_ = BranchUtil.getDrawable(activity.getApplicationContext(), android.R.drawable.ic_menu_more);
moreOptionText_ = "More...";
copyUrlIcon_ = BranchUtil.getDrawable(activity.getApplicationContext(), android.R.drawable.ic_menu_save);
copyURlText_ = "Copy link";
urlCopiedMessage_ = "Copied link to clipboard!";
}
/**
 * <p>Creates options for sharing a link with other Applications, using a caller-supplied
 * {@link BranchShortLinkBuilder} for the link to be shared.</p>
 *
 * @param activity         The {@link Activity} to show the dialog for choosing sharing application.
 * @param shortLinkBuilder An instance of {@link BranchShortLinkBuilder} to create the link to be shared.
 */
public ShareLinkBuilder(Activity activity, BranchShortLinkBuilder shortLinkBuilder) {
    // Delegate to the main constructor for the defaults, then swap in the supplied builder.
    this(activity, new JSONObject());
    shortLinkBuilder_ = shortLinkBuilder;
}
/**
 * <p>Sets the message to be shared with the link.</p>
 *
 * @param message A {@link String} to be shared with the link.
 * @return this {@link io.branch.referral.Branch.ShareLinkBuilder} instance, for chaining.
 */
public ShareLinkBuilder setMessage(String message) {
    this.shareMsg_ = message;
    return this;
}
/**
 * <p>Sets the subject of this message, used by Email and SMS applications capable
 * of handling a subject line.</p>
 *
 * @param subject A {@link String} subject of this message.
 * @return this {@link io.branch.referral.Branch.ShareLinkBuilder} instance, for chaining.
 */
public ShareLinkBuilder setSubject(String subject) {
    this.shareSub_ = subject;
    return this;
}
/**
 * <p>Adds the given tag to the {@link Collection} of {@link String} tags associated
 * with the deep link.</p>
 *
 * @param tag A {@link String} tag to associate with the deep link.
 * @return this {@link io.branch.referral.Branch.ShareLinkBuilder} instance, for chaining.
 */
public ShareLinkBuilder addTag(String tag) {
    this.shortLinkBuilder_.addTag(tag);
    return this;
}
/**
 * <p>Adds the given tags to the {@link Collection} of {@link String} tags associated
 * with the deep link.</p>
 *
 * @param tags A {@link java.util.List} of tags to associate with the deep link.
 * @return this {@link io.branch.referral.Branch.ShareLinkBuilder} instance, for chaining.
 */
public ShareLinkBuilder addTags(ArrayList<String> tags) {
    this.shortLinkBuilder_.addTags(tags);
    return this;
}
/**
 * <p>Sets the feature this link makes use of.</p>
 *
 * @param feature A {@link String} identifying the feature. Should not exceed 128 characters.
 * @return this {@link io.branch.referral.Branch.ShareLinkBuilder} instance, for chaining.
 */
public ShareLinkBuilder setFeature(String feature) {
    this.shortLinkBuilder_.setFeature(feature);
    return this;
}
/**
 * <p>Sets the application stage or user-flow step associated with this link.</p>
 *
 * @param stage A {@link String} identifying the stage. Should not exceed 128 characters.
 * @return this {@link io.branch.referral.Branch.ShareLinkBuilder} instance, for chaining.
 */
public ShareLinkBuilder setStage(String stage) {
    this.shortLinkBuilder_.setStage(stage);
    return this;
}
/**
 * <p>Sets a callback for receiving sharing status updates.</p>
 *
 * @param callback A {@link BranchLinkShareListener} instance.
 * @return this {@link io.branch.referral.Branch.ShareLinkBuilder} instance, for chaining.
 */
public ShareLinkBuilder setCallback(BranchLinkShareListener callback) {
    this.callback_ = callback;
    return this;
}
/**
 * <p>Sets a callback for customising sharing properties per channel.</p>
 *
 * @param channelPropertiesCallback A {@link io.branch.referral.Branch.IChannelProperties} instance.
 * @return this {@link io.branch.referral.Branch.ShareLinkBuilder} instance, for chaining.
 */
public ShareLinkBuilder setChannelProperties(IChannelProperties channelPropertiesCallback) {
    this.channelPropertiesCallback_ = channelPropertiesCallback;
    return this;
}
/**
 * <p>Adds an application to the preferred list shown at the top of the share dialog.
 * Only these options are visible when the chooser launches; others are reached via "More".</p>
 *
 * @param preferredOption An application to pin, from {@link io.branch.referral.SharingHelper.SHARE_WITH}.
 * @return this {@link io.branch.referral.Branch.ShareLinkBuilder} instance, for chaining.
 */
public ShareLinkBuilder addPreferredSharingOption(SharingHelper.SHARE_WITH preferredOption) {
    this.preferredOptions_.add(preferredOption);
    return this;
}
/**
 * <p>Adds applications to the preferred list shown at the top of the share dialog.
 * Only these options are visible when the chooser launches; others are reached via "More".</p>
 *
 * @param preferredOptions Applications to pin, from {@link io.branch.referral.SharingHelper.SHARE_WITH}.
 * @return this {@link io.branch.referral.Branch.ShareLinkBuilder} instance, for chaining.
 */
public ShareLinkBuilder addPreferredSharingOptions(ArrayList<SharingHelper.SHARE_WITH> preferredOptions) {
    this.preferredOptions_.addAll(preferredOptions);
    return this;
}
/**
 * <p>Adds the given key/value pair to the deep-link parameters.</p>
 *
 * @param key   A {@link String} key for the deep-link param.
 * @param value A {@link String} value for the deep-link param.
 * @return this {@link io.branch.referral.Branch.ShareLinkBuilder} instance, for chaining.
 */
public ShareLinkBuilder addParam(String key, String value) {
    try {
        this.shortLinkBuilder_.addParameters(key, value);
    } catch (Exception ignore) {
        // Best-effort: an un-addable param is silently skipped.
    }
    return this;
}
/**
 * <p>Sets a fallback URL to share in case deep-link creation fails.</p>
 *
 * @param url A {@link String} default URL to share with the selected application on failure.
 * @return this {@link io.branch.referral.Branch.ShareLinkBuilder} instance, for chaining.
 */
public ShareLinkBuilder setDefaultURL(String url) {
    defaultURL_ = url;
    return this;
}
/**
 * <p>Sets the icon and label for the option that expands the application list.
 * Defaults: system menu_more icon, label "More".</p>
 *
 * @param icon  Drawable icon for the more option.
 * @param label A {@link String} label for the more option.
 * @return this {@link io.branch.referral.Branch.ShareLinkBuilder} instance, for chaining.
 */
public ShareLinkBuilder setMoreOptionStyle(Drawable icon, String label) {
    moreOptionIcon_ = icon;
    moreOptionText_ = label;
    return this;
}
/**
 * <p>Sets the icon and label for the option that expands the application list,
 * by resource ID. Defaults: system menu_more icon, label "More".</p>
 *
 * @param drawableIconID Drawable resource ID for the more-option icon.
 * @param stringLabelID  String resource ID for the more-option label.
 * @return this {@link io.branch.referral.Branch.ShareLinkBuilder} instance, for chaining.
 */
public ShareLinkBuilder setMoreOptionStyle(int drawableIconID, int stringLabelID) {
    moreOptionIcon_ = BranchUtil.getDrawable(activity_.getApplicationContext(), drawableIconID);
    moreOptionText_ = activity_.getResources().getString(stringLabelID);
    return this;
}
/**
 * <p>Sets the icon, label and success toast for the copy-url option.
 * Defaults: system menu_save icon, "Copy link", "Copied link to clipboard!".</p>
 *
 * @param icon    Drawable icon for the copy-url option.
 * @param label   A {@link String} label for the copy-url option.
 * @param message A {@link String} toast message shown after copying.
 * @return this {@link io.branch.referral.Branch.ShareLinkBuilder} instance, for chaining.
 */
public ShareLinkBuilder setCopyUrlStyle(Drawable icon, String label, String message) {
    copyUrlIcon_ = icon;
    copyURlText_ = label;
    urlCopiedMessage_ = message;
    return this;
}
/**
 * <p>Sets the icon, label and success toast for the copy-url option, by resource ID.
 * Defaults: system menu_save icon, "Copy link", "Copied link to clipboard!".</p>
 *
 * @param drawableIconID  Drawable resource ID for the copy-url icon.
 * @param stringLabelID   String resource ID for the copy-url label.
 * @param stringMessageID String resource ID for the toast shown after copying.
 * @return this {@link io.branch.referral.Branch.ShareLinkBuilder} instance, for chaining.
 */
public ShareLinkBuilder setCopyUrlStyle(int drawableIconID, int stringLabelID, int stringMessageID) {
    copyUrlIcon_ = BranchUtil.getDrawable(activity_.getApplicationContext(), drawableIconID);
    copyURlText_ = activity_.getResources().getString(stringLabelID);
    urlCopiedMessage_ = activity_.getResources().getString(stringMessageID);
    return this;
}
/**
 * <p>Sets the alias for this link, labelling the endpoint on the link
 * (e.g. http://bnc.lt/AUSTIN28). Should not exceed 128 characters.</p>
 *
 * @param alias Link alias.
 * @return this Builder object, for chaining.
 */
public ShareLinkBuilder setAlias(String alias) {
    this.shortLinkBuilder_.setAlias(alias);
    return this;
}
/**
 * <p>Sets how long Branch allows a click to remain outstanding and be eligible
 * to be matched with a new app session.</p>
 *
 * @param matchDuration Match duration, as an {@link Integer}.
 * @return this Builder object, for chaining.
 */
public ShareLinkBuilder setMatchDuration(int matchDuration) {
    this.shortLinkBuilder_.setDuration(matchDuration);
    return this;
}
/**
 * <p>Sets the share dialog to full-width mode: a non-modal sheet spanning the
 * entire screen width.</p>
 *
 * @param setFullWidthStyle true for the full-width style share sheet.
 * @return this Builder object, for chaining.
 */
public ShareLinkBuilder setAsFullWidthStyle(boolean setFullWidthStyle) {
    this.setFullWidthStyle_ = setFullWidthStyle;
    return this;
}
/**
 * <p>Sets the divider height between sharing channels in the list.
 * Set to zero to remove the dividers.</p>
 *
 * @param height The divider height in pixels.
 * @return this Builder object, for chaining.
 */
public ShareLinkBuilder setDividerHeight(int height) {
    this.dividerHeight = height;
    return this;
}
/**
 * Sets the title text shown on the sharing dialog.
 *
 * @param title the title text.
 * @return this builder, to allow call chaining.
 */
public ShareLinkBuilder setSharingTitle(String title) {
    sharingTitle = title;
    return this;
}
/**
 * Sets a custom {@link View} used as the title of the sharing dialog.
 *
 * @param titleView view rendered as the dialog title.
 * @return this builder, to allow call chaining.
 */
public ShareLinkBuilder setSharingTitle(View titleView) {
    sharingTitleView = titleView;
    return this;
}
/**
 * Excludes a single application, identified by package name, from the share sheet.
 *
 * @param packageName package name to exclude.
 * @return this builder, to allow call chaining.
 */
public ShareLinkBuilder excludeFromShareSheet(@NonNull String packageName) {
    excludeFromShareSheet.add(packageName);
    return this;
}
/**
 * Excludes several applications, identified by package name, from the share sheet.
 *
 * @param packageName array of package names to exclude.
 * @return this builder, to allow call chaining.
 */
public ShareLinkBuilder excludeFromShareSheet(@NonNull String[] packageName) {
    excludeFromShareSheet.addAll(Arrays.asList(packageName));
    return this;
}
/**
 * Excludes a list of applications, identified by package name, from the share sheet.
 *
 * @param packageNames package names to exclude.
 * @return this builder, to allow call chaining.
 */
public ShareLinkBuilder excludeFromShareSheet(@NonNull List<String> packageNames) {
    excludeFromShareSheet.addAll(packageNames);
    return this;
}
/**
 * Restricts the share sheet to a single included package (plus the preferred
 * sharing options). E.g. including only "com.Slack" shows preferred options + Slack.
 *
 * @param packageName package name to include.
 * @return this builder, to allow call chaining.
 */
public ShareLinkBuilder includeInShareSheet(@NonNull String packageName) {
    includeInShareSheet.add(packageName);
    return this;
}
/**
 * Restricts the share sheet to the given included packages (plus the preferred
 * sharing options). E.g. including only "com.Slack" shows preferred options + Slack.
 *
 * @param packageName array of package names to include.
 * @return this builder, to allow call chaining.
 */
public ShareLinkBuilder includeInShareSheet(@NonNull String[] packageName) {
    includeInShareSheet.addAll(Arrays.asList(packageName));
    return this;
}
/**
 * Restricts the share sheet to the given list of included packages (plus the
 * preferred sharing options). E.g. including only "com.Slack" shows preferred
 * options + Slack.
 *
 * @param packageNames package names to include.
 * @return this builder, to allow call chaining.
 */
public ShareLinkBuilder includeInShareSheet(@NonNull List<String> packageNames) {
    includeInShareSheet.addAll(packageNames);
    return this;
}
/**
 * <p>Applies the given style resource to the list view showing the share sheet.</p>
 *
 * @param resourceID styleable resource applied to the share-sheet list view.
 */
public void setStyleResourceID(@StyleRes int resourceID) {
    this.styleResourceID_ = resourceID;
}
/** Internal use: swaps in the {@link BranchShortLinkBuilder} backing this share builder. */
public void setShortLinkBuilderInternal(BranchShortLinkBuilder shortLinkBuilder) {
    shortLinkBuilder_ = shortLinkBuilder;
}
/**
 * <p>Creates an application selector dialog and shares a link with the user-selected
 * sharing option. The link is created with the parameters provided to this builder.</p>
 * <p>Delegates to the {@link Branch} instance captured when this builder was created.</p>
 */
public void shareLink() {
branchReferral_.shareLink(this);
}
// --- Read-only accessors; consumed internally when building and showing the share sheet. ---
public Activity getActivity() {
return activity_;
}
public ArrayList<SharingHelper.SHARE_WITH> getPreferredOptions() {
return preferredOptions_;
}
List<String> getExcludedFromShareSheet() {
return excludeFromShareSheet;
}
List<String> getIncludedInShareSheet() {
return includeInShareSheet;
}
public Branch getBranch() {
return branch_;
}
public String getShareMsg() {
return shareMsg_;
}
public String getShareSub() {
return shareSub_;
}
public BranchLinkShareListener getCallback() {
return callback_;
}
public IChannelProperties getChannelPropertiesCallback() {
return channelPropertiesCallback_;
}
public String getDefaultURL() {
return defaultURL_;
}
public Drawable getMoreOptionIcon() {
return moreOptionIcon_;
}
public String getMoreOptionText() {
return moreOptionText_;
}
public Drawable getCopyUrlIcon() {
return copyUrlIcon_;
}
public String getCopyURlText() {
return copyURlText_;
}
public String getUrlCopiedMessage() {
return urlCopiedMessage_;
}
public BranchShortLinkBuilder getShortLinkBuilder() {
return shortLinkBuilder_;
}
public boolean getIsFullWidthStyle() {
return setFullWidthStyle_;
}
public int getDividerHeight() {
return dividerHeight;
}
public String getSharingTitle() {
return sharingTitle;
}
public View getSharingTitleView() {
return sharingTitleView;
}
public int getStyleResourceID() {
return styleResourceID_;
}
}
//------------------------ Content Indexing methods----------------------//
/**
 * Registers a content-view event for the given {@link BranchUniversalObject}
 * by queueing a register-view server request. Does nothing when no context is set.
 *
 * @param branchUniversalObject content whose view is being recorded.
 * @param callback              listener notified about the registration status.
 */
public void registerView(BranchUniversalObject branchUniversalObject,
                         BranchUniversalObject.RegisterViewStatusListener callback) {
    if (context_ == null) {
        return;
    }
    ServerRequest req = new ServerRequestRegisterView(context_, branchUniversalObject, systemObserver_, callback);
    if (!req.constructError_ && !req.handleErrors(context_)) {
        handleNewRequest(req);
    }
}
///-------Instrumentation additional data---------------///
/**
 * Adds a batch of extra instrumentation key/value pairs that are sent to Branch.
 *
 * @param instrumentationData A {@link HashMap} with key value pairs for instrumentation data.
 */
public void addExtraInstrumentationData(HashMap<String, String> instrumentationData) {
instrumentationExtraData_.putAll(instrumentationData);
}
/**
 * Adds a single extra instrumentation key/value pair that is sent to Branch.
 *
 * @param key   A {@link String} value for the instrumentation data key
 * @param value A {@link String} value for the instrumentation data value
 */
public void addExtraInstrumentationData(String key, String value) {
instrumentationExtraData_.put(key, value);
}
//-------------------- Branch view handling--------------------//
// Branch-view lifecycle callbacks: once a view shown during session init is
// resolved (accepted, cancelled, or failed), continue with the auto deep link
// configuration check that the view interrupted.
@Override
public void onBranchViewVisible(String action, String branchViewID) {
//No Implementation on purpose
}
@Override
public void onBranchViewAccepted(String action, String branchViewID) {
if (ServerRequestInitSession.isInitSessionAction(action)) {
checkForAutoDeepLinkConfiguration();
}
}
@Override
public void onBranchViewCancelled(String action, String branchViewID) {
if (ServerRequestInitSession.isInitSessionAction(action)) {
checkForAutoDeepLinkConfiguration();
}
}
@Override
public void onBranchViewError(int errorCode, String errorMsg, String action) {
if (ServerRequestInitSession.isInitSessionAction(action)) {
checkForAutoDeepLinkConfiguration();
}
}
/**
 * Interface for defining optional Branch view behaviour for Activities.
 */
public interface IBranchViewControl {
/**
 * Defines whether an activity is interested in showing Branch views.
 * By default, activities are considered Branch-view enabled. Activities that should not
 * show a Branch view (a splash screen, for example) should implement this and return
 * {@code true}. The pending Branch view will then be shown with the very next
 * Branch-view-enabled activity.
 *
 * @return A {@link Boolean} whose value is true if the activity does not want to show any Branch view.
 */
boolean skipBranchViewsOnThisActivity();
}
///----------------- Instant App support--------------------------//
private static Context lastApplicationContext = null; // application context of the last isInstantApp() probe
private static Boolean isInstantApp = null; // cached probe result for that context; null = not yet determined
/**
 * Checks whether the current application is running as an Instant App instance.
 * The result is cached per application context; detection works by probing the
 * class loader for the instant-apps supervisor runtime class.
 *
 * @param context Current {@link Context}
 * @return {@code true} if the current application is an instant-app instance
 */
public static boolean isInstantApp(@NonNull Context context) {
    try {
        Context applicationContext = context.getApplicationContext();
        if (isInstantApp != null && applicationContext.equals(lastApplicationContext)) {
            return isInstantApp.booleanValue();
        }
        isInstantApp = null;
        lastApplicationContext = applicationContext;
        applicationContext.getClassLoader().loadClass("com.google.android.instantapps.supervisor.InstantAppsRuntime");
        isInstantApp = Boolean.TRUE;
    } catch (Exception ignored) {
        // ClassNotFoundException (or any other probe failure) => not an instant app.
        isInstantApp = Boolean.FALSE;
    }
    return isInstantApp.booleanValue();
}
/**
 * Shows the Play-Store install prompt for the full app from an instant app. The current
 * Branch referring link (if any) is forwarded as the install referrer, so the full app
 * receives the same deep link params on {@code Branch#initSession()}.
 *
 * @param activity    Current activity
 * @param requestCode Request code for the activity to receive the result
 * @return {@code true} if install prompt is shown to user
 */
public static boolean showInstallPrompt(@NonNull Activity activity, int requestCode) {
    String installReferrerString = "";
    if (Branch.getInstance() != null) {
        JSONObject latestReferringParams = Branch.getInstance().getLatestReferringParams();
        String referringLinkKey = "~" + Defines.Jsonkey.ReferringLink.getKey();
        if (latestReferringParams != null && latestReferringParams.has(referringLinkKey)) {
            try {
                String referringLink = latestReferringParams.getString(referringLinkKey);
                // installReferrerString already carries the IsFullAppConv marker here.
                installReferrerString = Defines.Jsonkey.IsFullAppConv.getKey() + "=true&" + Defines.Jsonkey.ReferringLink.getKey() + "=" + referringLink;
            } catch (JSONException e) {
                e.printStackTrace();
            }
        }
    }
    // FIX: call the private worker directly. Routing through the (Activity, int, String)
    // overload would prepend a second IsFullAppConv marker (and that overload, as
    // previously written, called itself recursively).
    return doShowInstallPrompt(activity, requestCode, installReferrerString);
}
/**
 * Shows the Play-Store install prompt for the full app with custom referrer data. Use this
 * method only if you have custom parameters to pass to the full app via the referrer;
 * otherwise use {@link #showInstallPrompt(Activity, int)}.
 *
 * @param activity    Current activity
 * @param requestCode Request code for the activity to receive the result
 * @param referrer    Any custom referrer string to pass to the full app
 *                    (format "referrer_key1=referrer_value1&referrer_key2=referrer_value2")
 * @return {@code true} if install prompt is shown to user
 */
public static boolean showInstallPrompt(@NonNull Activity activity, int requestCode, @Nullable String referrer) {
    String installReferrerString = Defines.Jsonkey.IsFullAppConv.getKey() + "=true&" + referrer;
    // FIX: the original line called showInstallPrompt(activity, requestCode, installReferrerString),
    // i.e. this very method, causing infinite recursion (StackOverflowError) on every call.
    // Delegate to the private worker instead.
    return doShowInstallPrompt(activity, requestCode, installReferrerString);
}
/**
 * Shows the Play-Store install prompt for the full app, handing the given
 * {@link BranchUniversalObject} over for deferred deep linking. See
 * {@link #showInstallPrompt(Activity, int)}.
 * NOTE:
 * This performs a synchronous Branch short-link generation for the BUO, so consider
 * calling it on a non-UI thread. The instant app and the full app must use the same
 * Branch key for the deferred deep link to work.
 *
 * @param activity    Current activity
 * @param requestCode Request code for the activity to receive the result
 * @param buo         {@link BranchUniversalObject} to pass to the full app upon install
 * @return {@code true} if install prompt is shown to user
 */
public static boolean showInstallPrompt(@NonNull Activity activity, int requestCode, @NonNull BranchUniversalObject buo) {
    if (buo == null) {
        return false;
    }
    String shortUrl = buo.getShortUrl(activity, new LinkProperties());
    String installReferrerString = Defines.Jsonkey.ReferringLink.getKey() + "=" + shortUrl;
    if (TextUtils.isEmpty(installReferrerString)) {
        return showInstallPrompt(activity, requestCode, "");
    }
    return showInstallPrompt(activity, requestCode, installReferrerString);
}
/**
 * Launches the Play-Store details page for this package as an overlay install prompt,
 * attaching the given referrer string. Only valid from an instant-app instance.
 *
 * @param activity    current activity; must be non-null and an instant app.
 * @param requestCode request code under which the activity receives the result.
 * @param referrer    referrer query value appended to the market URI; may be empty.
 * @return {@code true} if the prompt activity was started.
 */
private static boolean doShowInstallPrompt(@NonNull Activity activity, int requestCode, @Nullable String referrer) {
    if (activity == null) {
        Log.e("BranchSDK", "Unable to show install prompt. Activity is null");
        return false;
    }
    if (!isInstantApp(activity)) {
        Log.e("BranchSDK", "Unable to show install prompt. Application is not an instant app");
        return false;
    }
    Uri.Builder marketUri = new Uri.Builder()
            .scheme("market")
            .authority("details")
            .appendQueryParameter("id", activity.getPackageName());
    if (!TextUtils.isEmpty(referrer)) {
        marketUri.appendQueryParameter("referrer", referrer);
    }
    Intent intent = new Intent("android.intent.action.VIEW")
            .setPackage("com.android.vending")
            .addCategory("android.intent.category.DEFAULT")
            .putExtra("callerId", activity.getPackageName())
            .putExtra("overlay", true);
    intent.setData(marketUri.build());
    activity.startActivityForResult(intent, requestCode);
    return true;
}
}
|
Java doc error fix
|
Branch-SDK/src/io/branch/referral/Branch.java
|
Java doc error fix
|
|
Java
|
mpl-2.0
|
c616302f297a0108bd964c3c49b121e8e327cf08
| 0
|
GreenDelta/olca-modules,GreenDelta/olca-modules,GreenDelta/olca-modules
|
package org.openlca.io.simapro.csv.input;
import org.openlca.core.database.IDatabase;
import org.openlca.core.database.ProcessDao;
import org.openlca.core.model.AllocationFactor;
import org.openlca.core.model.AllocationMethod;
import org.openlca.core.model.Category;
import org.openlca.core.model.Exchange;
import org.openlca.core.model.Flow;
import org.openlca.core.model.FlowProperty;
import org.openlca.core.model.FlowPropertyFactor;
import org.openlca.core.model.FlowType;
import org.openlca.core.model.ModelType;
import org.openlca.core.model.Process;
import org.openlca.core.model.ProcessDocumentation;
import org.openlca.core.model.ProcessType;
import org.openlca.core.model.Uncertainty;
import org.openlca.core.model.UnitGroup;
import org.openlca.io.Categories;
import org.openlca.io.UnitMappingEntry;
import org.openlca.io.maps.MapFactor;
import org.openlca.simapro.csv.model.AbstractExchangeRow;
import org.openlca.simapro.csv.model.annotations.BlockHandler;
import org.openlca.simapro.csv.model.enums.ElementaryFlowType;
import org.openlca.simapro.csv.model.enums.ProductType;
import org.openlca.simapro.csv.model.process.ElementaryExchangeRow;
import org.openlca.simapro.csv.model.process.ProcessBlock;
import org.openlca.simapro.csv.model.process.ProductExchangeRow;
import org.openlca.simapro.csv.model.process.ProductOutputRow;
import org.openlca.simapro.csv.model.process.RefProductRow;
import org.openlca.util.KeyGen;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Maps SimaPro CSV process blocks to openLCA processes and inserts them into the
 * database. Flows, units, and flow mappings must already be resolved in the given
 * {@link RefData} before blocks are handled.
 */
class ProcessHandler {

    private Logger log = LoggerFactory.getLogger(getClass());

    private IDatabase database;
    private RefData refData;
    private ProcessDao dao;

    // currently mapped process and process block
    private Process process;
    private ProcessBlock block;
    private ProcessParameterMapper parameterMapper;

    public ProcessHandler(IDatabase database, RefData refData) {
        this.database = database;
        this.refData = refData;
        this.dao = new ProcessDao(database);
    }

    /**
     * Converts the given block into a new process and inserts it. Blocks whose
     * identifier already exists in the database are skipped.
     */
    @BlockHandler
    public void handleProcess(ProcessBlock block) {
        String refId = KeyGen.get(block.getIdentifier());
        Process process = dao.getForRefId(refId);
        if (process != null) {
            log.warn("a process with the identifier {} is already in the "
                    + "database and was not imported", refId);
            // FIX: return here; previously the code fell through and tried to
            // import the duplicate anyway, contradicting the log message.
            return;
        }
        log.trace("import process {}", refId);
        process = new Process();
        process.setRefId(refId);
        process.setDefaultAllocationMethod(AllocationMethod.PHYSICAL);
        process.setDocumentation(new ProcessDocumentation());
        this.process = process;
        this.block = block;
        mapData();
        try {
            dao.insert(process);
        } catch (Exception e) {
            log.error("failed to insert process " + refId, e);
        }
        this.process = null;
    }

    private void mapData() {
        mapName();
        mapLocation();
        mapCategory();
        mapType();
        new ProcessDocMapper(database, refData).map(block, process);
        // parameters must be mapped first: exchange amount formulas are
        // evaluated in the parameter scope returned here
        parameterMapper = new ProcessParameterMapper(database);
        long scope = parameterMapper.map(block, process);
        mapProductOutputs(scope);
        mapProductInputs(scope);
        mapElementaryFlows(scope);
        mapAllocation();
    }

    // prefer the block name, then the reference flow name, then the identifier
    private void mapName() {
        if (block.getName() != null) {
            process.setName(block.getName());
            return;
        }
        Flow refFlow = getRefFlow();
        if (refFlow != null) {
            process.setName(refFlow.getName());
            return;
        }
        process.setName(block.getIdentifier());
    }

    private void mapLocation() {
        Flow refFlow = getRefFlow();
        if (refFlow == null)
            return;
        process.setLocation(refFlow.getLocation());
    }

    /**
     * Creates physical/economic factors from the SimaPro allocation percentages
     * and causal factors for every non-product exchange.
     */
    private void mapAllocation() {
        for (ProductOutputRow output : block.getProducts()) {
            double value = output.getAllocation() / 100d; // SimaPro stores percentages
            long productId = refData.getProduct(output.getName()).getId();
            addFactor(AllocationMethod.PHYSICAL, productId, value);
            addFactor(AllocationMethod.ECONOMIC, productId, value);
            for (Exchange e : process.getExchanges()) {
                if (!isOutputProduct(e)) {
                    addCausalFactor(productId, e, value);
                }
            }
        }
    }

    private boolean isOutputProduct(Exchange e) {
        return e != null && e.getFlow() != null
                && !e.isInput() && !e.isAvoidedProduct()
                && e.getFlow().getFlowType() == FlowType.PRODUCT_FLOW;
    }

    private void addFactor(AllocationMethod method, long productId, double value) {
        AllocationFactor f = new AllocationFactor();
        f.setAllocationType(method);
        f.setValue(value);
        f.setProductId(productId);
        process.getAllocationFactors().add(f);
    }

    private void addCausalFactor(long productId, Exchange e, double value) {
        AllocationFactor f = new AllocationFactor();
        f.setAllocationType(AllocationMethod.CAUSAL);
        f.setValue(value);
        f.setProductId(productId);
        f.setExchange(e);
        process.getAllocationFactors().add(f);
    }

    /** The first product, or the waste treatment row, determines the reference flow. */
    private Flow getRefFlow() {
        if (!block.getProducts().isEmpty()) {
            ProductOutputRow refRow = block.getProducts().get(0);
            Flow flow = refData.getProduct(refRow.getName());
            if (flow != null)
                return flow;
        }
        if (block.getWasteTreatment() != null)
            return refData.getProduct(block.getWasteTreatment().getName());
        return null;
    }

    private void mapProductOutputs(long scope) {
        boolean first = true;
        for (ProductOutputRow row : block.getProducts()) {
            Exchange e = createProductOutput(row, scope);
            if (first && e != null) {
                process.setQuantitativeReference(e);
                first = false;
            }
        }
        if (block.getWasteTreatment() != null) {
            Exchange e = createProductOutput(block.getWasteTreatment(),
                    scope);
            process.setQuantitativeReference(e);
        }
    }

    private Exchange createProductOutput(RefProductRow row, long scope) {
        Flow flow = refData.getProduct(row.getName());
        Exchange e = initExchange(row, scope, flow);
        if (e == null)
            return null;
        e.setInput(false);
        setUnit(e, row.getUnit());
        process.getExchanges().add(e);
        return e;
    }

    private void mapProductInputs(long scope) {
        for (ProductType type : ProductType.values()) {
            for (ProductExchangeRow row : block.getProductExchanges(type)) {
                Flow flow = refData.getProduct(row.getName());
                Exchange e = initExchange(row, scope, flow);
                if (e == null)
                    continue;
                e.setInput(true);
                e.setAvoidedProduct(type == ProductType.AVOIDED_PRODUCTS);
                setUnit(e, row.getUnit());
                process.getExchanges().add(e);
            }
        }
    }

    private void mapElementaryFlows(long scope) {
        for (ElementaryFlowType type : ElementaryFlowType.values()) {
            boolean isInput = type == ElementaryFlowType.RESOURCES;
            for (ElementaryExchangeRow row : block
                    .getElementaryExchangeRows(type)) {
                String key = KeyGen.get(row.getName(),
                        type.getExchangeHeader(), row.getSubCompartment(),
                        row.getUnit());
                MapFactor<Flow> factor = refData.getMappedFlow(key);
                Exchange e;
                if (factor != null) {
                    // mapped flows get the reference unit of the mapped flow,
                    // not the SimaPro row unit
                    e = initMappedExchange(factor, row, scope);
                    setRefUnit(e);
                } else {
                    Flow flow = refData.getElemFlow(key);
                    e = initExchange(row, scope, flow);
                    setUnit(e, row.getUnit());
                }
                if (e == null)
                    continue;
                e.setInput(isInput);
                process.getExchanges().add(e);
            }
        }
    }

    /** Scales amount, formula, and uncertainty of a mapped exchange by the map factor. */
    private Exchange initMappedExchange(MapFactor<Flow> mappedFlow,
            ElementaryExchangeRow row, long scope) {
        Flow flow = mappedFlow.getEntity();
        Exchange e = initExchange(row, scope, flow);
        if (e == null)
            return null;
        double f = mappedFlow.getFactor();
        e.setAmountValue(f * e.getAmountValue());
        if (e.getAmountFormula() != null) {
            String formula = f + " * ( " + e.getAmountFormula() + " )";
            e.setAmountFormula(formula);
        }
        if (e.getUncertainty() != null) {
            e.getUncertainty().scale(f);
        }
        return e;
    }

    /** Creates an exchange for the row; unit and flow property are set by the callers. */
    private Exchange initExchange(AbstractExchangeRow row, long scopeId,
            Flow flow) {
        if (flow == null) {
            // FIX: log message typo ("now flow" -> "no flow")
            log.error("could not create exchange as there was no flow found "
                    + "for {}", row);
            return null;
        }
        Exchange e = new Exchange();
        e.setFlow(flow);
        e.description = row.getComment();
        setAmount(e, row.getAmount(), scopeId);
        Uncertainty uncertainty = Uncertainties.get(e.getAmountValue(),
                row.getUncertaintyDistribution());
        e.setUncertainty(uncertainty);
        return e;
    }

    /** Sets the exchange unit and flow property for the given SimaPro unit. */
    private void setUnit(Exchange e, String unit) {
        if (e == null || e.getFlow() == null)
            return;
        UnitMappingEntry entry = refData.getUnitMapping().getEntry(unit);
        if (entry == null) {
            log.error("unknown unit {}; could not set exchange unit", unit);
            return;
        }
        Flow flow = e.getFlow();
        e.setUnit(entry.getUnit());
        FlowPropertyFactor factor = flow.getFactor(entry.getFlowProperty());
        e.setFlowPropertyFactor(factor);
    }

    /**
     * Sets the exchange unit and flow property from the flow reference data
     * (used for mapped reference flows).
     */
    private void setRefUnit(Exchange e) {
        if (e == null || e.getFlow() == null)
            return;
        Flow f = e.getFlow();
        FlowPropertyFactor fac = f.getReferenceFactor();
        if (fac == null || fac.getFlowProperty() == null)
            return;
        e.setFlowPropertyFactor(fac);
        FlowProperty prop = fac.getFlowProperty();
        UnitGroup group = prop.getUnitGroup();
        if (group == null || group.getReferenceUnit() == null)
            return;
        e.setUnit(group.getReferenceUnit());
    }

    /** Plain numbers become amount values; everything else is kept as a formula. */
    private void setAmount(Exchange e, String amountText, long scope) {
        try {
            double val = Double.parseDouble(amountText);
            e.setAmountValue(val);
        } catch (Exception ex) {
            double val = parameterMapper.eval(amountText, scope);
            e.setAmountValue(val);
            e.setAmountFormula(amountText);
        }
    }

    private void mapCategory() {
        String categoryPath = null;
        if (!block.getProducts().isEmpty()) {
            ProductOutputRow row = block.getProducts().get(0);
            categoryPath = row.getCategory();
        } else if (block.getWasteTreatment() != null)
            categoryPath = block.getWasteTreatment().getCategory();
        if (categoryPath == null)
            return;
        String[] path = categoryPath.split("\\\\");
        Category category = Categories.findOrAdd(database, ModelType.PROCESS,
                path);
        process.setCategory(category);
    }

    private void mapType() {
        org.openlca.simapro.csv.model.enums.ProcessType type = block
                .getProcessType();
        if (type == org.openlca.simapro.csv.model.enums.ProcessType.SYSTEM)
            process.setProcessType(ProcessType.LCI_RESULT);
        else
            process.setProcessType(ProcessType.UNIT_PROCESS);
    }
}
|
olca-io/src/main/java/org/openlca/io/simapro/csv/input/ProcessHandler.java
|
package org.openlca.io.simapro.csv.input;
import org.openlca.core.database.IDatabase;
import org.openlca.core.database.ProcessDao;
import org.openlca.core.model.AllocationFactor;
import org.openlca.core.model.AllocationMethod;
import org.openlca.core.model.Category;
import org.openlca.core.model.Exchange;
import org.openlca.core.model.Flow;
import org.openlca.core.model.FlowPropertyFactor;
import org.openlca.core.model.FlowType;
import org.openlca.core.model.ModelType;
import org.openlca.core.model.Process;
import org.openlca.core.model.ProcessDocumentation;
import org.openlca.core.model.ProcessType;
import org.openlca.core.model.Uncertainty;
import org.openlca.io.Categories;
import org.openlca.io.UnitMappingEntry;
import org.openlca.io.maps.MapFactor;
import org.openlca.simapro.csv.model.AbstractExchangeRow;
import org.openlca.simapro.csv.model.annotations.BlockHandler;
import org.openlca.simapro.csv.model.enums.ElementaryFlowType;
import org.openlca.simapro.csv.model.enums.ProductType;
import org.openlca.simapro.csv.model.process.ElementaryExchangeRow;
import org.openlca.simapro.csv.model.process.ProcessBlock;
import org.openlca.simapro.csv.model.process.ProductExchangeRow;
import org.openlca.simapro.csv.model.process.ProductOutputRow;
import org.openlca.simapro.csv.model.process.RefProductRow;
import org.openlca.util.KeyGen;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Maps SimaPro CSV process blocks to openLCA processes and inserts them into the
 * database. Flows, units, and flow mappings must already be resolved in the given
 * {@link RefData} before blocks are handled.
 */
class ProcessHandler {

    private Logger log = LoggerFactory.getLogger(getClass());

    private IDatabase database;
    private RefData refData;
    private ProcessDao dao;

    // currently mapped process and process block
    private Process process;
    private ProcessBlock block;
    private ProcessParameterMapper parameterMapper;

    public ProcessHandler(IDatabase database, RefData refData) {
        this.database = database;
        this.refData = refData;
        this.dao = new ProcessDao(database);
    }

    /**
     * Converts the given block into a new process and inserts it. Blocks whose
     * identifier already exists in the database are skipped.
     */
    @BlockHandler
    public void handleProcess(ProcessBlock block) {
        String refId = KeyGen.get(block.getIdentifier());
        Process process = dao.getForRefId(refId);
        if (process != null) {
            log.warn("a process with the identifier {} is already in the "
                    + "database and was not imported", refId);
            // FIX: return here; previously the code fell through and tried to
            // import the duplicate anyway, contradicting the log message.
            return;
        }
        log.trace("import process {}", refId);
        process = new Process();
        process.setRefId(refId);
        process.setDefaultAllocationMethod(AllocationMethod.PHYSICAL);
        process.setDocumentation(new ProcessDocumentation());
        this.process = process;
        this.block = block;
        mapData();
        try {
            dao.insert(process);
        } catch (Exception e) {
            log.error("failed to insert process " + refId, e);
        }
        this.process = null;
    }

    private void mapData() {
        mapName();
        mapLocation();
        mapCategory();
        mapType();
        new ProcessDocMapper(database, refData).map(block, process);
        // parameters must be mapped first: exchange amount formulas are
        // evaluated in the parameter scope returned here
        parameterMapper = new ProcessParameterMapper(database);
        long scope = parameterMapper.map(block, process);
        mapProductOutputs(scope);
        mapProductInputs(scope);
        mapElementaryFlows(scope);
        mapAllocation();
    }

    // prefer the block name, then the reference flow name, then the identifier
    private void mapName() {
        if (block.getName() != null) {
            process.setName(block.getName());
            return;
        }
        Flow refFlow = getRefFlow();
        if (refFlow != null) {
            process.setName(refFlow.getName());
            return;
        }
        process.setName(block.getIdentifier());
    }

    private void mapLocation() {
        Flow refFlow = getRefFlow();
        if (refFlow == null)
            return;
        process.setLocation(refFlow.getLocation());
    }

    /**
     * Creates physical/economic factors from the SimaPro allocation percentages
     * and causal factors for every non-product exchange.
     */
    private void mapAllocation() {
        for (ProductOutputRow output : block.getProducts()) {
            double value = output.getAllocation() / 100d; // SimaPro stores percentages
            long productId = refData.getProduct(output.getName()).getId();
            addFactor(AllocationMethod.PHYSICAL, productId, value);
            addFactor(AllocationMethod.ECONOMIC, productId, value);
            for (Exchange e : process.getExchanges())
                if (!isOutputProduct(e))
                    addCausalFactor(productId, e, value);
        }
    }

    private boolean isOutputProduct(Exchange exchange) {
        return exchange != null && exchange.getFlow() != null
                && !exchange.isInput() && !exchange.isAvoidedProduct()
                && exchange.getFlow().getFlowType() == FlowType.PRODUCT_FLOW;
    }

    private void addFactor(AllocationMethod method, long productId, double value) {
        AllocationFactor factor = new AllocationFactor();
        factor.setAllocationType(method);
        factor.setValue(value);
        factor.setProductId(productId);
        process.getAllocationFactors().add(factor);
    }

    private void addCausalFactor(long productId, Exchange exchange, double value) {
        AllocationFactor factor = new AllocationFactor();
        factor.setAllocationType(AllocationMethod.CAUSAL);
        factor.setValue(value);
        factor.setProductId(productId);
        factor.setExchange(exchange);
        process.getAllocationFactors().add(factor);
    }

    /** The first product, or the waste treatment row, determines the reference flow. */
    private Flow getRefFlow() {
        if (!block.getProducts().isEmpty()) {
            ProductOutputRow refRow = block.getProducts().get(0);
            Flow flow = refData.getProduct(refRow.getName());
            if (flow != null)
                return flow;
        }
        if (block.getWasteTreatment() != null)
            return refData.getProduct(block.getWasteTreatment().getName());
        return null;
    }

    private void mapProductOutputs(long scopeId) {
        boolean first = true;
        for (ProductOutputRow row : block.getProducts()) {
            Exchange exchange = createProductOutput(row, scopeId);
            if (first && exchange != null) {
                process.setQuantitativeReference(exchange);
                first = false;
            }
        }
        if (block.getWasteTreatment() != null) {
            Exchange exchange = createProductOutput(block.getWasteTreatment(),
                    scopeId);
            process.setQuantitativeReference(exchange);
        }
    }

    private void mapProductInputs(long scope) {
        for (ProductType type : ProductType.values()) {
            for (ProductExchangeRow row : block.getProductExchanges(type)) {
                Flow flow = refData.getProduct(row.getName());
                if (flow == null) {
                    log.error("unknown flow {}; could not create exchange", row);
                    continue;
                }
                Exchange exchange = addExchange(row, scope, flow);
                if (exchange != null) {
                    exchange.setInput(true);
                    exchange.setAvoidedProduct(type == ProductType.AVOIDED_PRODUCTS);
                }
            }
        }
    }

    private void mapElementaryFlows(long scope) {
        for (ElementaryFlowType type : ElementaryFlowType.values()) {
            boolean isInputType = type == ElementaryFlowType.RESOURCES;
            for (ElementaryExchangeRow row : block
                    .getElementaryExchangeRows(type)) {
                String key = KeyGen.get(row.getName(),
                        type.getExchangeHeader(), row.getSubCompartment(),
                        row.getUnit());
                MapFactor<Flow> mappedFlow = refData.getMappedFlow(key);
                Exchange exchange;
                if (mappedFlow != null)
                    exchange = createMappedExchange(mappedFlow, row, scope);
                else {
                    Flow flow = refData.getElemFlow(key);
                    exchange = addExchange(row, scope, flow);
                }
                if (exchange != null) {
                    exchange.setInput(isInputType);
                }
            }
        }
    }

    /** Scales amount, formula, and uncertainty of a mapped exchange by the map factor. */
    private Exchange createMappedExchange(MapFactor<Flow> mappedFlow,
            ElementaryExchangeRow row, long scope) {
        // NOTE(review): addExchange sets the unit from the SimaPro row unit, but
        // the mapped (openLCA) flow may use a different reference unit — verify
        // that the unit/flow-property assignment is correct for mapped flows.
        Flow flow = mappedFlow.getEntity();
        Exchange exchange = addExchange(row, scope, flow);
        if (exchange == null)
            return null;
        double f = mappedFlow.getFactor();
        exchange.setAmountValue(f * exchange.getAmountValue());
        if (exchange.getAmountFormula() != null) {
            String formula = f + " * ( " + exchange.getAmountFormula() + " )";
            exchange.setAmountFormula(formula);
        }
        if (exchange.getUncertainty() != null)
            exchange.getUncertainty().scale(f);
        return exchange;
    }

    private Exchange createProductOutput(RefProductRow row, long scopeId) {
        Flow flow = refData.getProduct(row.getName());
        Exchange exchange = addExchange(row, scopeId, flow);
        if (exchange != null)
            exchange.setInput(false);
        return exchange;
    }

    /** Creates an exchange for the row and adds it to the current process. */
    private Exchange addExchange(AbstractExchangeRow row, long scopeId,
            Flow flow) {
        if (flow == null) {
            // FIX: log message typo ("now flow" -> "no flow")
            log.error("could not create exchange as there was no flow found "
                    + "for {}", row);
            return null;
        }
        Exchange exchange = new Exchange();
        exchange.setFlow(flow);
        exchange.description = row.getComment();
        setExchangeUnit(exchange, flow, row.getUnit());
        setAmount(exchange, row.getAmount(), scopeId);
        Uncertainty uncertainty = Uncertainties.get(exchange.getAmountValue(),
                row.getUncertaintyDistribution());
        exchange.setUncertainty(uncertainty);
        process.getExchanges().add(exchange);
        return exchange;
    }

    /** Sets the exchange unit and flow property for the given SimaPro unit. */
    private void setExchangeUnit(Exchange exchange, Flow flow, String unit) {
        UnitMappingEntry entry = refData.getUnitMapping().getEntry(unit);
        if (entry == null) {
            log.error("unknown unit {}; could not set exchange unit", unit);
            return;
        }
        exchange.setUnit(entry.getUnit());
        FlowPropertyFactor factor = flow.getFactor(entry.getFlowProperty());
        exchange.setFlowPropertyFactor(factor);
    }

    /** Plain numbers become amount values; everything else is kept as a formula. */
    private void setAmount(Exchange exchange, String amountText, long scope) {
        try {
            double val = Double.parseDouble(amountText);
            exchange.setAmountValue(val);
        } catch (Exception e) {
            double val = parameterMapper.eval(amountText, scope);
            exchange.setAmountValue(val);
            exchange.setAmountFormula(amountText);
        }
    }

    private void mapCategory() {
        String categoryPath = null;
        if (!block.getProducts().isEmpty()) {
            ProductOutputRow row = block.getProducts().get(0);
            categoryPath = row.getCategory();
        } else if (block.getWasteTreatment() != null)
            categoryPath = block.getWasteTreatment().getCategory();
        if (categoryPath == null)
            return;
        String[] path = categoryPath.split("\\\\");
        Category category = Categories.findOrAdd(database, ModelType.PROCESS,
                path);
        process.setCategory(category);
    }

    private void mapType() {
        org.openlca.simapro.csv.model.enums.ProcessType type = block
                .getProcessType();
        if (type == org.openlca.simapro.csv.model.enums.ProcessType.SYSTEM)
            process.setProcessType(ProcessType.LCI_RESULT);
        else
            process.setProcessType(ProcessType.UNIT_PROCESS);
    }
}
|
:bug: wrong reference units of mapped flows in SimaPro import
|
olca-io/src/main/java/org/openlca/io/simapro/csv/input/ProcessHandler.java
|
:bug: wrong reference units of mapped flows in SimaPro import
|
|
Java
|
lgpl-2.1
|
d8fcf45143b5997ea081014cb9a68ce054e6d249
| 0
|
liscju/checkstyle,ilanKeshet/checkstyle,attatrol/checkstyle,mkordas/checkstyle,AkshitaKukreja30/checkstyle,sirdis/checkstyle,StetsiukRoman/checkstyle,WilliamRen/checkstyle,checkstyle/checkstyle,llocc/checkstyle,universsky/checkstyle,romani/checkstyle,romani/checkstyle,liscju/checkstyle,WilliamRen/checkstyle,rmswimkktt/checkstyle,philwebb/checkstyle,pbaranchikov/checkstyle,jochenvdv/checkstyle,rnveach/checkstyle,jochenvdv/checkstyle,jasonchaffee/checkstyle,Bhavik3/checkstyle,gallandarakhneorg/checkstyle,Andrew0701/checkstyle,philwebb/checkstyle,zofuthan/checkstyle-1,Godin/checkstyle,mkordas/checkstyle,vboerchers/checkstyle,llocc/checkstyle,nikhilgupta23/checkstyle,gallandarakhneorg/checkstyle,FeodorFitsner/checkstyle,nikhilgupta23/checkstyle,cs1331/checkstyle,bansalayush/checkstyle,another-dave/checkstyle,ivanov-alex/checkstyle,jonmbake/checkstyle,WonderCsabo/checkstyle,nikhilgupta23/checkstyle,rnveach/checkstyle,AkshitaKukreja30/checkstyle,checkstyle/checkstyle,jasonchaffee/checkstyle,izishared/checkstyle,Andrew0701/checkstyle,autermann/checkstyle,rnveach/checkstyle,sharang108/checkstyle,llocc/checkstyle,zofuthan/checkstyle-1,cs1331/checkstyle,FeodorFitsner/checkstyle,zofuthan/checkstyle-1,WonderCsabo/checkstyle,jasonchaffee/checkstyle,naver/checkstyle,ivanov-alex/checkstyle,HubSpot/checkstyle,sabaka/checkstyle,sirdis/checkstyle,attatrol/checkstyle,sabaka/checkstyle,naver/checkstyle,izishared/checkstyle,checkstyle/checkstyle,romani/checkstyle,beckerhd/checkstyle,naver/checkstyle,AkshitaKukreja30/checkstyle,jonmbake/checkstyle,baratali/checkstyle,pbaranchikov/checkstyle,jochenvdv/checkstyle,gallandarakhneorg/checkstyle,jdoyle65/checkstyle,pietern/checkstyle,mkordas/checkstyle,pietern/checkstyle,attatrol/checkstyle,HubSpot/checkstyle,vboerchers/checkstyle,autermann/checkstyle,universsky/checkstyle,ivanov-alex/checkstyle,rmswimkktt/checkstyle,rmswimkktt/checkstyle,ilanKeshet/checkstyle,StetsiukRoman/checkstyle,sharang108/checkstyle,autermann/checkstyle,jdoyle65/checkst
yle,universsky/checkstyle,ilanKeshet/checkstyle,cs1331/checkstyle,romani/checkstyle,StetsiukRoman/checkstyle,designreuse/checkstyle,Bhavik3/checkstyle,beckerhd/checkstyle,WilliamRen/checkstyle,WonderCsabo/checkstyle,jonmbake/checkstyle,romani/checkstyle,sabaka/checkstyle,rnveach/checkstyle,baratali/checkstyle,HubSpot/checkstyle,MEZk/checkstyle,sharang108/checkstyle,philwebb/checkstyle,MEZk/checkstyle,FeodorFitsner/checkstyle,Godin/checkstyle,checkstyle/checkstyle,another-dave/checkstyle,MEZk/checkstyle,vboerchers/checkstyle,romani/checkstyle,beckerhd/checkstyle,sirdis/checkstyle,rnveach/checkstyle,pietern/checkstyle,checkstyle/checkstyle,designreuse/checkstyle,Bhavik3/checkstyle,liscju/checkstyle,another-dave/checkstyle,rnveach/checkstyle,Godin/checkstyle,baratali/checkstyle,checkstyle/checkstyle,bansalayush/checkstyle,izishared/checkstyle,bansalayush/checkstyle,designreuse/checkstyle
|
////////////////////////////////////////////////////////////////////////////////
// checkstyle: Checks Java source code for adherence to a set of rules.
// Copyright (C) 2001-2004 Oliver Burn
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; either
// version 2.1 of the License, or (at your option) any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
////////////////////////////////////////////////////////////////////////////////
package com.puppycrawl.tools.checkstyle.checks;
import java.util.Arrays;
import org.apache.regexp.RE;
import org.apache.regexp.RESyntaxException;
import org.apache.commons.beanutils.ConversionException;
import com.puppycrawl.tools.checkstyle.api.DetailAST;
import com.puppycrawl.tools.checkstyle.api.Utils;
/**
* <p>
* Checks the header of the source against a header file that contains a
* <a href="http://jakarta.apache.org/regexp/apidocs/org/apache/regexp/RE.html">
* regular expression</a>
* for each line of the source header.
* </p>
* <p>
* Rationale: In some projects checking against a fixed header
* is not sufficient (see {@link HeaderCheck}), e.g.
* the header might require a copyright line where the year information
* is not static.
* </p>
*
* <p>For example, consider the following header file:</p>
*
* <pre>
* line 1: ^/{71}$
* line 2: ^// checkstyle:$
* line 3: ^// Checks Java source code for adherence to a set of rules\.$
* line 4: ^// Copyright \(C\) \d\d\d\d Oliver Burn$
* line 5: ^// Last modification by \$Author.*\$$
* line 6: ^/{71}$
* </pre>
*
* <p>Lines 1 and 6 demonstrate a more compact notation for 71 '/'
* characters. Line 4 enforces that the copyright notice includes a four digit
* year. Line 5 is an example how to enforce revision control keywords in a file
* header. All lines start from ^ (line start symbol) and end with $ (line end)
* to force matching regexp with complete line in the source file.</p>
* <p>An example of how to configure the check to use header file
* "java.header" is:
* </p>
* <pre>
* <module name="RegexpHeader">
* <property name="headerFile" value="java.header"/>
* </module>
* </pre>
* <p class="body">
* To configure the check to use header file <code
* >"java.header"</code> and <code
 * >10</code> and <code>13</code> multi-lines:
* </p>
* <pre class="body">
* <module name="RegexpHeader">
* <property name="headerFile" value="java.header"/>
* <property name="multiLines" value="10, 13"/>
*</module>
* </pre>
* <p><u>Note</u>: ignoreLines property has been removed from this check to
* simplify it. The regular expression "^.*$" can be used to ignore a line.
* </p>
*
 * @author Lars Kühne
* @author o_sukhodolsky
*/
public class RegexpHeaderCheck extends AbstractHeaderCheck
{
    /** empty array to avoid instantiations. */
    private static final int[] EMPTY_INT_ARRAY = new int[0];

    /** the header lines to repeat (0 or more) in the check, sorted ascending (1-based). */
    private int[] mMultiLines = EMPTY_INT_ARRAY;

    /** the compiled regular expressions, one per header line */
    private RE[] mHeaderRegexps;

    /**
     * @param aLineNo a line number
     * @return if <code>aLineNo</code> is one of the repeat header lines.
     */
    private boolean isMultiLine(int aLineNo)
    {
        // mMultiLines holds sorted 1-based line numbers, so a binary
        // search on (aLineNo + 1) is sufficient.
        return (Arrays.binarySearch(mMultiLines, aLineNo + 1) >= 0);
    }

    /**
     * Set the lines numbers to repeat in the header check.
     * @param aList comma separated list of line numbers to repeat in header.
     */
    public void setMultiLines(int[] aList)
    {
        if (aList == null || aList.length == 0) {
            mMultiLines = EMPTY_INT_ARRAY;
            return;
        }
        // defensive copy, then sort so isMultiLine() can binary-search
        mMultiLines = new int[aList.length];
        System.arraycopy(aList, 0, mMultiLines, 0, aList.length);
        Arrays.sort(mMultiLines);
    }

    /**
     * Sets the file that contains the header to check against.
     * @param aFileName the file that contains the header to check against.
     * @throws ConversionException if the file cannot be loaded or one line
     * is not a regexp.
     */
    public void setHeaderFile(String aFileName)
        throws ConversionException
    {
        super.setHeaderFile(aFileName);
        // re-compile the patterns for the newly loaded header lines
        initHeaderRegexps();
    }

    /**
     * Set the header to check against. Individual lines in the header
     * must be separated by '\n' characters.
     * @param aHeader header content to check against.
     * @throws ConversionException if the header cannot be loaded or one line
     * is not a regexp.
     */
    public void setHeader(String aHeader)
    {
        super.setHeader(aHeader);
        // re-compile the patterns for the newly set header lines
        initHeaderRegexps();
    }

    /** Initializes {@link #mHeaderRegexps} from {@link #mHeaderLines}. */
    private void initHeaderRegexps()
    {
        final String[] headerLines = getHeaderLines();
        if (headerLines != null) {
            mHeaderRegexps = new RE[headerLines.length];
            for (int i = 0; i < headerLines.length; i++) {
                try {
                    // TODO: Not sure if cache in Utils is still necessary
                    mHeaderRegexps[i] = Utils.getRE(headerLines[i]);
                }
                catch (RESyntaxException ex) {
                    throw new ConversionException(
                            "line " + i + " in header specification"
                            + " is not a regular expression");
                }
            }
        }
    }

    /**
     * Checks if a code line matches the required header line.
     * @param aLineNo the linenumber to check against the header
     * @param aHeaderLineNo the header line number.
     * @return true if and only if the line matches the required header line.
     */
    private boolean isMatch(int aLineNo, int aHeaderLineNo)
    {
        final String line = getLines()[aLineNo];
        return mHeaderRegexps[aHeaderLineNo].match(line);
    }

    /** {@inheritDoc} */
    public void beginTree(DetailAST aRootAST)
    {
        final int headerSize = getHeaderLines().length;
        final int fileSize = getLines().length;
        // every non-repeating header line needs at least one file line
        if (headerSize - mMultiLines.length > fileSize) {
            log(1, "header.missing");
        }
        else {
            int headerLineNo = 0;
            int i;
            for (i = 0; headerLineNo < headerSize && i < fileSize; i++) {
                boolean isMatch = isMatch(i, headerLineNo);
                // a repeatable header line may occur zero times:
                // skip past consecutive non-matching repeat lines
                while (!isMatch && isMultiLine(headerLineNo)) {
                    headerLineNo++;
                    isMatch = (headerLineNo == headerSize)
                        || isMatch(i, headerLineNo);
                }
                if (!isMatch) {
                    log(i + 1, "header.mismatch",
                        getHeaderLines()[headerLineNo]);
                    break; // stop checking
                }
                // stay on a repeat line; it may match further file lines
                if (!isMultiLine(headerLineNo)) {
                    headerLineNo++;
                }
            }
            if (i == fileSize) {
                // if file finished, but we have at least one non-multi-line
                // header isn't completed
                for (; headerLineNo < headerSize; headerLineNo++) {
                    if (!isMultiLine(headerLineNo)) {
                        log(1, "header.missing");
                        break;
                    }
                }
            }
        }
    }
}
|
src/checkstyle/com/puppycrawl/tools/checkstyle/checks/RegexpHeaderCheck.java
|
////////////////////////////////////////////////////////////////////////////////
// checkstyle: Checks Java source code for adherence to a set of rules.
// Copyright (C) 2001-2004 Oliver Burn
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; either
// version 2.1 of the License, or (at your option) any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
////////////////////////////////////////////////////////////////////////////////
package com.puppycrawl.tools.checkstyle.checks;
import java.util.Arrays;
import org.apache.regexp.RE;
import org.apache.regexp.RESyntaxException;
import org.apache.commons.beanutils.ConversionException;
import com.puppycrawl.tools.checkstyle.api.DetailAST;
import com.puppycrawl.tools.checkstyle.api.Utils;
/**
* <p>
* Checks the header of the source against a header file that contains a
* <a href="http://jakarta.apache.org/regexp/apidocs/org/apache/regexp/RE.html">
* regular expression</a>
* for each line of the source header.
* </p>
* <p>
* Rationale: In some projects checking against a fixed header
* is not sufficient (see {@link HeaderCheck}), e.g.
* the header might require a copyright line where the year information
* is not static.
* </p>
*
* <p>For example, consider the following header file:</p>
*
* <pre>
* line 1: ^/{71}$
* line 2: ^// checkstyle:$
* line 3: ^// Checks Java source code for adherence to a set of rules\.$
* line 4: ^// Copyright \(C\) \d\d\d\d Oliver Burn$
* line 5: ^// Last modification by \$Author.*\$$
* line 6: ^/{71}$
* </pre>
*
* <p>Lines 1 and 6 demonstrate a more compact notation for 71 '/'
* characters. Line 4 enforces that the copyright notice includes a four digit
* year. Line 5 is an example how to enforce revision control keywords in a file
* header. All lines start from ^ (line start symbol) and end with $ (line end)
* to force matching regexp with complete line in the source file.</p>
* <p>An example of how to configure the check to use header file
* "java.header" is:
* </p>
* <pre>
* <module name="RegexpHeader">
* <property name="headerFile" value="java.header"/>
* </module>
* </pre>
* <p class="body">
* To configure the check to use header file <code
* >"java.header"</code> and <code
 * >10</code> and <code>13</code> multi-lines:
* </p>
* <pre class="body">
* <module name="RegexpHeader">
* <property name="headerFile" value="java.header"/>
* <property name="multiLines" value="10, 13"/>
*</module>
* </pre>
* <p><u>Note</u>: ignoreLines property has been removed from this check to
* simplify it. The regular expression "^.*$" can be used to ignore a line.
* </p>
*
 * @author Lars Kühne
* @author o_sukhodolsky
*/
public class RegexpHeaderCheck extends AbstractHeaderCheck
{
    /** empty array to avoid instantiations. */
    private static final int[] EMPTY_INT_ARRAY = new int[0];

    /** the header lines to repeat (0 or more) in the check, sorted ascending (1-based). */
    private int[] mMultiLines = EMPTY_INT_ARRAY;

    /** the compiled regular expressions, one per header line */
    private RE[] mHeaderRegexps;

    /**
     * @param aLineNo a line number
     * @return if <code>aLineNo</code> is one of the repeat header lines.
     */
    private boolean isMultiLine(int aLineNo)
    {
        // mMultiLines holds sorted 1-based line numbers, so a binary
        // search on (aLineNo + 1) is sufficient.
        return (Arrays.binarySearch(mMultiLines, aLineNo + 1) >= 0);
    }

    /**
     * Set the lines numbers to repeat in the header check.
     * @param aList comma separated list of line numbers to repeat in header.
     */
    public void setMultiLines(int[] aList)
    {
        if (aList == null || aList.length == 0) {
            mMultiLines = EMPTY_INT_ARRAY;
            return;
        }
        // defensive copy, then sort so isMultiLine() can binary-search
        mMultiLines = new int[aList.length];
        System.arraycopy(aList, 0, mMultiLines, 0, aList.length);
        Arrays.sort(mMultiLines);
    }

    /**
     * Sets the file that contains the header to check against.
     * @param aFileName the file that contains the header to check against.
     * @throws ConversionException if the file cannot be loaded or one line
     * is not a regexp.
     */
    public void setHeaderFile(String aFileName)
        throws ConversionException
    {
        super.setHeaderFile(aFileName);
        // re-compile the patterns for the newly loaded header lines
        initHeaderRegexps();
    }

    /**
     * Set the header to check against. Individual lines in the header
     * must be separated by '\n' characters.
     * @param aHeader header content to check against.
     * @throws ConversionException if the header cannot be loaded or one line
     * is not a regexp.
     */
    public void setHeader(String aHeader)
    {
        super.setHeader(aHeader);
        // re-compile the patterns for the newly set header lines
        initHeaderRegexps();
    }

    /** Initializes {@link #mHeaderRegexps} from {@link #mHeaderLines}. */
    private void initHeaderRegexps()
    {
        final String[] headerLines = getHeaderLines();
        if (headerLines != null) {
            mHeaderRegexps = new RE[headerLines.length];
            for (int i = 0; i < headerLines.length; i++) {
                try {
                    // TODO: Not sure if cache in Utils is still necessary
                    mHeaderRegexps[i] = Utils.getRE(headerLines[i]);
                }
                catch (RESyntaxException ex) {
                    throw new ConversionException(
                            "line " + i + " in header specification is not a regular expression");
                }
            }
        }
    }

    /**
     * Checks if a code line matches the required header line.
     * @param aLineNo the linenumber to check against the header
     * @param aHeaderLineNo the header line number.
     * @return true if and only if the line matches the required header line.
     */
    private boolean isMatch(int aLineNo, int aHeaderLineNo)
    {
        final String line = getLines()[aLineNo];
        return mHeaderRegexps[aHeaderLineNo].match(line);
    }

    /** {@inheritDoc} */
    public void beginTree(DetailAST aRootAST)
    {
        final int headerSize = getHeaderLines().length;
        final int fileSize = getLines().length;
        // every non-repeating header line needs at least one file line
        if (headerSize - mMultiLines.length > fileSize) {
            log(1, "header.missing");
        }
        else {
            int headerLineNo = 0;
            int i;
            for (i = 0; headerLineNo < headerSize && i < fileSize; i++) {
                boolean isMatch = isMatch(i, headerLineNo);
                // a repeatable header line may occur zero times:
                // skip past consecutive non-matching repeat lines
                while (!isMatch && isMultiLine(headerLineNo)) {
                    headerLineNo++;
                    isMatch = (headerLineNo == headerSize)
                        || isMatch(i, headerLineNo);
                }
                if (!isMatch) {
                    log(i + 1, "header.mismatch",
                        getHeaderLines()[headerLineNo]);
                    break; // stop checking
                }
                // stay on a repeat line; it may match further file lines
                if (!isMultiLine(headerLineNo)) {
                    headerLineNo++;
                }
            }
            if (i == fileSize) {
                // if file finished, but we have at least one non-multi-line
                // header isn't completed
                for (; headerLineNo < headerSize; headerLineNo++) {
                    if (!isMultiLine(headerLineNo)) {
                        log(1, "header.missing");
                        break;
                    }
                }
            }
        }
    }
}
|
rfe #1041590, allow header specification directly in checkstyle config file
|
src/checkstyle/com/puppycrawl/tools/checkstyle/checks/RegexpHeaderCheck.java
|
rfe #1041590, allow header specification directly in checkstyle config file
|
|
Java
|
lgpl-2.1
|
5b975209666bc9e7ed0af6d59f2d8bef235e6d36
| 0
|
Cazsius/Spice-of-Life-Carrot-Edition
|
package com.cazsius.solcarrot.tracking;
import com.cazsius.solcarrot.SOLCarrot;
import com.cazsius.solcarrot.SOLCarrotConfig;
import net.minecraft.entity.SharedMonsterAttributes;
import net.minecraft.entity.ai.attributes.AttributeModifier;
import net.minecraft.entity.ai.attributes.IAttributeInstance;
import net.minecraft.entity.player.PlayerEntity;
import net.minecraftforge.event.entity.player.PlayerEvent;
import net.minecraftforge.eventbus.api.SubscribeEvent;
import net.minecraftforge.fml.common.Mod;
import javax.annotation.Nullable;
import java.util.UUID;
/**
 * Keeps each player's max-health attribute in sync with the number of
 * food-diversity milestones they have reached.
 */
@Mod.EventBusSubscriber(modid = SOLCarrot.MOD_ID)
public final class MaxHealthHandler {
    /** Stable ID so the same modifier can be located and replaced later. */
    private static final UUID MILESTONE_HEALTH_MODIFIER_ID = UUID.fromString("b20d3436-0d39-4868-96ab-d0a4856e68c6");

    /** Re-applies the milestone health modifier when a player logs in. */
    @SubscribeEvent
    public static void onPlayerLogin(PlayerEvent.PlayerLoggedInEvent event) {
        updateFoodHPModifier(event.getPlayer());
    }

    /** Carries the existing modifier over when the player entity is cloned. */
    @SubscribeEvent
    public static void onPlayerClone(PlayerEvent.Clone event) {
        AttributeModifier prevModifier = getHealthModifier(event.getOriginal());
        if (prevModifier == null) return;
        updateHealthModifier(event.getPlayer(), prevModifier);
    }

    /** @return whether or not the player reached a new milestone in this update */
    public static boolean updateFoodHPModifier(PlayerEntity player) {
        // server-side only; no-op on the client
        if (player.world.isRemote) return false;
        AttributeModifier prevModifier = getHealthModifier(player);
        // offset relative to the vanilla 10 hearts (negative when baseHearts < 10)
        int healthPenalty = 2 * (SOLCarrotConfig.getBaseHearts() - 10);
        ProgressInfo progressInfo = FoodList.get(player).getProgressInfo();
        int milestonesAchieved = progressInfo.milestonesAchieved();
        // each milestone grants heartsPerMilestone hearts = 2x that many health points
        int addedHealthFromFood = milestonesAchieved * 2 * SOLCarrotConfig.getHeartsPerMilestone();
        double totalHealthModifier = healthPenalty + addedHealthFromFood;
        boolean hasChanged = prevModifier == null || prevModifier.getAmount() != totalHealthModifier;
        AttributeModifier modifier = new AttributeModifier(
            MILESTONE_HEALTH_MODIFIER_ID,
            "Health Gained from Trying New Foods",
            totalHealthModifier,
            AttributeModifier.Operation.ADDITION
        );
        // NOTE(review): the modifier is re-applied even when the amount is
        // unchanged — presumably to guard against desync; confirm intent.
        updateHealthModifier(player, modifier);
        return hasChanged;
    }

    /** @return the milestone modifier currently on the player, or null if absent. */
    @Nullable
    private static AttributeModifier getHealthModifier(PlayerEntity player) {
        return maxHealthAttribute(player).getModifier(MILESTONE_HEALTH_MODIFIER_ID);
    }

    /**
     * Replaces the milestone modifier on the player's max-health attribute and
     * rescales current health proportionally to the change in max health.
     */
    private static void updateHealthModifier(PlayerEntity player, AttributeModifier modifier) {
        float oldMax = player.getMaxHealth();
        IAttributeInstance attribute = maxHealthAttribute(player);
        attribute.removeModifier(modifier);
        attribute.applyModifier(modifier);
        float newHealth = player.getHealth() * player.getMaxHealth() / oldMax;
        // because apparently it doesn't update unless changed
        player.setHealth(0.1f);
        // adjust current health proportionally to increase in max health
        player.setHealth(newHealth);
    }

    private static IAttributeInstance maxHealthAttribute(PlayerEntity player) {
        return player.getAttribute(SharedMonsterAttributes.MAX_HEALTH);
    }

    /** Static-only event handler; never instantiated. */
    private MaxHealthHandler() {}
}
|
src/main/java/com/cazsius/solcarrot/tracking/MaxHealthHandler.java
|
package com.cazsius.solcarrot.tracking;
import com.cazsius.solcarrot.SOLCarrot;
import com.cazsius.solcarrot.SOLCarrotConfig;
import net.minecraft.entity.SharedMonsterAttributes;
import net.minecraft.entity.ai.attributes.AttributeModifier;
import net.minecraft.entity.ai.attributes.IAttributeInstance;
import net.minecraft.entity.player.PlayerEntity;
import net.minecraftforge.event.entity.player.PlayerEvent;
import net.minecraftforge.eventbus.api.SubscribeEvent;
import net.minecraftforge.fml.common.Mod;
import javax.annotation.Nullable;
import java.util.UUID;
/**
 * Keeps each player's max-health attribute in sync with the number of
 * food-diversity milestones they have reached.
 */
@Mod.EventBusSubscriber(modid = SOLCarrot.MOD_ID)
public final class MaxHealthHandler {
    /** Stable ID so the same modifier can be located and replaced later. */
    private static final UUID MILESTONE_HEALTH_MODIFIER_ID = UUID.fromString("b20d3436-0d39-4868-96ab-d0a4856e68c6");
    /** Re-applies the milestone health modifier when a player logs in. */
    @SubscribeEvent
    public static void onPlayerLogin(PlayerEvent.PlayerLoggedInEvent event) {
        updateFoodHPModifier(event.getPlayer());
    }
    /** Carries the existing modifier over when the player entity is cloned. */
    @SubscribeEvent
    public static void onPlayerClone(PlayerEvent.Clone event) {
        AttributeModifier prevModifier = getHealthModifier(event.getOriginal());
        if (prevModifier == null) return;
        updateHealthModifier(event.getPlayer(), prevModifier);
    }
    /** @return whether or not the player reached a new milestone in this update */
    public static boolean updateFoodHPModifier(PlayerEntity player) {
        // server-side only; no-op on the client
        if (player.world.isRemote) return false;
        AttributeModifier prevModifier = getHealthModifier(player);
        // offset relative to the vanilla 10 hearts (negative when baseHearts < 10)
        int healthPenalty = 2 * (SOLCarrotConfig.getBaseHearts() - 10);
        ProgressInfo progressInfo = FoodList.get(player).getProgressInfo();
        int milestonesAchieved = progressInfo.milestonesAchieved();
        // each milestone grants heartsPerMilestone hearts = 2x that many health points
        int addedHealthFromFood = milestonesAchieved * 2 * SOLCarrotConfig.getHeartsPerMilestone();
        double totalHealthModifier = healthPenalty + addedHealthFromFood;
        if (prevModifier == null || prevModifier.getAmount() != totalHealthModifier) {
            AttributeModifier modifier = new AttributeModifier(
                MILESTONE_HEALTH_MODIFIER_ID,
                "Health Gained from Trying New Foods",
                totalHealthModifier,
                AttributeModifier.Operation.ADDITION
            );
            float oldMax = player.getMaxHealth();
            updateHealthModifier(player, modifier);
            // adjust current health proportionally to increase in max health
            player.setHealth(player.getHealth() * player.getMaxHealth() / oldMax);
            return true;
        } else {
            return false;
        }
    }
    /** @return the milestone modifier currently on the player, or null if absent. */
    @Nullable
    private static AttributeModifier getHealthModifier(PlayerEntity player) {
        return maxHealthAttribute(player).getModifier(MILESTONE_HEALTH_MODIFIER_ID);
    }
    /** Removes then re-applies the given modifier on the max-health attribute. */
    private static void updateHealthModifier(PlayerEntity player, AttributeModifier modifier) {
        IAttributeInstance attribute = maxHealthAttribute(player);
        attribute.removeModifier(modifier);
        attribute.applyModifier(modifier);
    }
    private static IAttributeInstance maxHealthAttribute(PlayerEntity player) {
        return player.getAttribute(SharedMonsterAttributes.MAX_HEALTH);
    }
    /** Static-only event handler; never instantiated. */
    private MaxHealthHandler() {}
}
|
fixed max health sometimes desyncing
|
src/main/java/com/cazsius/solcarrot/tracking/MaxHealthHandler.java
|
fixed max health sometimes desyncing
|
|
Java
|
lgpl-2.1
|
ea9094305c12236e7c6f4340282d92da754f7068
| 0
|
andreasprlic/biojava,andreasprlic/biojava,andreasprlic/biojava,heuermh/biojava,heuermh/biojava,lafita/biojava,lafita/biojava,lafita/biojava,JolantaWojcik/biojavaOwn,emckee2006/biojava,biojava/biojava,biojava/biojava,pwrose/biojava,sbliven/biojava-sbliven,biojava/biojava,sbliven/biojava-sbliven,fionakim/biojava,JolantaWojcik/biojavaOwn,pwrose/biojava,zachcp/biojava,emckee2006/biojava,heuermh/biojava,paolopavan/biojava,zachcp/biojava,andreasprlic/biojava,fionakim/biojava,paolopavan/biojava,paolopavan/biojava,zachcp/biojava,sbliven/biojava-sbliven,emckee2006/biojava,fionakim/biojava,pwrose/biojava
|
package org.biojava3.core.sequence.location.template;
import java.util.List;
import org.biojava3.core.sequence.Strand;
import org.biojava3.core.sequence.location.SimpleLocation;
/**
* Sets of integers used to represent the location of features on sequence. A
* location can be a single set of bounds or composed of multiple
* sub-locations. Each sub-location is a Location and therefore subject to the
* same rules.
*
* Rather than the normal concept of min and max here we define start and
* stop which denotes nothing about the order of the location bounds. This
* leans towards the idea of circular locations where min & max still apply
* they convey incorrect ideas and assumptions about the relationship between
* these two integer values.
*
* @author ayates
*/
public interface Location extends Iterable<Location> {
    /**
     * Basic location which is set to the minimum and maximum bounds of
     * {@link Integer}. {@link Strand} is set to {@link Strand#UNDEFINED}.
     */
    public static final Location EMPTY =
        new SimpleLocation(Integer.MIN_VALUE, Integer.MAX_VALUE, Strand.UNDEFINED);
    /**
     * Start of the location; not necessarily the min position
     * (start may exceed end, e.g. for circular locations).
     */
    int getStart();
    /**
     * End of the location; not necessarily the max position
     * (see {@link #getStart()}).
     */
    int getEnd();
    /**
     * Strand which the location is located on
     */
    Strand getStrand();
    /**
     * Gives access to the sub locations for this location. However this does
     * not return sub-locations of sub-locations. For that functionality use
     * {@link #getAllSubLocations()}.
     *
     * @return A list of a single level of sub-locations
     */
    List<Location> getSubLocations();
    /**
     * An extension to {@link #getSubLocations()} which returns sub-locations
     * of sub-locations; this will continue until it runs out of those locations.
     *
     * @return List of all sub locations including sub-locations of sub locations
     */
    List<Location> getAllSubLocations();
    /**
     * Returns true if the location is considered to be complex; normally this
     * means the location is actually composed of sub-locations.
     */
    boolean isComplex();
    /**
     * Indicates if this location is circular. We do not capture how many times
     * we are circular, just that we are.
     */
    boolean isCircular();
}
|
biojava3-core/src/main/java/org/biojava3/core/sequence/location/template/Location.java
|
package org.biojava3.core.sequence.location.template;
import java.util.List;
import org.biojava3.core.sequence.Strand;
import org.biojava3.core.sequence.location.SimpleLocation;
/**
* Sets of integers used to represent the location of features on sequence. A
* location can be a single set of bounds or composed of multiple
* sub-locations. Each sub-location is a Location and therefore subject to the
* same rules.
*
* Rather than the normal concept of min and max here we define start and
* stop which denotes nothing about the order of the location bounds. This
* leans towards the idea of circular locations where min & max still apply
* they convey incorrect ideas and assumptions about the relationship between
* these two integer values.
*
* @author ayates
*/
public interface Location extends Iterable<Location> {
    /**
     * Basic location which is set to the minimum and maximum bounds of
     * {@link Integer}. {@link Strand} is set to {@link Strand#UNDEFINED}.
     */
    public static final Location EMPTY =
        new SimpleLocation(Integer.MIN_VALUE, Integer.MAX_VALUE, Strand.UNDEFINED);
    /**
     * Start of the location; not necessarily the min position
     * (start may exceed end, e.g. for circular locations).
     */
    int getStart();
    /**
     * End of the location; not necessarily the max position
     * (see {@link #getStart()}).
     */
    int getEnd();
    /**
     * Strand which the location is located on
     */
    Strand getStrand();
    /**
     * Gives access to the sub locations for this location. However this does
     * not return sub-locations of sub-locations. For that functionality use
     * {@link #getAllSubLocations()}.
     *
     * @return A list of a single level of sub-locations
     */
    List<Location> getSubLocations();
    /**
     * An extension to {@link #getSubLocations()} which returns sub-locations
     * of sub-locations; this will continue until it runs out of those locations.
     *
     * @return List of all sub locations including sub-locations of sub locations
     */
    List<Location> getAllSubLocations();
    /**
     * Returns true if the location is considered to be complex; normally this
     * means the location is actually composed of sub-locations.
     */
    boolean isComplex();
    /**
     * Indicates if this location is circular. We do not capture how many times
     * we are circular, just that we are.
     */
    boolean isCircular();
}
|
Documentation improvement
git-svn-id: a754228c0d23e99fefe44e35d84d6184990ceaa2@7653 7c6358e6-4a41-0410-a743-a5b2a554c398
|
biojava3-core/src/main/java/org/biojava3/core/sequence/location/template/Location.java
|
Documentation improvement
|
|
Java
|
lgpl-2.1
|
bd12c0fe28e29d8e36133d8f649b3d3ae5247d96
| 0
|
ebollens/ccnmp,cawka/ndnx,ebollens/ccnmp,svartika/ccnx,ebollens/ccnmp,ebollens/ccnmp,svartika/ccnx,svartika/ccnx,svartika/ccnx,cawka/ndnx,cawka/ndnx,svartika/ccnx,cawka/ndnx,svartika/ccnx,cawka/ndnx,svartika/ccnx
|
package org.ccnx.ccn.profiles.security.access;
import java.io.IOException;
import java.security.InvalidKeyException;
import java.security.Key;
import java.security.NoSuchAlgorithmException;
import java.security.PrivateKey;
import java.security.SecureRandom;
import javax.crypto.NoSuchPaddingException;
import javax.crypto.spec.SecretKeySpec;
import org.ccnx.ccn.CCNHandle;
import org.ccnx.ccn.config.ConfigurationException;
import org.ccnx.ccn.config.SystemConfiguration;
import org.ccnx.ccn.impl.CCNFlowControl.SaveType;
import org.ccnx.ccn.impl.security.crypto.ContentKeys;
import org.ccnx.ccn.impl.security.crypto.KDFContentKeys;
import org.ccnx.ccn.impl.support.DataUtils;
import org.ccnx.ccn.impl.support.Log;
import org.ccnx.ccn.io.content.ContentDecodingException;
import org.ccnx.ccn.io.content.ContentEncodingException;
import org.ccnx.ccn.io.content.ContentGoneException;
import org.ccnx.ccn.io.content.ContentNotReadyException;
import org.ccnx.ccn.io.content.WrappedKey;
import org.ccnx.ccn.io.content.WrappedKey.WrappedKeyObject;
import org.ccnx.ccn.profiles.SegmentationProfile;
import org.ccnx.ccn.profiles.namespace.NamespaceManager;
import org.ccnx.ccn.profiles.namespace.NamespaceManager.Root.RootObject;
import org.ccnx.ccn.profiles.security.access.group.NodeKey;
import org.ccnx.ccn.protocol.ContentName;
import org.ccnx.ccn.protocol.PublisherPublicKeyDigest;
import org.ccnx.ccn.protocol.SignedInfo.ContentType;
public abstract class AccessControlManager {
/**
* Default data key length in bytes. No real reason this can't be bumped up to 32. It
* acts as the seed for a KDF, not an encryption key.
*/
public static final int DEFAULT_DATA_KEY_LENGTH = 16;
/**
* The keys we're wrapping are really seeds for a KDF, not keys in their own right.
* Eventually we'll use CMAC, so call them AES...
*/
public static final String DEFAULT_DATA_KEY_ALGORITHM = "AES";
public static final String DATA_KEY_LABEL = "Data Key";
protected ContentName _namespace;
protected KeyCache _keyCache;
protected CCNHandle _handle;
protected SecureRandom _random = new SecureRandom();
/**
 * Factory method.
 * Eventually split between a superclass AccessControlManager that handles many
 * access schemes and a subclass GroupBasedAccessControlManager. For now, put
 * a factory method here that makes you an ACM based on information in a stored
 * root object. Have to trust that object as a function of who signed it.
 * @param policyInformation the stored policy root describing the scheme to use
 * @param handle the CCN handle to operate over
 * @return currently always null; implementation pending (see TODO)
 */
public static AccessControlManager createManager(RootObject policyInformation, CCNHandle handle) {
    return null; // TODO fill in
}
/**
 * Labels for deriving various types of keys.
 * @return the KDF label used when wrapping/deriving data keys
 *         ({@link #DATA_KEY_LABEL}).
 */
public String dataKeyLabel() {
    return DATA_KEY_LABEL;
}
/** @return the CCN handle used for all network operations. */
public CCNHandle handle() { return _handle; }
/** @return the in-memory cache of keys this manager has already seen. */
protected KeyCache keyCache() { return _keyCache; }
/**
 * @param content a content name to test
 * @return true iff the name falls under this manager's protected namespace.
 */
public boolean inProtectedNamespace(ContentName content) {
    return _namespace.isPrefixOf(content);
}
/** @return the root of the namespace this manager protects. */
public ContentName getNamespaceRoot() { return _namespace; }
/**
 * Used by content reader to retrieve the keys necessary to decrypt this content.
 * Delegates to specific subclasses to retrieve data key using retrieveWrappedDataKey,
 * and then if key used to encrypt data key isn't
 * in cache, delegates retrieving the unwrapping key
 * to subclasses using getDataKeyUnwrappingKey. Provides a default implementation
 * of retrieveDataKey.
 * To turn the result of this into a key for decrypting content,
 * follow the steps in the comments to #generateAndStoreDataKey(ContentName).
 * @param dataNodeName name of the data node whose key is wanted
 * @return the unwrapped data key, or null if it could not be retrieved/unwrapped
 * @throws IOException
 * @throws ContentDecodingException
 * @throws InvalidKeyException
 * @throws NoSuchAlgorithmException
 */
public Key getDataKey(ContentName dataNodeName) throws ContentDecodingException,
            IOException, InvalidKeyException, NoSuchAlgorithmException {
    // Let subclasses change data key storage conventions.
    WrappedKeyObject wdko = retrieveWrappedDataKey(dataNodeName);
    if (null == wdko) {
        // no wrapped data key published for this node
        return null;
    }
    Log.finer("getDataKey: data key is wrapped by key {0} stored at {1}, attempting to retrieve.",
            DataUtils.printHexBytes(wdko.wrappedKey().wrappingKeyIdentifier()), wdko.wrappedKey().wrappingKeyName());
    Key dataKey = null;
    Key wrappingKey = null;
    // fast path: the wrapping key may already be in our key cache
    if (hasKey(wdko.wrappedKey().wrappingKeyIdentifier())) {
        wrappingKey = getKey(wdko.wrappedKey().wrappingKeyIdentifier());
        if (null == wrappingKey) {
            Log.warning("Thought we had key {0} in cache, but cannot retrieve it! Data node: {1}.",
                    DataUtils.printHexBytes(wdko.wrappedKey().wrappingKeyIdentifier()),
                    dataNodeName);
            // fall through, try subclass retrieval
        } else {
            Log.fine("Unwrapping key for data node {0} with cached key whose id is {1}.", dataNodeName,
                    DataUtils.printHexBytes(wdko.wrappedKey().wrappingKeyIdentifier()));
        }
    }
    // Could simplify to remove cache-retry logic.
    if (null == wrappingKey) {
        // No dice. Try subclass-specific retrieval.
        Log.info("getDataKey: key {0} not in cache, getting data key wrapping key for data node {0} with wrapped key {1}",
                DataUtils.printHexBytes(wdko.wrappedKey().wrappingKeyIdentifier()), dataNodeName, wdko);
        wrappingKey = getDataKeyWrappingKey(dataNodeName, wdko);
    }
    if (null != wrappingKey) {
        // unwrap (decrypt) the stored data key with the wrapping key
        dataKey = wdko.wrappedKey().unwrapKey(wrappingKey);
        return dataKey;
    }
    // could not obtain a wrapping key by any means
    return null;
}
/**
 * Subclass hook: given a retrieved wrapped data key, obtain the key needed
 * to unwrap it (e.g. by walking a node-key or group-key hierarchy).
 * Called by {@link #getDataKey(ContentName)} when the wrapping key is not cached.
 * @param dataNodeName the data node whose key is being unwrapped
 * @param wrappedDataKeyObject the stored wrapped data key
 * @return the unwrapping key, or null if it cannot be obtained
 */
protected abstract Key getDataKeyWrappingKey(ContentName dataNodeName, WrappedKeyObject wrappedDataKeyObject) throws
        InvalidKeyException, ContentNotReadyException, ContentGoneException, ContentEncodingException,
        ContentDecodingException, IOException, NoSuchAlgorithmException;
/**
 * Fetches the stored wrapped data key object for a data node, using the
 * profile's naming convention for data keys.
 * @param dataNodeName the data node whose wrapped key to fetch
 * @return the wrapped key object, or null if none could be retrieved
 */
protected WrappedKeyObject retrieveWrappedDataKey(ContentName dataNodeName)
        throws ContentDecodingException, ContentGoneException, ContentNotReadyException, IOException {
    final ContentName keyName = AccessControlProfile.dataKeyName(dataNodeName);
    final WrappedKeyObject keyObject = new WrappedKeyObject(keyName, handle());
    if (keyObject.wrappedKey() != null) {
        return keyObject;
    }
    Log.warning("Could not retrieve data key for node: " + dataNodeName);
    return null;
}
/**
 * Find the key to use to wrap a data key at this node. This requires
 * the current effective node key, and wrapping this data key in it. If the
 * current node key is dirty, this causes a new one to be generated.
 * If data at the current node is public, this returns null. Does not check
 * to see whether content is excluded from encryption (e.g. by being access
 * control data).
 * @param dataNodeName the node for which to find a data key wrapping key
 * @param publisher in case output key retrieval needs to be specialized by publisher
 * @return if null, the data is to be unencrypted. (Alternatively, could
 *   return a NodeKey that indicates public.)
 * @throws AccessDeniedException if we don't have rights to retrieve key.
 * @throws InvalidKeyException
 * @throws ContentEncodingException
 * @throws IOException
 * @throws NoSuchAlgorithmException
 */
public abstract NodeKey getDataKeyWrappingKey(ContentName dataNodeName, PublisherPublicKeyDigest publisher)
    throws AccessDeniedException, InvalidKeyException,
            ContentEncodingException, IOException, NoSuchAlgorithmException;
/**
* Wrap a data key in a given node key and store it.
* @param dataNodeName
* @param dataKey
* @param wrappingKey
* @throws InvalidKeyException
* @throws ContentEncodingException
* @throws IOException
*/
public void storeDataKey(ContentName dataNodeName, Key dataKey, NodeKey wrappingKey) throws InvalidKeyException, ContentEncodingException, IOException {
Log.info("storeDataKey: Wrapping data key " +
DataUtils.printHexBytes(WrappedKey.wrappingKeyIdentifier(dataKey)) + " for node: " + dataNodeName +
" with wrappingKey for node: " +
wrappingKey.nodeName() + " derived from stored node key for node: " +
wrappingKey.storedNodeKeyName());
Log.info("storeDataKey: stored node key has key id {0}, derived key has id {1}",
DataUtils.printHexBytes(wrappingKey.storedNodeKeyID()),
DataUtils.printHexBytes(WrappedKey.wrappingKeyIdentifier(wrappingKey.nodeKey())));
// TODO another case where we're wrapping in an effective node key but labeling it with
// the stored node key information. This will work except if we interpose an ACL in the meantime --
// we may not have the information necessary to figure out how to decrypt.
WrappedKey wrappedDataKey = WrappedKey.wrapKey(dataKey,
null, dataKeyLabel(),
wrappingKey.nodeKey());
wrappedDataKey.setWrappingKeyIdentifier(wrappingKey.storedNodeKeyID());
wrappedDataKey.setWrappingKeyName(wrappingKey.storedNodeKeyName());
storeKeyContent(AccessControlProfile.dataKeyName(dataNodeName), wrappedDataKey);
}
/**
* Generate a random data key.
**/
public Key generateDataKey(ContentName dataNodeName) {
// Generate new random data key of appropriate length
byte [] dataKeyBytes = new byte[DEFAULT_DATA_KEY_LENGTH];
_random.nextBytes(dataKeyBytes);
Key dataKey = new SecretKeySpec(dataKeyBytes, DEFAULT_DATA_KEY_ALGORITHM);
return dataKey;
}
/**
* Actual output functions.
* @param dataNodeName -- the content node for whom this is the data key.
* @param wrappedDataKey
* @throws IOException
* @throws ContentEncodingException
*/
protected void storeKeyContent(ContentName dataNodeName, WrappedKey wrappedKey) throws ContentEncodingException, IOException {
WrappedKeyObject wko = new WrappedKeyObject(AccessControlProfile.dataKeyName(dataNodeName), wrappedKey, SaveType.REPOSITORY, handle());
wko.save();
}
/**
* Add a private key to our cache
* @param keyName
* @param publicKeyIdentifier
* @param pk
*/
public void addPrivateKey(ContentName keyName, byte [] publicKeyIdentifier, PrivateKey pk) {
_keyCache.addPrivateKey(keyName, publicKeyIdentifier, pk);
}
/**
* Add my private key to our cache
* @param publicKeyIdentifier
* @param pk
*/
public void addMyPrivateKey(byte [] publicKeyIdentifier, PrivateKey pk) {
_keyCache.addMyPrivateKey(publicKeyIdentifier, pk);
}
/**
* Add a key to our cache
* @param name
* @param key
*/
public void addKey(ContentName name, Key key) {
_keyCache.addKey(name, key);
}
public boolean hasKey(byte [] keyID) {
return _keyCache.containsKey(keyID);
}
protected Key getKey(byte [] desiredKeyIdentifier) {
return _keyCache.getKey(desiredKeyIdentifier);
}
/**
* Given the name of a content stream, this function verifies that access is allowed and returns the
* keys required to decrypt the stream.
* @param dataNodeName The name of the stream, including version component, but excluding
* segment component.
* @param publisher the publisher to get keys for, if it matters
* @return Returns the keys ready to be used for en/decryption, or null if the content is not encrypted.
* @throws IOException
* @throws InvalidKeyException
* @throws AccessDeniedException
* @throws NoSuchAlgorithmException
*/
public ContentKeys getContentKeys(ContentName dataNodeName, PublisherPublicKeyDigest publisher)
throws InvalidKeyException, AccessDeniedException, IOException, NoSuchAlgorithmException {
if (SegmentationProfile.isSegment(dataNodeName)) {
dataNodeName = SegmentationProfile.segmentRoot(dataNodeName);
}
Key dataKey = getDataKey(dataNodeName);
if (null == dataKey)
return null;
return getDefaultAlgorithmContentKeys(dataKey);
}
public static ContentKeys getDefaultAlgorithmContentKeys(Key dataKey) throws InvalidKeyException {
try {
// TODO - figure out where algorithm spec lives
return new KDFContentKeys(ContentKeys.DEFAULT_CIPHER_ALGORITHM, dataKey.getEncoded(), DATA_KEY_LABEL);
} catch (NoSuchAlgorithmException e) {
String err = "Unexpected NoSuchAlgorithmException for default algorithm we have already used!";
Log.severe(err);
throw new InvalidKeyException(err, e);
} catch (NoSuchPaddingException e) {
String err = "Unexpected NoSuchPaddingException for default algorithm we have already used!";
Log.severe(err);
throw new InvalidKeyException(err, e);
}
}
/**
* Called when a stream is opened for reading, to determine if the name is under a root ACL, and
* if so find or create an AccessControlManager, and get keys for access. Only called if
* content is encrypted.
* @param name name of the stream to be opened, without the segment number
* @param publisher the publisher of the stream to open, in case that matters for key retrieva
* @param library CCN Library instance to use for any network operations.
* @return If the stream is under access control then keys to decrypt the data are returned if it's
* encrypted. If the stream is not under access control (no Root ACL block can be found) then null is
* returned.
* @throws IOException if a problem happens getting keys.
*/
public static ContentKeys keysForInput(ContentName name, PublisherPublicKeyDigest publisher, CCNHandle handle)
throws IOException {
AccessControlManager acm;
try {
acm = NamespaceManager.findACM(name, handle);
if (acm != null) {
Log.info("keysForInput: retrieving key for data node {0}", name);
return acm.getContentKeys(name, publisher);
}
} catch (ConfigurationException e) {
// TODO use 1.6 constuctors that take nested exceptions when can move off 1.5
Log.logException("ConfigurationException in keysForInput", e);
throw new IOException(e.getClass().getName() + ": Opening stream for input: " + e.getMessage());
} catch (InvalidKeyException e) {
// TODO use 1.6 constuctors that take nested exceptions when can move off 1.5
Log.logException("InvalidKeyException in keysForInput", e);
throw new IOException(e.getClass().getName() + ": Opening stream for input: " + e.getMessage());
} catch (NoSuchAlgorithmException e) {
// TODO use 1.6 constuctors that take nested exceptions when can move off 1.5
Log.logException("NoSuchAlgorithmException in keysForInput", e);
throw new IOException(e.getClass().getName() + ": Opening stream for input: " + e.getMessage());
}
return null;
}
/**
* Get keys to encrypt content as its' written, if that content is to be protected.
* @param name
* @param publisher
* @param type the type of content to be written. Mostly used to determine whether
* content is protected, but could also be used to specialize keys.
* @param handle
* @return
* @throws IOException
*/
public static ContentKeys keysForOutput(ContentName name, PublisherPublicKeyDigest publisher, ContentType contentType, CCNHandle handle)
throws IOException {
if (SystemConfiguration.disableAccessControl()) {
Log.finest("Access control disabled, not searching for keys for {0}.", name);
return null;
}
AccessControlManager acm;
try {
acm = NamespaceManager.findACM(name, handle);
Log.info("keysForOutput: found an acm: " + acm);
if ((acm != null) && (acm.isProtectedContent(name, publisher, contentType, handle))) {
// First we need to figure out whether this content is public or unprotected...
Log.info("keysForOutput: found ACM, protected content, generating new data key for data node {0}", name);
NodeKey dataKeyWrappingKey = acm.getDataKeyWrappingKey(name, publisher);
if (null == dataKeyWrappingKey) {
// if content is public -- either null or a special value would work
return null; // no keys
}
Key dataKey = acm.generateDataKey(name);
Log.finer("keysForOutput: content {0} publisher {1} data key {2} wrapping key {3}", name, publisher,
DataUtils.printHexBytes(dataKey.getEncoded()), dataKeyWrappingKey);
acm.storeDataKey(name, dataKey, dataKeyWrappingKey);
return getDefaultAlgorithmContentKeys(dataKey);
}
} catch (ConfigurationException e) {
// TODO use 1.6 constuctors that take nested exceptions when can move off 1.5
Log.logException("ConfigurationException in keysForInput", e);
throw new IOException(e.getClass().getName() + ": Opening stream for input: " + e.getMessage());
} catch (InvalidKeyException e) {
// TODO use 1.6 constuctors that take nested exceptions when can move off 1.5
Log.logException("InvalidKeyException in keysForInput", e);
throw new IOException(e.getClass().getName() + ": Opening stream for input: " + e.getMessage());
} catch (NoSuchAlgorithmException e) {
// TODO use 1.6 constuctors that take nested exceptions when can move off 1.5
Log.logException("NoSuchAlgorithmException in keysForInput", e);
throw new IOException(e.getClass().getName() + ": Opening stream for input: " + e.getMessage());
}
return null;
}
/**
* Allow AccessControlManagers to specify some content is not to be protected; for example,
* access control lists are not themselves encrypted.
* TODO: should headers be exempt from encryption?
*/
public boolean isProtectedContent(ContentName name, PublisherPublicKeyDigest publisher, ContentType contentType, CCNHandle hande) {
if (!inProtectedNamespace(name)) {
return false;
}
if (AccessControlProfile.isAccessName(name)) {
// Don't encrypt the access control metadata itself, or we couldn't get the
// keys to decrypt the other stuff.
return false;
}
return true;
}
}
|
javasrc/src/org/ccnx/ccn/profiles/security/access/AccessControlManager.java
|
package org.ccnx.ccn.profiles.security.access;
import java.io.IOException;
import java.security.InvalidKeyException;
import java.security.Key;
import java.security.NoSuchAlgorithmException;
import java.security.PrivateKey;
import java.security.SecureRandom;
import javax.crypto.NoSuchPaddingException;
import javax.crypto.spec.SecretKeySpec;
import org.ccnx.ccn.CCNHandle;
import org.ccnx.ccn.config.ConfigurationException;
import org.ccnx.ccn.config.SystemConfiguration;
import org.ccnx.ccn.impl.CCNFlowControl.SaveType;
import org.ccnx.ccn.impl.security.crypto.ContentKeys;
import org.ccnx.ccn.impl.security.crypto.KDFContentKeys;
import org.ccnx.ccn.impl.support.DataUtils;
import org.ccnx.ccn.impl.support.Log;
import org.ccnx.ccn.io.content.ContentDecodingException;
import org.ccnx.ccn.io.content.ContentEncodingException;
import org.ccnx.ccn.io.content.ContentGoneException;
import org.ccnx.ccn.io.content.ContentNotReadyException;
import org.ccnx.ccn.io.content.WrappedKey;
import org.ccnx.ccn.io.content.WrappedKey.WrappedKeyObject;
import org.ccnx.ccn.profiles.SegmentationProfile;
import org.ccnx.ccn.profiles.namespace.NamespaceManager;
import org.ccnx.ccn.profiles.namespace.NamespaceManager.Root.RootObject;
import org.ccnx.ccn.profiles.security.access.group.NodeKey;
import org.ccnx.ccn.protocol.ContentName;
import org.ccnx.ccn.protocol.PublisherPublicKeyDigest;
import org.ccnx.ccn.protocol.SignedInfo.ContentType;
public abstract class AccessControlManager {
/**
* Default data key length in bytes. No real reason this can't be bumped up to 32. It
* acts as the seed for a KDF, not an encryption key.
*/
public static final int DEFAULT_DATA_KEY_LENGTH = 16;
/**
* The keys we're wrapping are really seeds for a KDF, not keys in their own right.
* Eventually we'll use CMAC, so call them AES...
*/
public static final String DEFAULT_DATA_KEY_ALGORITHM = "AES";
public static final String DATA_KEY_LABEL = "Data Key";
protected ContentName _namespace;
protected KeyCache _keyCache;
protected CCNHandle _handle;
protected SecureRandom _random = new SecureRandom();
/**
* Factory method.
* Eventually split between a superclass AccessControlManager that handles many
* access schemes and a subclass GroupBasedAccessControlManager. For now, put
* a factory method here that makes you an ACM based on information in a stored
* root object. Have to trust that object as a function of who signed it.
*/
public static AccessControlManager createManager(RootObject policyInformation, CCNHandle handle) {
return null; // TODO fill in
}
/**
* Labels for deriving various types of keys.
* @return
*/
public String dataKeyLabel() {
return DATA_KEY_LABEL;
}
public CCNHandle handle() { return _handle; }
protected KeyCache keyCache() { return _keyCache; }
public boolean inProtectedNamespace(ContentName content) {
return _namespace.isPrefixOf(content);
}
public ContentName getNamespaceRoot() { return _namespace; }
/**
* Used by content reader to retrieve the keys necessary to decrypt this content.
* Delegates to specific subclasses to retrieve data key using retrieveWrappedDataKey,
* and then if key used to encrypt data key isn't
* in cache, delegates retrieving the unwrapping key
* to subclasses using getDataKeyUnwrappingKey. Provides a default implementation
* of retrieveDataKey.
* To turn the result of this into a key for decrypting content,
* follow the steps in the comments to #generateAndStoreDataKey(ContentName).
* @param dataNodeName
* @return
* @throws IOException
* @throws ContentDecodingException
* @throws InvalidKeyException
* @throws NoSuchAlgorithmException
*/
public Key getDataKey(ContentName dataNodeName) throws ContentDecodingException,
IOException, InvalidKeyException, NoSuchAlgorithmException {
// Let subclasses change data key storage conventions.
WrappedKeyObject wdko = retrieveWrappedDataKey(dataNodeName);
if (null == wdko) {
return null;
}
Log.finer("getDataKey: data key is wrapped by key {0} stored at {1}, attempting to retrieve.",
DataUtils.printHexBytes(wdko.wrappedKey().wrappingKeyIdentifier()), wdko.wrappedKey().wrappingKeyName());
Key dataKey = null;
Key wrappingKey = null;
if (hasKey(wdko.wrappedKey().wrappingKeyIdentifier())) {
wrappingKey = getKey(wdko.wrappedKey().wrappingKeyIdentifier());
if (null == wrappingKey) {
Log.warning("Thought we had key {0} in cache, but cannot retrieve it! Data node: {1}.",
DataUtils.printHexBytes(wdko.wrappedKey().wrappingKeyIdentifier()),
dataNodeName);
// fall through, try subclass retrieval
} else {
Log.fine("Unwrapping key for data node {0} with cached key whose id is {1}.", dataNodeName,
DataUtils.printHexBytes(wdko.wrappedKey().wrappingKeyIdentifier()));
}
}
// Could simplify to remove cache-retry logic.
if (null == wrappingKey) {
// No dice. Try subclass-specific retrieval.
Log.info("getDataKey: key {0} not in cache, getting data key wrapping key for data node {0} with wrapped key {1}",
DataUtils.printHexBytes(wdko.wrappedKey().wrappingKeyIdentifier()), dataNodeName, wdko);
wrappingKey = getDataKeyWrappingKey(dataNodeName, wdko);
}
if (null != wrappingKey) {
dataKey = wdko.wrappedKey().unwrapKey(wrappingKey);
return dataKey;
}
return null;
}
protected abstract Key getDataKeyWrappingKey(ContentName dataNodeName, WrappedKeyObject wrappedDataKeyObject) throws
InvalidKeyException, ContentNotReadyException, ContentGoneException, ContentEncodingException,
ContentDecodingException, IOException, NoSuchAlgorithmException;
protected WrappedKeyObject retrieveWrappedDataKey(ContentName dataNodeName)
throws ContentDecodingException, ContentGoneException, ContentNotReadyException, IOException {
WrappedKeyObject wdko = new WrappedKeyObject(AccessControlProfile.dataKeyName(dataNodeName), handle());
if (null == wdko.wrappedKey()) {
Log.warning("Could not retrieve data key for node: " + dataNodeName);
return null;
}
return wdko;
}
/**
* Find the key to use to wrap a data key at this node. This requires
* the current effective node key, and wrapping this data key in it. If the
* current node key is dirty, this causes a new one to be generated.
* If data at the current node is public, this returns null. Does not check
* to see whether content is excluded from encryption (e.g. by being access
* control data).
* @param dataNodeName the node for which to find a data key wrapping key
* @param publisher in case output key retrieval needs to be specialized by publisher
* @return if null, the data is to be unencrypted. (Alteratively, could
* return a NodeKey that indicates public.)
* @param newRandomDataKey
* @throws AccessDeniedException if we don't have rights to retrieve key.
* @throws InvalidKeyException
* @throws ContentEncodingException
* @throws IOException
* @throws NoSuchAlgorithmException
*/
public abstract NodeKey getDataKeyWrappingKey(ContentName dataNodeName, PublisherPublicKeyDigest publisher)
throws AccessDeniedException, InvalidKeyException,
ContentEncodingException, IOException, NoSuchAlgorithmException;
/**
* Wrap a data key in a given node key and store it.
* @param dataNodeName
* @param dataKey
* @param wrappingKey
* @throws InvalidKeyException
* @throws ContentEncodingException
* @throws IOException
*/
public void storeDataKey(ContentName dataNodeName, Key dataKey, NodeKey wrappingKey) throws InvalidKeyException, ContentEncodingException, IOException {
Log.info("storeDataKey: Wrapping data key " +
DataUtils.printHexBytes(WrappedKey.wrappingKeyIdentifier(dataKey)) + " for node: " + dataNodeName +
" with wrappingKey for node: " +
wrappingKey.nodeName() + " derived from stored node key for node: " +
wrappingKey.storedNodeKeyName());
Log.info("storeDataKey: stored node key has key id {0}, derived key has id {1}",
DataUtils.printHexBytes(wrappingKey.storedNodeKeyID()),
DataUtils.printHexBytes(WrappedKey.wrappingKeyIdentifier(wrappingKey.nodeKey())));
// TODO another case where we're wrapping in an effective node key but labeling it with
// the stored node key information. This will work except if we interpose an ACL in the meantime --
// we may not have the information necessary to figure out how to decrypt.
WrappedKey wrappedDataKey = WrappedKey.wrapKey(dataKey,
null, dataKeyLabel(),
wrappingKey.nodeKey());
wrappedDataKey.setWrappingKeyIdentifier(WrappedKey.wrappingKeyIdentifier(wrappingKey.nodeKey()));
wrappedDataKey.setWrappingKeyName(wrappingKey.nodeName());
storeKeyContent(AccessControlProfile.dataKeyName(dataNodeName), wrappedDataKey);
}
/**
* Generate a random data key.
**/
public Key generateDataKey(ContentName dataNodeName) {
// Generate new random data key of appropriate length
byte [] dataKeyBytes = new byte[DEFAULT_DATA_KEY_LENGTH];
_random.nextBytes(dataKeyBytes);
Key dataKey = new SecretKeySpec(dataKeyBytes, DEFAULT_DATA_KEY_ALGORITHM);
return dataKey;
}
/**
* Actual output functions.
* @param dataNodeName -- the content node for whom this is the data key.
* @param wrappedDataKey
* @throws IOException
* @throws ContentEncodingException
*/
protected void storeKeyContent(ContentName dataNodeName, WrappedKey wrappedKey) throws ContentEncodingException, IOException {
WrappedKeyObject wko = new WrappedKeyObject(AccessControlProfile.dataKeyName(dataNodeName), wrappedKey, SaveType.REPOSITORY, handle());
wko.save();
}
/**
* Add a private key to our cache
* @param keyName
* @param publicKeyIdentifier
* @param pk
*/
public void addPrivateKey(ContentName keyName, byte [] publicKeyIdentifier, PrivateKey pk) {
_keyCache.addPrivateKey(keyName, publicKeyIdentifier, pk);
}
/**
* Add my private key to our cache
* @param publicKeyIdentifier
* @param pk
*/
public void addMyPrivateKey(byte [] publicKeyIdentifier, PrivateKey pk) {
_keyCache.addMyPrivateKey(publicKeyIdentifier, pk);
}
/**
* Add a key to our cache
* @param name
* @param key
*/
public void addKey(ContentName name, Key key) {
_keyCache.addKey(name, key);
}
public boolean hasKey(byte [] keyID) {
return _keyCache.containsKey(keyID);
}
protected Key getKey(byte [] desiredKeyIdentifier) {
return _keyCache.getKey(desiredKeyIdentifier);
}
/**
* Given the name of a content stream, this function verifies that access is allowed and returns the
* keys required to decrypt the stream.
* @param dataNodeName The name of the stream, including version component, but excluding
* segment component.
* @param publisher the publisher to get keys for, if it matters
* @return Returns the keys ready to be used for en/decryption, or null if the content is not encrypted.
* @throws IOException
* @throws InvalidKeyException
* @throws AccessDeniedException
* @throws NoSuchAlgorithmException
*/
public ContentKeys getContentKeys(ContentName dataNodeName, PublisherPublicKeyDigest publisher)
throws InvalidKeyException, AccessDeniedException, IOException, NoSuchAlgorithmException {
if (SegmentationProfile.isSegment(dataNodeName)) {
dataNodeName = SegmentationProfile.segmentRoot(dataNodeName);
}
Key dataKey = getDataKey(dataNodeName);
if (null == dataKey)
return null;
return getDefaultAlgorithmContentKeys(dataKey);
}
public static ContentKeys getDefaultAlgorithmContentKeys(Key dataKey) throws InvalidKeyException {
try {
// TODO - figure out where algorithm spec lives
return new KDFContentKeys(ContentKeys.DEFAULT_CIPHER_ALGORITHM, dataKey.getEncoded(), DATA_KEY_LABEL);
} catch (NoSuchAlgorithmException e) {
String err = "Unexpected NoSuchAlgorithmException for default algorithm we have already used!";
Log.severe(err);
throw new InvalidKeyException(err, e);
} catch (NoSuchPaddingException e) {
String err = "Unexpected NoSuchPaddingException for default algorithm we have already used!";
Log.severe(err);
throw new InvalidKeyException(err, e);
}
}
/**
* Called when a stream is opened for reading, to determine if the name is under a root ACL, and
* if so find or create an AccessControlManager, and get keys for access. Only called if
* content is encrypted.
* @param name name of the stream to be opened, without the segment number
* @param publisher the publisher of the stream to open, in case that matters for key retrieva
* @param library CCN Library instance to use for any network operations.
* @return If the stream is under access control then keys to decrypt the data are returned if it's
* encrypted. If the stream is not under access control (no Root ACL block can be found) then null is
* returned.
* @throws IOException if a problem happens getting keys.
*/
public static ContentKeys keysForInput(ContentName name, PublisherPublicKeyDigest publisher, CCNHandle handle)
throws IOException {
AccessControlManager acm;
try {
acm = NamespaceManager.findACM(name, handle);
if (acm != null) {
Log.info("keysForInput: retrieving key for data node {0}", name);
return acm.getContentKeys(name, publisher);
}
} catch (ConfigurationException e) {
// TODO use 1.6 constuctors that take nested exceptions when can move off 1.5
Log.logException("ConfigurationException in keysForInput", e);
throw new IOException(e.getClass().getName() + ": Opening stream for input: " + e.getMessage());
} catch (InvalidKeyException e) {
// TODO use 1.6 constuctors that take nested exceptions when can move off 1.5
Log.logException("InvalidKeyException in keysForInput", e);
throw new IOException(e.getClass().getName() + ": Opening stream for input: " + e.getMessage());
} catch (NoSuchAlgorithmException e) {
// TODO use 1.6 constuctors that take nested exceptions when can move off 1.5
Log.logException("NoSuchAlgorithmException in keysForInput", e);
throw new IOException(e.getClass().getName() + ": Opening stream for input: " + e.getMessage());
}
return null;
}
/**
* Get keys to encrypt content as its' written, if that content is to be protected.
* @param name
* @param publisher
* @param type the type of content to be written. Mostly used to determine whether
* content is protected, but could also be used to specialize keys.
* @param handle
* @return
* @throws IOException
*/
public static ContentKeys keysForOutput(ContentName name, PublisherPublicKeyDigest publisher, ContentType contentType, CCNHandle handle)
throws IOException {
if (SystemConfiguration.disableAccessControl()) {
Log.finest("Access control disabled, not searching for keys for {0}.", name);
return null;
}
AccessControlManager acm;
try {
acm = NamespaceManager.findACM(name, handle);
Log.info("keysForOutput: found an acm: " + acm);
if ((acm != null) && (acm.isProtectedContent(name, publisher, contentType, handle))) {
// First we need to figure out whether this content is public or unprotected...
Log.info("keysForOutput: found ACM, protected content, generating new data key for data node {0}", name);
NodeKey dataKeyWrappingKey = acm.getDataKeyWrappingKey(name, publisher);
if (null == dataKeyWrappingKey) {
// if content is public -- either null or a special value would work
return null; // no keys
}
Key dataKey = acm.generateDataKey(name);
Log.finer("keysForOutput: content {0} publisher {1} data key {2} wrapping key {3}", name, publisher,
DataUtils.printHexBytes(dataKey.getEncoded()), dataKeyWrappingKey);
acm.storeDataKey(name, dataKey, dataKeyWrappingKey);
return getDefaultAlgorithmContentKeys(dataKey);
}
} catch (ConfigurationException e) {
// TODO use 1.6 constuctors that take nested exceptions when can move off 1.5
Log.logException("ConfigurationException in keysForInput", e);
throw new IOException(e.getClass().getName() + ": Opening stream for input: " + e.getMessage());
} catch (InvalidKeyException e) {
// TODO use 1.6 constuctors that take nested exceptions when can move off 1.5
Log.logException("InvalidKeyException in keysForInput", e);
throw new IOException(e.getClass().getName() + ": Opening stream for input: " + e.getMessage());
} catch (NoSuchAlgorithmException e) {
// TODO use 1.6 constuctors that take nested exceptions when can move off 1.5
Log.logException("NoSuchAlgorithmException in keysForInput", e);
throw new IOException(e.getClass().getName() + ": Opening stream for input: " + e.getMessage());
}
return null;
}
/**
* Allow AccessControlManagers to specify some content is not to be protected; for example,
* access control lists are not themselves encrypted.
* TODO: should headers be exempt from encryption?
*/
public boolean isProtectedContent(ContentName name, PublisherPublicKeyDigest publisher, ContentType contentType, CCNHandle hande) {
if (!inProtectedNamespace(name)) {
return false;
}
if (AccessControlProfile.isAccessName(name)) {
// Don't encrypt the access control metadata itself, or we couldn't get the
// keys to decrypt the other stuff.
return false;
}
return true;
}
}
|
Revert "Bug fix in AccessControlManager"
This reverts commit b0b0e8d54ced02d8af60ee325fcc502fdeebaf05.
This turned out not to be a bug; we changed it and then fixed what
was probably the actual bug (759aec54f97e3b2cdfdbb033e54c6d707637c661),
and for some strange reason it worked in our test case. Don't know
why... Back it out and see what happens.
|
javasrc/src/org/ccnx/ccn/profiles/security/access/AccessControlManager.java
|
Revert "Bug fix in AccessControlManager"
|
|
Java
|
apache-2.0
|
7f186701ddceb5179ed76d8a52b073291073e552
| 0
|
apache/directory-project
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.mina.handler.support;
import java.io.IOException;
import java.io.OutputStream;
import org.apache.mina.common.ByteBuffer;
import org.apache.mina.common.IoSession;
import org.apache.mina.common.WriteFuture;
/**
* An {@link OutputStream} that forwards all write operations to
* the associated {@link IoSession}.
*
* @author The Apache Directory Project (mina-dev@directory.apache.org)
* @version $Rev$, $Date$
*
*/
public class IoSessionOutputStream extends OutputStream
{
private final IoSession session;
private WriteFuture lastWriteFuture;
public IoSessionOutputStream( IoSession session )
{
this.session = session;
}
public void close()
{
session.close().join();
}
private void checkClosed() throws IOException
{
if( ! session.isConnected() )
{
throw new IOException( "The session has been closed." );
}
}
private synchronized void write( ByteBuffer buf ) throws IOException
{
checkClosed();
WriteFuture future = session.write( buf );
lastWriteFuture = future;
}
public void write( byte[] b, int off, int len ) throws IOException
{
write( ByteBuffer.wrap( b, off, len ) );
}
public void write( int b ) throws IOException
{
ByteBuffer buf = ByteBuffer.allocate( 1 );
buf.put( ( byte ) b );
buf.flip();
write( buf );
}
public synchronized void flush() throws IOException
{
if( lastWriteFuture == null )
{
return;
}
lastWriteFuture.join();
if( !lastWriteFuture.isWritten() )
{
throw new IOException( "The bytes could not be written to the session" );
}
}
}
|
mina/core/src/main/java/org/apache/mina/handler/support/IoSessionOutputStream.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.mina.handler.support;
import java.io.IOException;
import java.io.OutputStream;
import org.apache.mina.common.ByteBuffer;
import org.apache.mina.common.IoSession;
import org.apache.mina.common.WriteFuture;
/**
* An {@link OutputStream} that forwards all write operations to
* the associated {@link IoSession}.
*
* @author The Apache Directory Project (mina-dev@directory.apache.org)
* @version $Rev$, $Date$
*
*/
public class IoSessionOutputStream extends OutputStream
{
private final IoSession session;
public IoSessionOutputStream( IoSession session )
{
this.session = session;
}
public void close()
{
session.close().join();
}
private void checkClosed() throws IOException
{
if( ! session.isConnected() )
{
throw new IOException( "The session has been closed." );
}
}
private void write( ByteBuffer buf ) throws IOException
{
checkClosed();
WriteFuture future = session.write( buf );
future.join();
if( ! future.isWritten() )
{
throw new IOException( "The bytes could not be written to the session" );
}
}
public void write( byte[] b, int off, int len ) throws IOException
{
write( ByteBuffer.wrap( b, off, len ) );
}
public void write( int b ) throws IOException
{
ByteBuffer buf = ByteBuffer.allocate( 1 );
buf.put( ( byte ) b );
buf.flip();
write( buf );
}
}
|
Changed IoSessionOutputStream to wait for the WriteFuture only when a user calls flush().
git-svn-id: 5c3b06693d750a6aefbf1081b6b7d57c0165fdb2@446482 13f79535-47bb-0310-9956-ffa450edef68
|
mina/core/src/main/java/org/apache/mina/handler/support/IoSessionOutputStream.java
|
Changed IoSessionOutputStream to wait for the WriteFuture only when a user calls flush().
|
|
Java
|
apache-2.0
|
9b7caf99502a229d3fda6f0b8bd485c9ed7230d8
| 0
|
fthevenet/binjr,fthevenet/binjr
|
/*
* Copyright 2016-2021 Frederic Thevenet
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package eu.binjr.core.controllers;
import com.sun.javafx.charts.Legend;
import eu.binjr.common.io.IOUtils;
import eu.binjr.common.javafx.charts.*;
import eu.binjr.common.javafx.controls.*;
import eu.binjr.common.logging.Logger;
import eu.binjr.common.logging.Profiler;
import eu.binjr.common.text.NoopPrefixFormatter;
import eu.binjr.core.data.adapters.DataAdapter;
import eu.binjr.core.data.adapters.SourceBinding;
import eu.binjr.core.data.adapters.TimeSeriesBinding;
import eu.binjr.core.data.async.AsyncTaskManager;
import eu.binjr.core.data.exceptions.DataAdapterException;
import eu.binjr.core.data.exceptions.NoAdapterFoundException;
import eu.binjr.core.data.workspace.Chart;
import eu.binjr.core.data.workspace.*;
import eu.binjr.core.dialogs.Dialogs;
import eu.binjr.core.preferences.SnapshotOutputScale;
import eu.binjr.core.preferences.UserHistory;
import eu.binjr.core.preferences.UserPreferences;
import javafx.application.Platform;
import javafx.beans.InvalidationListener;
import javafx.beans.binding.Bindings;
import javafx.beans.binding.BooleanBinding;
import javafx.beans.binding.DoubleBinding;
import javafx.beans.property.*;
import javafx.beans.value.ChangeListener;
import javafx.collections.ListChangeListener;
import javafx.collections.ObservableList;
import javafx.css.PseudoClass;
import javafx.embed.swing.SwingFXUtils;
import javafx.event.ActionEvent;
import javafx.fxml.FXML;
import javafx.fxml.FXMLLoader;
import javafx.geometry.*;
import javafx.scene.CacheHint;
import javafx.scene.Group;
import javafx.scene.Node;
import javafx.scene.chart.*;
import javafx.scene.control.*;
import javafx.scene.control.cell.CheckBoxTableCell;
import javafx.scene.control.cell.PropertyValueFactory;
import javafx.scene.control.cell.TextFieldTableCell;
import javafx.scene.image.WritableImage;
import javafx.scene.input.*;
import javafx.scene.layout.*;
import javafx.scene.paint.Color;
import javafx.scene.shape.Circle;
import javafx.scene.shape.Path;
import javafx.scene.text.TextAlignment;
import javafx.stage.FileChooser;
import javafx.stage.Stage;
import javafx.util.Duration;
import org.controlsfx.control.MaskerPane;
import javax.imageio.ImageIO;
import java.io.File;
import java.io.IOException;
import java.net.URL;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
import java.util.*;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.stream.Collectors;
import static javafx.scene.layout.Region.USE_COMPUTED_SIZE;
/**
* The controller class for the time series view.
*
* @author Frederic Thevenet
*/
public class XYChartsWorksheetController extends WorksheetController {
public static final String WORKSHEET_VIEW_FXML = "/eu/binjr/views/XYChartsWorksheetView.fxml";
private static final DataFormat SERIALIZED_MIME_TYPE = new DataFormat("application/x-java-serialized-object");
private static final DataFormat VIEWPORT_DRAG_FORMAT = new DataFormat("viewport_drag_format");
private static final Logger logger = Logger.create(XYChartsWorksheetController.class);
private static final double Y_AXIS_SEPARATION = 10;
private static final PseudoClass HOVER_PSEUDO_CLASS = PseudoClass.getPseudoClass("hover");
private final UserPreferences userPrefs = UserPreferences.getInstance();
private final ToggleGroup editButtonsGroup = new ToggleGroup();
private final IntegerProperty nbBusyPlotTasks = new SimpleIntegerProperty(0);
@FXML
public AnchorPane root;
@FXML
Pane newChartDropTarget;
private List<ChartViewPort> viewPorts = new ArrayList<>();
private XYChartsWorksheet worksheet;
private volatile boolean preventReload = false;
private final AtomicBoolean closed = new AtomicBoolean(false);
@FXML
private Pane chartParent;
@FXML
private AnchorPane chartViewport;
@FXML
private AnchorPane chartView;
@FXML
private TextField yMinRange;
@FXML
private TextField yMaxRange;
@FXML
private StackPane seriesTableContainer;
@FXML
private SplitPane splitPane;
@FXML
private Button backButton;
@FXML
private Button forwardButton;
@FXML
private Button refreshButton;
@FXML
private Button snapshotButton;
@FXML
private ToggleButton vCrosshair;
@FXML
private ToggleButton hCrosshair;
@FXML
private Button addChartButton;
@FXML
private MaskerPane worksheetMaskerPane;
@FXML
private ContextMenu seriesListMenu;
@FXML
private Button selectChartLayout;
@FXML
private TimeRangePicker timeRangePicker;
@FXML
private AnchorPane chartsLegendsPane;
@FXML
private DrawerPane chartProperties;
@FXML
private ToolBar chartsToolbar;
@FXML
private HBox navigationToolbar;
private ChartViewportsState currentState;
private Pane worksheetTitleBlock;
private VBox screenshotCanvas;
private Profiler worksheetRefreshProfiler = null;
/**
 * Builds a controller for the given worksheet and re-attaches every persisted series
 * binding to the live {@link DataAdapter} instance matching its stored adapter id.
 *
 * @param parentController the parent {@link MainViewController}
 * @param worksheet        the worksheet model backing this view
 * @param sourcesAdapters  the adapters currently opened in the application
 * @throws NoAdapterFoundException if a series references an adapter id that is not available
 */
public XYChartsWorksheetController(MainViewController parentController, XYChartsWorksheet worksheet, Collection<DataAdapter<Double>> sourcesAdapters)
        throws NoAdapterFoundException {
    super(parentController);
    this.worksheet = worksheet;
    for (Chart chart : worksheet.getCharts()) {
        for (TimeSeriesInfo<Double> seriesInfo : chart.getSeries()) {
            UUID adapterId = seriesInfo.getBinding().getAdapterId();
            DataAdapter<Double> matched = null;
            for (DataAdapter<Double> candidate : sourcesAdapters) {
                if (adapterId != null && candidate != null && candidate.getId() != null && adapterId.equals(candidate.getId())) {
                    matched = candidate;
                    break;
                }
            }
            if (matched == null) {
                throw new NoAdapterFoundException("Failed to find a valid adapter with id " + (adapterId != null ? adapterId.toString() : "null"));
            }
            seriesInfo.getBinding().setAdapter(matched);
        }
    }
}
/**
 * Formats a {@link Color} as a CSS {@code rgba(r,g,b,a)} expression.
 * <p>
 * Uses {@link Locale#ROOT} so that the alpha component always uses a '.' decimal
 * separator: with the default locale (e.g. French), {@code %f} would yield
 * {@code "0,500000"} and produce an invalid CSS color string.
 *
 * @param c the color to format
 * @return the CSS rgba() representation of {@code c}
 */
private static String colorToRgbaString(Color c) {
    return String.format(Locale.ROOT, "rgba(%d,%d,%d,%f)",
            Math.round(c.getRed() * 255),
            Math.round(c.getGreen() * 255),
            Math.round(c.getBlue() * 255),
            c.getOpacity());
}
/**
 * Converts a collection of source-binding tree items into a list of {@link Chart}s,
 * one chart per sub-tree above the leaves, with one series per leaf binding.
 * Prompts the user for confirmation when the total series count exceeds the
 * configured warning threshold.
 *
 * @param treeItems the tree items to convert
 * @param dlgRoot   the node used to anchor the confirmation dialog
 * @return the charts, or {@link Optional#empty()} if the user declined
 */
private Optional<List<Chart>> treeItemsAsChartList(Collection<TreeItem<SourceBinding>> treeItems, Node dlgRoot) {
    List<Chart> chartList = new ArrayList<>();
    int seriesCount = 0;
    for (TreeItem<SourceBinding> item : treeItems) {
        for (var subTree : TreeViewUtils.splitAboveLeaves(item, true)) {
            if (!(subTree.getValue() instanceof TimeSeriesBinding chartBinding)) {
                continue;
            }
            var newChart = new Chart(
                    chartBinding.getLegend(),
                    chartBinding.getGraphType(),
                    chartBinding.getUnitName(),
                    chartBinding.getUnitPrefix()
            );
            for (var leaf : TreeViewUtils.flattenLeaves(subTree)) {
                if (leaf instanceof TimeSeriesBinding seriesBinding) {
                    newChart.addSeries(TimeSeriesInfo.fromBinding(seriesBinding));
                    seriesCount++;
                }
            }
            chartList.add(newChart);
        }
    }
    // Warn before dumping a very large number of series onto a single worksheet.
    if (seriesCount >= UserPreferences.getInstance().maxSeriesPerChartBeforeWarning.get().intValue()
            && Dialogs.confirmDialog(dlgRoot,
            "This action will add " + seriesCount + " series on a single worksheet.",
            "Are you sure you want to proceed?",
            ButtonType.YES, ButtonType.NO) != ButtonType.YES) {
        return Optional.empty();
    }
    return Optional.of(chartList);
}
/**
 * Loads the chart properties view, attaches it to the properties drawer pane and
 * returns the controller wired to the given chart.
 *
 * @param chart the chart whose properties the view will edit
 * @return the controller bound to this worksheet/chart pair
 * @throws IOException if the FXML resource cannot be loaded
 */
private ChartPropertiesController buildChartPropertiesController(Chart chart) throws IOException {
    var controller = new ChartPropertiesController(worksheet, chart);
    var fxmlLoader = new FXMLLoader(getClass().getResource("/eu/binjr/views/ChartPropertiesView.fxml"));
    fxmlLoader.setController(controller);
    Pane propertiesPane = fxmlLoader.load();
    chartProperties.getChildren().add(propertiesPane);
    // Pin the pane to all four edges of its anchor parent.
    AnchorPane.setTopAnchor(propertiesPane, 0.0);
    AnchorPane.setBottomAnchor(propertiesPane, 0.0);
    AnchorPane.setLeftAnchor(propertiesPane, 0.0);
    AnchorPane.setRightAnchor(propertiesPane, 0.0);
    propertiesPane.getStyleClass().add("toolPane");
    return controller;
}
/**
 * Returns the {@link XYChartsWorksheet} model backing this controller.
 *
 * @return the worksheet model for this view
 */
@Override
public Worksheet getWorksheet() {
    return this.worksheet;
}
/**
 * Initializes the worksheet view once FXML injection has completed: builds the chart
 * viewports, the navigation pane and the series tables, registers down-sampling
 * preference listeners and wires the drop target used to create new charts via
 * drag-and-drop.
 *
 * @param location  the location used to resolve relative paths for the root object
 * @param resources the resources used to localize the root object
 */
@Override
public void initialize(URL location, ResourceBundle resources) {
    // Fail fast (when assertions are enabled) if FXML injection missed a control.
    assert root != null : "fx:id\"root\" was not injected!";
    assert chartParent != null : "fx:id\"chartParent\" was not injected!";
    assert seriesTableContainer != null : "fx:id\"seriesTableContainer\" was not injected!";
    assert backButton != null : "fx:id\"backButton\" was not injected!";
    assert forwardButton != null : "fx:id\"forwardButton\" was not injected!";
    assert refreshButton != null : "fx:id\"refreshButton\" was not injected!";
    assert vCrosshair != null : "fx:id\"vCrosshair\" was not injected!";
    assert hCrosshair != null : "fx:id\"hCrosshair\" was not injected!";
    assert snapshotButton != null : "fx:id\"snapshotButton\" was not injected!";
    try {
        // The masker pane covers the view while at least one plot task is running.
        getBindingManager().bind(worksheetMaskerPane.visibleProperty(), nbBusyPlotTasks.greaterThan(0));
        initChartViewPorts();
        initNavigationPane();
        initTableViewPane();
        // Defer the first plot until after the scene graph is fully assembled.
        Platform.runLater(() -> invalidate(false, false, false));
        // Re-plot whenever a down-sampling preference changes (threshold/algorithm
        // only matter while down-sampling is enabled).
        getBindingManager().attachListener(userPrefs.downSamplingEnabled.property(), ((observable, oldValue, newValue) -> refresh()));
        getBindingManager().attachListener(userPrefs.downSamplingThreshold.property(), ((observable, oldValue, newValue) -> {
            if (userPrefs.downSamplingEnabled.get())
                refresh();
        }));
        getBindingManager().attachListener(userPrefs.downSamplingAlgorithm.property(), ((observable, oldValue, newValue) -> {
            if (userPrefs.downSamplingEnabled.get())
                refresh();
        }));
        // Drop target shown during tree-item drag-and-drop to create a brand new chart.
        newChartDropTarget.setOnDragOver(getBindingManager().registerHandler(this::handleDragOverNewChartTarget));
        newChartDropTarget.setOnDragDropped(getBindingManager().registerHandler(this::handleDragDroppedONewChartTarget));
        newChartDropTarget.setOnDragEntered(getBindingManager().registerHandler(event -> newChartDropTarget.pseudoClassStateChanged(HOVER_PSEUDO_CLASS, true)));
        newChartDropTarget.setOnDragExited(getBindingManager().registerHandler(event -> newChartDropTarget.pseudoClassStateChanged(HOVER_PSEUDO_CLASS, false)));
        getBindingManager().bind(newChartDropTarget.managedProperty(), getParentController().treeItemDragAndDropInProgressProperty());
        getBindingManager().bind(newChartDropTarget.visibleProperty(), getParentController().treeItemDragAndDropInProgressProperty());
        setSelectedChart(worksheet.getSelectedChart());
    } catch (Exception e) {
        Platform.runLater(() -> Dialogs.notifyException("Error loading worksheet controller", e, root));
    }
    super.initialize(location, resources);
}
/**
 * Toggles chart edit mode: when disabled, the legends/properties pane is collapsed
 * and the split divider is parked at the bottom; when enabled, the pane is restored
 * to the worksheet's saved divider position.
 *
 * @param newValue {@code true} to enter edit mode, {@code false} to leave it
 */
@Override
protected void setEditChartMode(Boolean newValue) {
    if (!newValue) {
        // Suspend the divider binding so parking the divider at 1.0 does not
        // overwrite the position persisted in the worksheet.
        getBindingManager().suspend(worksheet.dividerPositionProperty());
        splitPane.setDividerPositions(1.0);
        chartsLegendsPane.setVisible(false);
        chartsLegendsPane.setMaxHeight(0.0);
    } else {
        chartsLegendsPane.setMaxHeight(Double.MAX_VALUE);
        chartsLegendsPane.setVisible(true);
        splitPane.setDividerPositions(worksheet.getDividerPosition());
        getBindingManager().resume(worksheet.dividerPositionProperty());
    }
    setShowPropertiesPane(newValue);
    super.setEditChartMode(newValue);
}
/**
 * Builds a bottom-mounted time axis that tracks the worksheet's time zone.
 *
 * @return a new, non-animated {@link ZonedDateTimeAxis}
 */
private ZonedDateTimeAxis buildTimeAxis() {
    var timeAxis = new ZonedDateTimeAxis(worksheet.getTimeZone());
    timeAxis.setSide(Side.BOTTOM);
    timeAxis.setAnimated(false);
    getBindingManager().bind(timeAxis.zoneIdProperty(), worksheet.timeZoneProperty());
    return timeAxis;
}
/**
 * Builds one {@link XYChart} viewport per {@link Chart} in the worksheet and wires up
 * its axes, selection markers, drag-and-drop targets and per-chart tool buttons, then
 * lays the viewports out according to the worksheet's chart layout.
 *
 * @throws IOException if a chart properties view cannot be loaded
 */
private void initChartViewPorts() throws IOException {
    ZonedDateTimeAxis defaultXAxis = buildTimeAxis();
    // Make sure there is always at least one chart to display.
    if (worksheet.getCharts().size() == 0) {
        worksheet.getCharts().add(new Chart());
    }
    for (int i = 0; i < worksheet.getCharts().size(); i++) {
        final int currentIndex = i;
        final Chart currentChart = worksheet.getCharts().get(i);
        // OVERLAID charts share a single time axis; STACKED charts each get their own.
        ZonedDateTimeAxis xAxis;
        switch (worksheet.getChartLayout()) {
            case OVERLAID:
                xAxis = defaultXAxis;
                break;
            case STACKED:
            default:
                xAxis = buildTimeAxis();
                break;
        }
        // Pick a y axis whose tick formatter matches the chart's unit prefix scheme.
        StableTicksAxis<Double> yAxis;
        switch (currentChart.getUnitPrefixes()) {
            case BINARY:
                yAxis = new BinaryStableTicksAxis<>();
                break;
            case METRIC:
                yAxis = new MetricStableTicksAxis<>();
                break;
            case NONE:
            default:
                yAxis = new StableTicksAxis<>(new NoopPrefixFormatter(), 10, new double[]{1.0, 2.5, 5.0});
        }
        // NOTE(review): this bidirectional bind is not registered with the binding
        // manager, unlike most bindings in this class — confirm it does not need
        // explicit disposal when the controller closes.
        yAxis.autoRangingProperty().bindBidirectional(currentChart.autoScaleYAxisProperty());
        yAxis.setAnimated(false);
        yAxis.setTickSpacing(30);
        getBindingManager().bind(yAxis.labelProperty(),
                Bindings.createStringBinding(
                        () -> String.format("%s - %s", currentChart.getName(), currentChart.getUnit()),
                        currentChart.nameProperty(),
                        currentChart.unitProperty()));
        XYChart<ZonedDateTime, Double> viewPort;
        switch (currentChart.getChartType()) {
            case AREA:
                viewPort = new AreaChart<>(xAxis, yAxis);
                ((AreaChart) viewPort).setCreateSymbols(false);
                break;
            case STACKED:
                viewPort = new NaNStackedAreaChart<>(xAxis, yAxis);
                ((StackedAreaChart) viewPort).setCreateSymbols(false);
                break;
            case SCATTER:
                viewPort = new ScatterChart<>(xAxis, yAxis);
                break;
            case LINE:
            default:
                viewPort = new LineChart<>(xAxis, yAxis);
                ((LineChart) viewPort).setCreateSymbols(false);
        }
        viewPort.setCache(true);
        viewPort.setCacheHint(CacheHint.SPEED);
        viewPort.setCacheShape(true);
        viewPort.setFocusTraversable(true);
        // Only show the built-in legend outside edit mode and in STACKED layout.
        viewPort.legendVisibleProperty().bind(worksheet.editModeEnabledProperty()
                .not()
                .and(Bindings.equal(ChartLayout.STACKED, (ObjectProperty) worksheet.chartLayoutProperty())));
        viewPort.setLegendSide(Side.BOTTOM);
        viewPort.setAnimated(false);
        viewPorts.add(new ChartViewPort(currentChart, viewPort, buildChartPropertiesController(currentChart)));
        // Clicking a y axis selects its chart (ctrl-click toggles multi-selection).
        viewPort.getYAxis().addEventFilter(MouseEvent.MOUSE_CLICKED, getBindingManager().registerHandler(event -> {
            worksheet.setSelectedChart(currentIndex, event.isControlDown());
        }));
        getBindingManager().bind(((StableTicksAxis) viewPort.getYAxis()).selectionMarkerVisibleProperty(), worksheet.editModeEnabledProperty());
        // Dropping tree items onto a viewport adds series to that chart.
        viewPort.setOnDragOver(getBindingManager().registerHandler(this::handleDragOverWorksheetView));
        viewPort.setOnDragDropped(getBindingManager().registerHandler(this::handleDragDroppedOnWorksheetView));
        viewPort.setOnDragEntered(getBindingManager().registerHandler(event -> {
            if (closed.get()) {
                return;
            }
            if (worksheet.getChartLayout() == ChartLayout.OVERLAID) {
                ((StableTicksAxis) viewPort.getYAxis()).getSelectionMarker().pseudoClassStateChanged(HOVER_PSEUDO_CLASS, true);
            } else {
                //FIXME WARNING: Caught 'java.lang.ClassCastException: class java.lang.String cannot be cast to class javafx.scene.paint.Paint
                // (java.lang.String is in module java.base of loader 'bootstrap'; javafx.scene.paint.Paint is in unnamed module of loader 'app')'
                // while converting value for '-fx-background-color' from inline style on StackedAreaChart@d8b1439[styleClass=chart]
                viewPort.setStyle("-fx-background-color: -fx-accent-translucide;");
            }
        }));
        viewPort.setOnDragExited(getBindingManager().registerHandler(event -> {
            if (closed.get()) {
                return;
            }
            if (worksheet.getChartLayout() == ChartLayout.OVERLAID) {
                ((StableTicksAxis) viewPort.getYAxis()).getSelectionMarker().pseudoClassStateChanged(HOVER_PSEUDO_CLASS, false);
            } else {
                viewPort.setStyle("-fx-background-color: -binjr-pane-background-color;");
            }
        }));
        // Add buttons to chart axis
        Button closeButton = new ToolButtonBuilder<Button>(getBindingManager())
                .setText("Close")
                .setTooltip("Remove this chart from the worksheet.")
                .setStyleClass("exit")
                .setIconStyleClass("cross-icon", "small-icon")
                .setAction(event -> warnAndRemoveChart(currentChart))
                // Disabled when this is the last remaining chart.
                .bind(Button::disableProperty, Bindings.createBooleanBinding(() -> worksheet.getCharts().size() > 1, worksheet.getCharts()).not())
                .build(Button::new);
        ToggleButton editButton = new ToolButtonBuilder<ToggleButton>(getBindingManager())
                .setText("Settings")
                .setTooltip("Edit the chart's settings")
                .setStyleClass("dialog-button")
                .setIconStyleClass("settings-icon", "small-icon")
                .bindBidirectionnal(ToggleButton::selectedProperty, currentChart.showPropertiesProperty())
                .build(ToggleButton::new);
        var toolBar = new HBox(editButton, closeButton);
        toolBar.getStyleClass().add("worksheet-tool-bar");
        toolBar.visibleProperty().bind(yAxis.getSelectionMarker().hoverProperty());
        // Dragging a y axis' selection marker re-orders charts within the worksheet.
        yAxis.getSelectionMarker().setOnDragDetected(getBindingManager().registerHandler(event -> {
            Dragboard db = viewPort.startDragAndDrop(TransferMode.MOVE);
            db.setDragView(SnapshotUtils.scaledSnapshot(viewPort, Dialogs.getOutputScaleX(root), Dialogs.getOutputScaleY(root)));
            ClipboardContent cc = new ClipboardContent();
            cc.put(VIEWPORT_DRAG_FORMAT, currentIndex);
            db.setContent(cc);
            event.consume();
        }));
        yAxis.getSelectionMarker().setOnDragOver(getBindingManager().registerHandler(event -> {
            Dragboard db = event.getDragboard();
            if (db.hasContent(VIEWPORT_DRAG_FORMAT) && currentIndex != (Integer) db.getContent(VIEWPORT_DRAG_FORMAT)) {
                event.acceptTransferModes(TransferMode.COPY_OR_MOVE);
                event.consume();
            }
        }));
        yAxis.getSelectionMarker().setOnDragDropped(getBindingManager().registerHandler(event -> {
            Dragboard db = event.getDragboard();
            if (db.hasContent(VIEWPORT_DRAG_FORMAT)) {
                int draggedIndex = (Integer) db.getContent(VIEWPORT_DRAG_FORMAT);
                event.setDropCompleted(true);
                event.consume();
                moveChartOrder(viewPorts.get(draggedIndex).getDataStore(), currentIndex - draggedIndex);
            }
        }));
        yAxis.getSelectionMarker().getChildren().add(toolBar);
    }
    // The layout selector only makes sense when more than one chart exists.
    getBindingManager().bind(selectChartLayout.disableProperty(),
            Bindings.createBooleanBinding(() -> worksheet.getCharts().size() > 1, worksheet.getCharts()).not());
    var contextMenu = new ContextMenu();
    contextMenu.getItems().setAll(Arrays.stream(ChartLayout.values()).map(chartLayout -> {
        MenuItem item = new MenuItem(chartLayout.toString());
        item.setOnAction(getBindingManager().registerHandler(event -> worksheet.setChartLayout(chartLayout)));
        return item;
    }).collect(Collectors.toList()));
    selectChartLayout.setOnAction(getBindingManager().registerHandler(event -> {
        contextMenu.show((Node) event.getSource(), Side.BOTTOM, 0, 0);
    }));
    // The title block is kept invisible on screen and only rendered into snapshots.
    this.worksheetTitleBlock = buildTitleBlock();
    screenshotCanvas = new VBox();
    screenshotCanvas.getStyleClass().add("chart-viewport-parent");
    screenshotCanvas.setAlignment(Pos.TOP_LEFT);
    screenshotCanvas.getChildren().add(worksheetTitleBlock);
    switch (worksheet.getChartLayout()) {
        case OVERLAID:
            setupOverlayChartLayout(screenshotCanvas);
            break;
        case STACKED:
            setupStackedChartLayout(screenshotCanvas);
            break;
    }
    if (viewPorts.size() > 0) {
        getBindingManager().attachListener(worksheet.selectedChartProperty(), (ChangeListener<Integer>) (observable, oldValue, newValue) -> setSelectedChart(newValue));
    }
}
/**
 * Applies the chart selection: highlights every y axis in the current multi-selection,
 * then swaps the series table to show the newly selected chart's details.
 *
 * @param selectedChartIndex the index of the chart to select; out-of-range values
 *                           leave the series table untouched
 */
private void setSelectedChart(int selectedChartIndex) {
    // Sync each axis' highlight with the worksheet's multi-selection set.
    for (int idx = 0; idx < viewPorts.size(); idx++) {
        var axis = (StableTicksAxis) viewPorts.get(idx).getChart().getYAxis();
        axis.setSelected(worksheet.getMultiSelectedIndices().contains(idx));
    }
    if (selectedChartIndex <= -1 || selectedChartIndex >= viewPorts.size()) {
        return;
    }
    ChartViewPort selected = viewPorts.get(selectedChartIndex);
    if (selected == null) {
        return;
    }
    ((StableTicksAxis) selected.getChart().getYAxis()).setSelected(true);
    seriesTableContainer.getChildren().clear();
    seriesTableContainer.getChildren().add(selected.getSeriesDetailsPane());
    // Keep the properties drawer in sync when the settings toggle is active.
    if (editButtonsGroup.getSelectedToggle() != null) {
        selected.getDataStore().setShowProperties(true);
    }
}
/**
 * Lays out all chart viewports stacked on top of each other (OVERLAID mode): the first
 * chart keeps its y axis on the left, the other charts' axes line up on the right, and
 * plot widths are adjusted so that all plot areas coincide. A single shared crosshair
 * drives zoom selection across all charts.
 *
 * @param vBox the container (also used as the snapshot canvas) receiving the layout
 */
private void setupOverlayChartLayout(VBox vBox) {
    var pane = new AnchorPane();
    for (int i = 0; i < viewPorts.size(); i++) {
        ChartViewPort v = viewPorts.get(i);
        XYChart<ZonedDateTime, Double> chart = v.getChart();
        int nbAdditionalCharts = worksheet.getCharts().size() - 1;
        // Total width occupied by all the *other* charts' y axes (plus separation
        // gaps); used to shrink this chart's width so every plot area lines up.
        DoubleBinding n = Bindings.createDoubleBinding(
                () -> viewPorts.stream()
                        .filter(c -> !c.getChart().equals(chart))
                        .map(c -> c.getChart().getYAxis().getWidth())
                        .reduce(Double::sum).orElse(0.0) + (Y_AXIS_SEPARATION * nbAdditionalCharts),
                viewPorts.stream().map(c -> c.getChart().getYAxis().widthProperty()).toArray(ReadOnlyDoubleProperty[]::new)
        );
        HBox hBox = new HBox(chart);
        // Let mouse events reach the charts stacked underneath.
        hBox.setPickOnBounds(false);
        chart.setPickOnBounds(false);
        chart.getChildrenUnmodifiable()
                .stream()
                .filter(node -> node.getStyleClass().contains("chart-content"))
                .findFirst()
                .ifPresent(node -> node.setPickOnBounds(false));
        hBox.setAlignment(Pos.CENTER_LEFT);
        getBindingManager().bind(hBox.prefHeightProperty(), chartParent.heightProperty());
        getBindingManager().bind(hBox.prefWidthProperty(), chartParent.widthProperty());
        getBindingManager().bind(chart.minWidthProperty(), chartParent.widthProperty().subtract(n));
        getBindingManager().bind(chart.prefWidthProperty(), chartParent.widthProperty().subtract(n));
        getBindingManager().bind(chart.maxWidthProperty(), chartParent.widthProperty().subtract(n));
        if (i == 0) {
            // The first chart keeps its axis on the left and draws the grid lines.
            chart.getYAxis().setSide(Side.LEFT);
        } else {
            chart.getYAxis().setSide(Side.RIGHT);
            chart.setVerticalZeroLineVisible(false);
            chart.setHorizontalZeroLineVisible(false);
            chart.setVerticalGridLinesVisible(false);
            chart.setHorizontalGridLinesVisible(false);
            getBindingManager().bind(chart.translateXProperty(), viewPorts.get(0).getChart().getYAxis().widthProperty());
            // Offset each right-hand axis by the cumulated width of the axes before it.
            getBindingManager().bind(chart.getYAxis().translateXProperty(), Bindings.createDoubleBinding(
                    () -> viewPorts.stream()
                            .filter(c -> viewPorts.indexOf(c) != 0 && viewPorts.indexOf(c) < viewPorts.indexOf(v))
                            .map(c -> c.getChart().getYAxis().getWidth())
                            .reduce(Double::sum).orElse(0.0) + Y_AXIS_SEPARATION * (viewPorts.indexOf(v) - 1),
                    viewPorts.stream().map(c -> c.getChart().getYAxis().widthProperty()).toArray(ReadOnlyDoubleProperty[]::new)));
        }
        pane.getChildren().add(hBox);
    }
    DateTimeFormatter dateTimeFormatter = DateTimeFormatter.RFC_1123_DATE_TIME;
    LinkedHashMap<XYChart<ZonedDateTime, Double>, Function<Double, String>> map = new LinkedHashMap<>();
    viewPorts.forEach(v -> map.put(v.getChart(), v.getPrefixFormatter()::format));
    var crossHair = new XYChartCrosshair<>(map, pane, dateTimeFormatter::format);
    viewPorts.forEach(v -> v.setCrosshair(crossHair));
    crossHair.onSelectionDone(s -> {
        logger.debug(() -> "Applying zoom selection: " + s.toString());
        currentState.setSelection(convertSelection(s), true);
    });
    // FIX: register these bidirectional bindings with the binding manager — as
    // setupStackedChartLayout already does — so they are released when the controller
    // is closed, instead of binding the controls directly and leaking the listeners.
    getBindingManager().bindBidirectional(hCrosshair.selectedProperty(), userPrefs.horizontalMarkerOn.property());
    getBindingManager().bindBidirectional(vCrosshair.selectedProperty(), userPrefs.verticalMarkerOn.property());
    // The markers are visible when toggled on, or while shift/ctrl is held.
    getBindingManager().bind(crossHair.horizontalMarkerVisibleProperty(),
            Bindings.createBooleanBinding(() -> userPrefs.shiftPressed.get() || hCrosshair.isSelected(),
                    hCrosshair.selectedProperty(),
                    userPrefs.shiftPressed.property()));
    getBindingManager().bind(crossHair.verticalMarkerVisibleProperty(),
            Bindings.createBooleanBinding(() -> userPrefs.ctrlPressed.get() || vCrosshair.isSelected(),
                    vCrosshair.selectedProperty(),
                    userPrefs.ctrlPressed.property()));
    vBox.getChildren().add(pane);
    chartParent.getChildren().add(vBox);
}
/**
 * Lays out all chart viewports vertically in a scroll pane (STACKED mode), each chart
 * with its own left-hand y axis, and attaches one crosshair per chart (all driving the
 * same zoom-selection state).
 *
 * @param vBox the container (also used as the snapshot canvas) receiving the layout
 */
private void setupStackedChartLayout(VBox vBox) {
    getBindingManager().bind(vBox.prefHeightProperty(), chartParent.heightProperty());
    getBindingManager().bind(vBox.prefWidthProperty(), chartParent.widthProperty());
    for (int i = 0; i < viewPorts.size(); i++) {
        ChartViewPort v = viewPorts.get(i);
        XYChart<ZonedDateTime, Double> chart = v.getChart();
        vBox.getChildren().add(chart);
        // FIX: the original called chart.maxHeight(Double.MAX_VALUE), which is the
        // *query* overload (computes the max height for a given width) and discards
        // its result — a no-op. The intended call is the setter, so the chart can
        // actually grow with VBox.setVgrow(ALWAYS) below.
        chart.setMaxHeight(Double.MAX_VALUE);
        // Charts get more vertical room outside edit mode (250px vs 80px floor).
        // NOTE(review): this bind is not registered with the binding manager —
        // confirm it does not need explicit disposal.
        chart.minHeightProperty().bind(Bindings.createDoubleBinding(
                () -> worksheet.isEditModeEnabled() ?
                        Math.max(worksheet.minChartHeightProperty().doubleValue(), 80)
                        : Math.max(worksheet.minChartHeightProperty().doubleValue(), 250),
                worksheet.editModeEnabledProperty(),
                worksheet.minChartHeightProperty()
        ));
        VBox.setVgrow(chart, Priority.ALWAYS);
        // Fixed axis width keeps all stacked plots horizontally aligned.
        chart.getYAxis().setSide(Side.LEFT);
        chart.getYAxis().setPrefWidth(60.0);
        chart.getYAxis().setMinWidth(60.0);
        chart.getYAxis().setMaxWidth(60.0);
    }
    var scrollPane = new ScrollPane(vBox);
    scrollPane.setFitToWidth(true);
    scrollPane.getStyleClass().add("skinnable-pane-border");
    chartParent.getChildren().add(scrollPane);
    // setup crosshair
    DateTimeFormatter dateTimeFormatter = DateTimeFormatter.RFC_1123_DATE_TIME;
    LinkedHashMap<XYChart<ZonedDateTime, Double>, Function<Double, String>> map = new LinkedHashMap<>();
    map.put(viewPorts.get(0).getChart(), viewPorts.get(0).getPrefixFormatter()::format);
    var crossHair = new XYChartCrosshair<>(map, chartParent, dateTimeFormatter::format);
    var nbChartObs = new SimpleIntegerProperty(viewPorts.size());
    // Full-height markers only make sense when several charts are stacked.
    var crosshairHeightBinding = BooleanBinding.booleanExpression(userPrefs.fullHeightCrosshairMarker.property())
            .and(Bindings.greaterThan(nbChartObs, 1));
    getBindingManager().bind(crossHair.displayFullHeightMarkerProperty(), crosshairHeightBinding);
    viewPorts.get(0).setCrosshair(crossHair);
    crossHair.onSelectionDone(s -> {
        logger.debug(() -> "Applying zoom selection: " + s.toString());
        currentState.setSelection(convertSelection(s), true);
    });
    getBindingManager().bindBidirectional(hCrosshair.selectedProperty(), userPrefs.horizontalMarkerOn.property());
    getBindingManager().bindBidirectional(vCrosshair.selectedProperty(), userPrefs.verticalMarkerOn.property());
    getBindingManager().bind(crossHair.horizontalMarkerVisibleProperty(),
            Bindings.createBooleanBinding(() ->
                            userPrefs.shiftPressed.get() || hCrosshair.isSelected(),
                    hCrosshair.selectedProperty(),
                    userPrefs.shiftPressed.property()));
    getBindingManager().bind(crossHair.verticalMarkerVisibleProperty(),
            Bindings.createBooleanBinding(() ->
                            userPrefs.ctrlPressed.get() || vCrosshair.isSelected(),
                    vCrosshair.selectedProperty(),
                    userPrefs.ctrlPressed.property()));
    // Each additional chart gets its own crosshair wired to the same selection state.
    for (int i = 1; i < viewPorts.size(); i++) {
        LinkedHashMap<XYChart<ZonedDateTime, Double>, Function<Double, String>> m = new LinkedHashMap<>();
        m.put(viewPorts.get(i).getChart(), viewPorts.get(i).getPrefixFormatter()::format);
        XYChartCrosshair<ZonedDateTime, Double> ch = new XYChartCrosshair<>(m, chartParent, dateTimeFormatter::format);
        ch.displayFullHeightMarkerProperty().bind(crosshairHeightBinding);
        ch.onSelectionDone(s -> {
            logger.debug(() -> "Applying zoom selection: " + s.toString());
            currentState.setSelection(convertSelection(s), true);
        });
        getBindingManager().bind(ch.horizontalMarkerVisibleProperty(),
                Bindings.createBooleanBinding(() ->
                                userPrefs.shiftPressed.get() || hCrosshair.isSelected(),
                        hCrosshair.selectedProperty(),
                        userPrefs.shiftPressed.property()));
        getBindingManager().bind(ch.verticalMarkerVisibleProperty(),
                Bindings.createBooleanBinding(() ->
                                userPrefs.ctrlPressed.get() || vCrosshair.isSelected(),
                        vCrosshair.selectedProperty(),
                        userPrefs.ctrlPressed.property()));
        viewPorts.get(i).setCrosshair(ch);
    }
}
/**
 * Builds the worksheet title block (name + time range) that is kept invisible and
 * unmanaged on screen; it is only rendered when producing snapshots.
 *
 * @return the title block pane
 */
private Pane buildTitleBlock() {
    var block = new VBox();
    block.getStyleClass().add("worksheet-title-block");
    block.setManaged(false);
    block.setVisible(false);
    var titleLabel = new Label();
    titleLabel.getStyleClass().add("title-text");
    titleLabel.setGraphic(ToolButtonBuilder.makeIconNode(Pos.CENTER_LEFT, "chart-icon"));
    titleLabel.textProperty().bind(worksheet.nameProperty());
    var rangeLabel = new Label();
    rangeLabel.getStyleClass().add("range-text");
    rangeLabel.setGraphic(ToolButtonBuilder.makeIconNode(Pos.CENTER_LEFT, "time-icon"));
    rangeLabel.textProperty().bind(timeRangePicker.textProperty());
    block.getChildren().addAll(titleLabel, rangeLabel);
    return block;
}
/**
 * Exposes the worksheet's currently selected time range, as managed by the
 * time range picker.
 *
 * @return the selected {@link TimeRange} property
 */
@Override
public Property<TimeRange> selectedRangeProperty() {
    return this.timeRangePicker.selectedRangeProperty();
}
/**
 * Wires up the navigation toolbar (history back/forward, refresh, snapshot, add
 * chart) and the time range picker, and initializes the viewport state from the
 * worksheet's persisted time range.
 */
private void initNavigationPane() {
    backButton.setOnAction(getBindingManager().registerHandler(this::handleHistoryBack));
    forwardButton.setOnAction(getBindingManager().registerHandler(this::handleHistoryForward));
    refreshButton.setOnAction(getBindingManager().registerHandler(this::handleRefresh));
    snapshotButton.setOnAction(getBindingManager().registerHandler(this::handleTakeSnapshot));
    getBindingManager().bind(backButton.disableProperty(), worksheet.getHistory().backward().emptyProperty());
    getBindingManager().bind(forwardButton.disableProperty(), worksheet.getHistory().forward().emptyProperty());
    addChartButton.setOnAction(getBindingManager().registerHandler(this::handleAddNewChart));
    currentState = new ChartViewportsState(this, worksheet.getFromDateTime(), worksheet.getToDateTime());
    // NOTE(review): these bidirectional binds and the listener attached below are not
    // registered with the binding manager — confirm they do not outlive the controller.
    timeRangePicker.timeRangeLinkedProperty().bindBidirectional(worksheet.timeRangeLinkedProperty());
    timeRangePicker.zoneIdProperty().bindBidirectional(worksheet.timeZoneProperty());
    timeRangePicker.initSelectedRange(TimeRange.of(currentState.getStartX(), currentState.getEndX()));
    timeRangePicker.setOnSelectedRangeChanged((observable, oldValue, newValue) -> {
        currentState.setSelection(currentState.selectTimeRange(newValue.getBeginning(), newValue.getEnd()), true);
    });
    timeRangePicker.setOnResetInterval(() -> {
        try {
            // Reset to the first chart's initial time range when it can be determined.
            return worksheet.getCharts().get(0).getInitialTimeRange();
        } catch (Exception e) {
            Dialogs.notifyException("Error resetting range", e);
        }
        // Fall back to the last 24 hours.
        return TimeRange.of(ZonedDateTime.now().minusHours(24), ZonedDateTime.now());
    });
    // Keep the picker display in sync when the viewport state changes (e.g. zoom).
    currentState.timeRangeProperty().addListener((observable, oldValue, newValue) -> {
        if (newValue != null) {
            timeRangePicker.updateSelectedRange(newValue);
        }
    });
}
/**
 * Translates a viewport-keyed zoom selection into one keyed by the backing
 * {@link Chart} model objects; entries whose viewport is unknown are dropped.
 *
 * @param selection the selection keyed by on-screen {@link XYChart} instances
 * @return the same selections keyed by their data-store {@link Chart}s
 */
private Map<Chart, XYChartSelection<ZonedDateTime, Double>> convertSelection(Map<XYChart<ZonedDateTime, Double>, XYChartSelection<ZonedDateTime, Double>> selection) {
    Map<Chart, XYChartSelection<ZonedDateTime, Double>> converted = new HashMap<>();
    for (var entry : selection.entrySet()) {
        for (ChartViewPort viewPort : viewPorts) {
            if (viewPort.getChart().equals(entry.getKey())) {
                converted.put(viewPort.getDataStore(), entry.getValue());
                break;
            }
        }
    }
    return converted;
}
/**
 * Adds a new, empty {@link Chart} to the worksheet.
 *
 * @param actionEvent the event that triggered this action
 */
private void handleAddNewChart(ActionEvent actionEvent) {
    worksheet.getCharts().add(new Chart());
}
/**
 * Builds the per-chart series table pane for every viewport on the worksheet:
 * the series table (visibility checkbox, color, name, min/max/avg/current/path
 * columns), the titled pane header with inline name/unit editing, and the
 * per-chart toolbar (select / move up / move down / settings / close).
 * Also wires the chart-properties side pane to the settings toggle group.
 */
private void initTableViewPane() {
    for (ChartViewPort currentViewPort : viewPorts) {
        currentViewPort.getSeriesTable().getSelectionModel().setSelectionMode(SelectionMode.MULTIPLE);
        // --- "Show all" visibility column: header checkbox toggles every series ---
        CheckBox showAllCheckBox = new CheckBox();
        TableColumn<TimeSeriesInfo<Double>, Boolean> visibleColumn = new TableColumn<>();
        visibleColumn.setGraphic(showAllCheckBox);
        visibleColumn.setSortable(false);
        visibleColumn.setResizable(false);
        visibleColumn.setPrefWidth(32);
        // Keeps the header checkbox tri-state in sync with the individual series:
        // all selected -> checked, none -> unchecked, mixed -> indeterminate.
        InvalidationListener isVisibleListener = (observable) -> {
            boolean andAll = true;
            boolean orAll = false;
            for (TimeSeriesInfo<Double> t : currentViewPort.getDataStore().getSeries()) {
                andAll &= t.isSelected();
                orAll |= t.isSelected();
            }
            showAllCheckBox.setIndeterminate(Boolean.logicalXor(andAll, orAll));
            showAllCheckBox.setSelected(andAll);
        };
        // Re-plots when a series' selected state changes; overlaid layout re-plots
        // all charts, otherwise only this viewport.
        ChangeListener<Boolean> refreshListener = (observable, oldValue, newValue) -> {
            if (worksheet.getChartLayout() == ChartLayout.OVERLAID) {
                invalidate(false, false, false);
            } else {
                plotChart(currentViewPort, false);
            }
        };
        currentViewPort.getDataStore().getSeries().forEach(doubleTimeSeriesInfo -> {
            getBindingManager().attachListener(doubleTimeSeriesInfo.selectedProperty(), refreshListener);
            getBindingManager().attachListener(doubleTimeSeriesInfo.selectedProperty(), isVisibleListener);
            // Explicitly call the listener to initialize the proper status of the checkbox
            isVisibleListener.invalidated(null);
        });
        visibleColumn.setCellValueFactory(p -> p.getValue().selectedProperty());
        visibleColumn.setCellFactory(CheckBoxTableCell.forTableColumn(visibleColumn));
        // Header checkbox action: detach per-series listeners to avoid one re-plot
        // per series, flip them all, re-plot once, then re-attach a fresh listener.
        showAllCheckBox.setOnAction(getBindingManager().registerHandler(event -> {
            ChangeListener<Boolean> r = (observable, oldValue, newValue) -> {
                if (worksheet.getChartLayout() == ChartLayout.OVERLAID) {
                    invalidate(false, false, false);
                } else {
                    plotChart(currentViewPort, false);
                }
            };
            boolean b = ((CheckBox) event.getSource()).isSelected();
            currentViewPort.getDataStore().getSeries().forEach(s -> getBindingManager().detachAllChangeListeners(s.selectedProperty()));
            currentViewPort.getDataStore().getSeries().forEach(t -> t.setSelected(b));
            r.changed(null, null, null);
            currentViewPort.getDataStore().getSeries().forEach(s -> getBindingManager().attachListener(s.selectedProperty(), r));
        }));
        // --- Value columns (right-aligned decimal formatting) ---
        DecimalFormatTableCellFactory<TimeSeriesInfo<Double>, String> alignRightCellFactory = new DecimalFormatTableCellFactory<>();
        alignRightCellFactory.setAlignment(TextAlignment.RIGHT);
        TableColumn<TimeSeriesInfo<Double>, Color> colorColumn = new TableColumn<>();
        colorColumn.setSortable(false);
        colorColumn.setResizable(false);
        colorColumn.setPrefWidth(32);
        TableColumn<TimeSeriesInfo<Double>, String> nameColumn = new TableColumn<>("Name");
        nameColumn.setSortable(false);
        nameColumn.setPrefWidth(160);
        // Name cells become editable only while the chart's properties pane is shown.
        getBindingManager().bind(nameColumn.editableProperty(), currentViewPort.getDataStore().showPropertiesProperty());
        nameColumn.setCellValueFactory(new PropertyValueFactory<>("displayName"));
        nameColumn.setCellFactory(TextFieldTableCell.forTableColumn());
        nameColumn.setOnEditCommit(getBindingManager().registerHandler(
                t -> t.getTableView().getItems().get(
                        t.getTablePosition().getRow()).setDisplayName(t.getNewValue()))
        );
        TableColumn<TimeSeriesInfo<Double>, String> minColumn = new TableColumn<>("Min.");
        minColumn.setSortable(false);
        minColumn.setPrefWidth(75);
        minColumn.setCellFactory(alignRightCellFactory);
        TableColumn<TimeSeriesInfo<Double>, String> maxColumn = new TableColumn<>("Max.");
        maxColumn.setSortable(false);
        maxColumn.setPrefWidth(75);
        maxColumn.setCellFactory(alignRightCellFactory);
        TableColumn<TimeSeriesInfo<Double>, String> avgColumn = new TableColumn<>("Avg.");
        avgColumn.setSortable(false);
        avgColumn.setPrefWidth(75);
        avgColumn.setCellFactory(alignRightCellFactory);
        TableColumn<TimeSeriesInfo<Double>, String> currentColumn = new TableColumn<>("Current");
        currentColumn.setSortable(false);
        currentColumn.setPrefWidth(75);
        currentColumn.setCellFactory(alignRightCellFactory);
        currentColumn.getStyleClass().add("column-bold-text");
        TableColumn<TimeSeriesInfo<Double>, String> pathColumn = new TableColumn<>("Path");
        pathColumn.setSortable(false);
        pathColumn.setPrefWidth(400);
        // "Current" column is only shown while the vertical crosshair marker is active.
        currentColumn.setVisible(getSelectedViewPort().getCrosshair().isVerticalMarkerVisible());
        getBindingManager().attachListener(getSelectedViewPort().getCrosshair().verticalMarkerVisibleProperty(),
                (ChangeListener<Boolean>) (observable, oldValue, newValue) -> currentColumn.setVisible(newValue));
        pathColumn.setCellValueFactory(p -> new SimpleStringProperty(p.getValue().getBinding().getTreeHierarchy()));
        colorColumn.setCellFactory(param -> new ColorTableCell<>(colorColumn));
        colorColumn.setCellValueFactory(p -> p.getValue().displayColorProperty());
        // Min/max/avg show "NaN" until a processor (fetched data) is available.
        avgColumn.setCellValueFactory(p -> Bindings.createStringBinding(
                () -> p.getValue().getProcessor() == null ? "NaN" : currentViewPort.getPrefixFormatter().format(p.getValue().getProcessor().getAverageValue()),
                p.getValue().processorProperty()));
        minColumn.setCellValueFactory(p -> Bindings.createStringBinding(
                () -> p.getValue().getProcessor() == null ? "NaN" : currentViewPort.getPrefixFormatter().format(p.getValue().getProcessor().getMinValue()),
                p.getValue().processorProperty()));
        maxColumn.setCellValueFactory(p -> Bindings.createStringBinding(
                () -> p.getValue().getProcessor() == null ? "NaN" : currentViewPort.getPrefixFormatter().format(p.getValue().getProcessor().getMaxValue()),
                p.getValue().processorProperty()));
        // "Current" tracks the value nearest to the crosshair's X position.
        currentColumn.setCellValueFactory(p -> Bindings.createStringBinding(
                () -> {
                    if (p.getValue().getProcessor() == null) {
                        return "NaN";
                    }
                    return currentViewPort.getPrefixFormatter().format(p.getValue()
                            .getProcessor()
                            .tryGetNearestValue(getSelectedViewPort().getCrosshair().getCurrentXValue())
                            .orElse(Double.NaN));
                }, getSelectedViewPort().getCrosshair().currentXValueProperty()));
        currentViewPort.getSeriesTable().setRowFactory(this::seriesTableRowFactory);
        // DELETE key removes the selected series from the chart.
        currentViewPort.getSeriesTable().setOnKeyReleased(getBindingManager().registerHandler(event -> {
            if (event.getCode().equals(KeyCode.DELETE)) {
                removeSelectedBinding(currentViewPort.getSeriesTable());
            }
        }));
        currentViewPort.getSeriesTable().setItems(currentViewPort.getDataStore().getSeries());
        currentViewPort.getSeriesTable().getColumns().addAll(visibleColumn, colorColumn, nameColumn, minColumn, maxColumn, avgColumn, currentColumn, pathColumn);
        TableViewUtils.autoFillTableWidthWithLastColumn(currentViewPort.getSeriesTable());
        // --- Titled pane hosting the series table, acting as a drop target ---
        TitledPane newPane = new TitledPane(currentViewPort.getDataStore().getName(), currentViewPort.getSeriesTable());
        newPane.setMinHeight(90.0);
        newPane.setMaxHeight(Double.MAX_VALUE);
        newPane.setOnDragOver(getBindingManager().registerHandler(this::handleDragOverWorksheetView));
        newPane.setOnDragDropped(getBindingManager().registerHandler(this::handleDragDroppedOnLegendTitledPane));
        // Tag the pane with its viewport so drop handlers can find the target chart.
        newPane.setUserData(currentViewPort);
        // --- Title region: label / inline edit fields / toolbar on one grid row ---
        GridPane titleRegion = new GridPane();
        titleRegion.setHgap(5);
        titleRegion.getColumnConstraints().add(new ColumnConstraints(USE_COMPUTED_SIZE, USE_COMPUTED_SIZE, USE_COMPUTED_SIZE, Priority.NEVER, HPos.LEFT, false));
        titleRegion.getColumnConstraints().add(new ColumnConstraints(USE_COMPUTED_SIZE, USE_COMPUTED_SIZE, USE_COMPUTED_SIZE, Priority.ALWAYS, HPos.LEFT, true));
        titleRegion.getColumnConstraints().add(new ColumnConstraints(USE_COMPUTED_SIZE, USE_COMPUTED_SIZE, USE_COMPUTED_SIZE, Priority.NEVER, HPos.RIGHT, false));
        getBindingManager().bind(titleRegion.minWidthProperty(), newPane.widthProperty().subtract(15));
        getBindingManager().bind(titleRegion.maxWidthProperty(), newPane.widthProperty().subtract(15));
        // Static label, hidden while the edit fields are shown (and vice versa).
        Label label = new Label();
        getBindingManager().bind(label.textProperty(), currentViewPort.getDataStore().nameProperty());
        getBindingManager().bind(label.visibleProperty(), currentViewPort.getDataStore().showPropertiesProperty().not());
        HBox editFieldsGroup = new HBox();
        // Collapse the edit-field row to zero height when hidden.
        DoubleBinding db = Bindings.createDoubleBinding(() -> editFieldsGroup.isVisible() ? USE_COMPUTED_SIZE : 0.0, editFieldsGroup.visibleProperty());
        getBindingManager().bind(editFieldsGroup.prefHeightProperty(), db);
        getBindingManager().bind(editFieldsGroup.maxHeightProperty(), db);
        getBindingManager().bind(editFieldsGroup.minHeightProperty(), db);
        getBindingManager().bind(editFieldsGroup.visibleProperty(), currentViewPort.getDataStore().showPropertiesProperty());
        editFieldsGroup.setSpacing(5);
        TextField chartNameField = new TextField();
        // NOTE(review): these bindBidirectional calls bypass getBindingManager(),
        // unlike the rest of this method — possibly an unmanaged binding; confirm.
        chartNameField.textProperty().bindBidirectional(currentViewPort.getDataStore().nameProperty());
        TextField unitNameField = new TextField();
        unitNameField.textProperty().bindBidirectional(currentViewPort.getDataStore().unitProperty());
        ChoiceBox<UnitPrefixes> unitPrefixChoiceBox = new ChoiceBox<>();
        unitPrefixChoiceBox.getItems().setAll(UnitPrefixes.values());
        unitPrefixChoiceBox.getSelectionModel().select(currentViewPort.getDataStore().getUnitPrefixes());
        getBindingManager().bind(currentViewPort.getDataStore().unitPrefixesProperty(), unitPrefixChoiceBox.getSelectionModel().selectedItemProperty());
        HBox.setHgrow(chartNameField, Priority.ALWAYS);
        // Double-click on the title switches to inline edit mode.
        titleRegion.setOnMouseClicked(getBindingManager().registerHandler(event -> {
            if (event.getClickCount() == 2) {
                chartNameField.selectAll();
                chartNameField.requestFocus();
                currentViewPort.getDataStore().setShowProperties(true);
            }
        }));
        editFieldsGroup.getChildren().addAll(chartNameField, unitNameField, unitPrefixChoiceBox);
        // *** Toolbar ***
        HBox toolbar = new HBox();
        toolbar.getStyleClass().add("title-pane-tool-bar");
        toolbar.setAlignment(Pos.CENTER);
        // Chart selector: shows a context menu listing all charts on the worksheet.
        Button selectChartButton = new ToolButtonBuilder<Button>(getBindingManager())
                .setText("Select")
                .setTooltip("Select a chart")
                .setStyleClass("dialog-button")
                .setIconStyleClass("hamburger-icon", "small-icon")
                .setAction(event -> {
                    var btn = (Button) event.getSource();
                    Bounds bounds = btn.getBoundsInLocal();
                    Bounds screenBounds = btn.localToScreen(bounds);
                    int x = (int) screenBounds.getMinX();
                    int y = (int) screenBounds.getMinY();
                    if (btn.getContextMenu() != null) {
                        btn.getContextMenu().show(btn, x, y + btn.getHeight());
                    }
                })
                .build(Button::new);
        ContextMenu menu = new ContextMenu();
        selectChartButton.setContextMenu(menu);
        // One radio item per chart; selecting it makes that chart the active one.
        ToggleGroup group = new ToggleGroup();
        for (int i = 0; i < viewPorts.size(); i++) {
            var m = new RadioMenuItem();
            final int chartIdx = i;
            getBindingManager().bind(m.textProperty(), viewPorts.get(i).getDataStore().nameProperty());
            m.setToggleGroup(group);
            m.setOnAction(getBindingManager().registerHandler(event -> {
                worksheet.setSelectedChart(chartIdx);
            }));
            menu.getItems().add(m);
            if (worksheet.getSelectedChart() == i) {
                group.selectToggle(m);
            }
        }
        // Keep the radio menu in sync when the selected chart changes elsewhere.
        getBindingManager().attachListener(worksheet.selectedChartProperty(), (ChangeListener<Integer>) (obs, oldVal, newVal) -> {
            if (newVal >= 0 && newVal < group.getToggles().size()) {
                group.selectToggle(group.getToggles().get(newVal));
            }
        });
        // Close is disabled while this is the only chart on the worksheet.
        Button closeButton = new ToolButtonBuilder<Button>(getBindingManager())
                .setText("Close")
                .setTooltip("Remove this chart from the worksheet.")
                .setStyleClass("exit")
                .setIconStyleClass("cross-icon", "small-icon")
                .setAction(event -> warnAndRemoveChart(currentViewPort.getDataStore()))
                .bind(Button::disableProperty, Bindings.createBooleanBinding(() -> worksheet.getCharts().size() > 1, worksheet.getCharts()).not())
                .build(Button::new);
        ToggleButton editButton = new ToolButtonBuilder<ToggleButton>(getBindingManager())
                .setText("Settings")
                .setTooltip("Edit the chart's settings")
                .setStyleClass("dialog-button")
                .setIconStyleClass("settings-icon", "small-icon")
                .bindBidirectionnal(ToggleButton::selectedProperty, currentViewPort.getDataStore().showPropertiesProperty())
                .build(ToggleButton::new);
        editButtonsGroup.getToggles().add(editButton);
        // Reorder buttons, only visible while the properties pane is shown.
        Button moveUpButton = new ToolButtonBuilder<Button>(getBindingManager())
                .setText("Up")
                .setTooltip("Move the chart up the list.")
                .setStyleClass("dialog-button")
                .setIconStyleClass("upArrow-icon")
                .bind(Node::visibleProperty, currentViewPort.getDataStore().showPropertiesProperty())
                .setAction(event -> moveChartOrder(currentViewPort.getDataStore(), -1))
                .build(Button::new);
        Button moveDownButton = new ToolButtonBuilder<Button>(getBindingManager())
                .setText("Down")
                .setTooltip("Move the chart down the list.")
                .setStyleClass("dialog-button")
                .setIconStyleClass("downArrow-icon")
                .bind(Node::visibleProperty, currentViewPort.getDataStore().showPropertiesProperty())
                .setAction(event -> moveChartOrder(currentViewPort.getDataStore(), 1))
                .build(Button::new);
        toolbar.getChildren().addAll(moveUpButton, moveDownButton, editButton, closeButton);
        titleRegion.getChildren().addAll(selectChartButton, label, editFieldsGroup, toolbar);
        HBox hBox = new HBox();
        hBox.setAlignment(Pos.CENTER);
        // label and editFieldsGroup share column 1; only one is visible at a time.
        GridPane.setConstraints(selectChartButton, 0, 0, 1, 1, HPos.LEFT, VPos.CENTER);
        GridPane.setConstraints(label, 1, 0, 1, 1, HPos.LEFT, VPos.CENTER);
        GridPane.setConstraints(editFieldsGroup, 1, 0, 1, 1, HPos.LEFT, VPos.CENTER);
        GridPane.setConstraints(toolbar, 2, 0, 1, 1, HPos.RIGHT, VPos.CENTER);
        newPane.setCollapsible(false);
        newPane.setGraphic(titleRegion);
        newPane.setContentDisplay(ContentDisplay.GRAPHIC_ONLY);
        newPane.setAnimated(false);
        currentViewPort.setSeriesDetailsPane(newPane);
    }
    // Expand/collapse the chart-properties side pane as the settings toggle changes.
    getBindingManager().attachListener(editButtonsGroup.selectedToggleProperty(), (ChangeListener<Toggle>) (observable, oldValue, newValue) -> {
        if (newValue != null) {
            chartProperties.expand();
        } else {
            chartProperties.collapse();
        }
    });
    chartProperties.setSibling(chartView);
    if (editButtonsGroup.getSelectedToggle() != null) {
        chartProperties.expand();
    }
    // Restore and then persist the split-pane divider position on the worksheet.
    splitPane.setDividerPositions(worksheet.getDividerPosition());
    getBindingManager().bind(worksheet.dividerPositionProperty(), splitPane.getDividers().get(0).positionProperty());
}
/**
 * Resolves the {@link ChartViewPort} attached to the given titled pane via its
 * user-data tag, if any.
 *
 * @param pane the pane to inspect (may be null)
 * @return the attached viewport, or {@link Optional#empty()} when none is attached
 */
@Override
public Optional<ChartViewPort> getAttachedViewport(TitledPane pane) {
    if (pane == null) {
        return Optional.empty();
    }
    return (pane.getUserData() instanceof ChartViewPort viewPort)
            ? Optional.of(viewPort)
            : Optional.empty();
}
/**
 * Asks the user to confirm removal of the given chart, then removes it.
 * When the chart is part of the current multi-selection, the whole selection
 * is removed instead of just the single chart.
 */
private void warnAndRemoveChart(Chart currentChart) {
    List<Chart> chartsInSelection = new ArrayList<>();
    var selectedIndices = worksheet.getMultiSelectedIndices();
    for (int i = 0; i < viewPorts.size(); i++) {
        if (selectedIndices.contains(i)) {
            chartsInSelection.add(viewPorts.get(i).getDataStore());
        }
    }
    var chartsToRemove = chartsInSelection.contains(currentChart) ? chartsInSelection : List.of(currentChart);
    String chartNames = chartsToRemove.stream().map(Chart::getName).collect(Collectors.joining("\", \""));
    var answer = Dialogs.confirmDialog(root, "Are you sure you want to remove chart \"" +
                    chartNames +
                    "\"?",
            "", ButtonType.YES, ButtonType.NO);
    if (answer == ButtonType.YES) {
        worksheet.getCharts().removeAll(chartsToRemove);
    }
}
/**
 * Moves the given chart {@code pos} positions within the worksheet's chart list
 * (e.g. -1 for up, +1 for down).
 */
private void moveChartOrder(Chart chart, int pos) {
    int idx = worksheet.getCharts().indexOf(chart);
    // Suppress the controller reload normally triggered by the list-change listener
    // for the removal; the add() below then fires the listener with preventReload
    // back to false, so the worksheet reloads exactly once for the whole move.
    this.preventReload = true;
    try {
        worksheet.getCharts().remove(chart);
    } finally {
        // Always restore the flag, even if remove() throws.
        this.preventReload = false;
    }
    worksheet.getCharts().add(idx + pos, chart);
}
/**
 * Drag-over handler for the worksheet view: accepts MOVE transfers whose
 * dragboard carries the time-series-binding MIME type.
 */
private void handleDragOverWorksheetView(DragEvent event) {
    var dragboard = event.getDragboard();
    if (!dragboard.hasContent(DataFormat.lookupMimeType(TimeSeriesBinding.MIME_TYPE))) {
        return;
    }
    event.acceptTransferModes(TransferMode.MOVE);
    event.consume();
}
/**
 * Drag-over handler for the "new chart" drop target: accepts COPY transfers
 * whose dragboard carries the time-series-binding MIME type.
 */
private void handleDragOverNewChartTarget(DragEvent event) {
    var dragboard = event.getDragboard();
    if (!dragboard.hasContent(DataFormat.lookupMimeType(TimeSeriesBinding.MIME_TYPE))) {
        return;
    }
    event.acceptTransferModes(TransferMode.COPY);
    event.consume();
}
/**
 * Drop handler for a chart's legend titled pane: adds the time-series bindings
 * flattened from the selected tree item to the chart attached to the pane.
 */
private void handleDragDroppedOnLegendTitledPane(DragEvent event) {
    Dragboard db = event.getDragboard();
    if (!db.hasContent(DataFormat.lookupMimeType(TimeSeriesBinding.MIME_TYPE))) {
        return;
    }
    TreeView<SourceBinding> treeView = getParentController().getSelectedTreeView();
    if (treeView == null) {
        logger.warn("Cannot complete drag and drop operation: selected TreeView is null");
        event.consume();
        return;
    }
    TreeItem<SourceBinding> item = treeView.getSelectionModel().getSelectedItem();
    if (item == null) {
        logger.warn("Cannot complete drag and drop operation: selected TreeItem is null");
        event.consume();
        return;
    }
    // Bring the drop target's window to the front before showing any dialog.
    Stage targetStage = (Stage) ((Node) event.getSource()).getScene().getWindow();
    if (targetStage != null) {
        targetStage.requestFocus();
    }
    try {
        TitledPane droppedPane = (TitledPane) event.getSource();
        // The target chart was tagged onto the pane as user data in initTableViewPane().
        ChartViewPort viewPort = (ChartViewPort) droppedPane.getUserData();
        addBindings(TreeViewUtils.flattenLeaves(item, true).stream()
                .filter(b -> b instanceof TimeSeriesBinding)
                .map(b -> (TimeSeriesBinding) b).collect(Collectors.toList()), viewPort.getDataStore());
    } catch (Exception e) {
        Dialogs.notifyException("Error adding bindings to existing worksheet", e, root);
    }
    logger.debug("dropped to " + event.toString());
    event.consume();
}
/**
 * Drop handler for the worksheet view: when the drop lands on a specific chart,
 * adds the selected bindings to that chart; otherwise shows a context menu
 * listing the worksheet's charts so the user can pick a target.
 */
private void handleDragDroppedOnWorksheetView(DragEvent event) {
    Dragboard db = event.getDragboard();
    if (db.hasContent(DataFormat.lookupMimeType(TimeSeriesBinding.MIME_TYPE))) {
        TreeView<SourceBinding> treeView = getParentController().getSelectedTreeView();
        if (treeView != null) {
            TreeItem<SourceBinding> item = treeView.getSelectionModel().getSelectedItem();
            if (item != null) {
                Stage targetStage = (Stage) ((Node) event.getSource()).getScene().getWindow();
                if (targetStage != null) {
                    targetStage.requestFocus();
                }
                // Resolve the chart model for the XYChart node the drop landed on, if any.
                Chart targetChart = null;
                if (event.getSource() instanceof XYChart<?, ?>) {
                    for (var v : viewPorts) {
                        if (v.getChart().equals(event.getSource())) {
                            targetChart = v.getDataStore();
                        }
                    }
                }
                var items = treeView.getSelectionModel().getSelectedItems();
                if (targetChart == null) {
                    // FIX: ContextMenu.show(Node, x, y) expects *screen* coordinates;
                    // the original passed event.getSceneY() for the Y coordinate,
                    // misplacing the menu vertically.
                    getChartListContextMenu(items).show((Node) event.getTarget(), event.getScreenX(), event.getScreenY());
                } else {
                    addToCurrentWorksheet(items, targetChart);
                }
            } else {
                logger.warn("Cannot complete drag and drop operation: selected TreeItem is null");
            }
        } else {
            logger.warn("Cannot complete drag and drop operation: selected TreeView is null");
        }
        event.consume();
    }
}
/**
 * Drop handler for the "new chart" target: creates new chart(s) from the
 * selected tree items. (Method name typo kept for compatibility with callers.)
 */
private void handleDragDroppedONewChartTarget(DragEvent event) {
    Dragboard db = event.getDragboard();
    if (!db.hasContent(DataFormat.lookupMimeType(TimeSeriesBinding.MIME_TYPE))) {
        return;
    }
    TreeView<SourceBinding> treeView = getParentController().getSelectedTreeView();
    if (treeView == null) {
        logger.warn("Cannot complete drag and drop operation: selected TreeView is null");
        event.consume();
        return;
    }
    Collection<TreeItem<SourceBinding>> items = treeView.getSelectionModel().getSelectedItems();
    if (items == null || items.isEmpty()) {
        logger.warn("Cannot complete drag and drop operation: selected TreeItem is null");
        event.consume();
        return;
    }
    Stage targetStage = (Stage) ((Node) event.getSource()).getScene().getWindow();
    if (targetStage != null) {
        targetStage.requestFocus();
    }
    // Schedule for later execution in order to let other drag and dropped event to complete before modal dialog gets displayed
    Platform.runLater(() -> addToNewChart(items));
    event.consume();
}
/**
 * Creates one or more new charts from the given tree items and appends them to
 * the worksheet. When the worksheet is empty, its time range is first reset to
 * the initial range of the first new chart.
 */
private void addToNewChart(Collection<TreeItem<SourceBinding>> treeItems) {
    try {
        var maybeCharts = treeItemsAsChartList(treeItems, root);
        if (maybeCharts.isEmpty()) {
            return;
        }
        var charts = maybeCharts.get();
        // Set the time range of the whole worksheet to accommodate the new bindings
        // if there are no other series present.
        if (worksheet.getTotalNumberOfSeries() == 0) {
            try {
                this.timeRangePicker.selectedRangeProperty().setValue(charts.get(0).getInitialTimeRange());
            } catch (DataAdapterException e) {
                logger.error("Failed to reset time range", e);
            }
        }
        worksheet.getCharts().addAll(charts);
    } catch (Exception e) {
        Dialogs.notifyException("Error adding bindings to new chart", e, null);
    }
}
/**
 * Adds all leaf bindings under the given tree items to {@code targetChart}.
 * The work is deferred to the FX thread so pending drag-and-drop events can
 * finish before any modal dialog is shown.
 */
private void addToCurrentWorksheet(Collection<TreeItem<SourceBinding>> treeItems, Chart targetChart) {
    try {
        // Schedule for later execution in order to let other drag and dropped
        // event to complete before modal dialog gets displayed.
        Platform.runLater(() -> {
            if (treeItems == null || treeItems.isEmpty()) {
                return;
            }
            List<SourceBinding> leaves = new ArrayList<>();
            for (var item : treeItems) {
                leaves.addAll(TreeViewUtils.flattenLeaves(item, true));
            }
            addBindings(leaves, targetChart);
        });
    } catch (Exception e) {
        Dialogs.notifyException("Error adding bindings to existing worksheet", e, root);
    }
}
/**
 * Builds a context menu listing every chart on the worksheet (each entry adds
 * the given items to that chart), plus an "Add to new chart" entry.
 */
@Override
public ContextMenu getChartListContextMenu(final Collection<TreeItem<SourceBinding>> items) {
    ContextMenu contextMenu = new ContextMenu();
    for (Chart chart : worksheet.getCharts()) {
        MenuItem menuItem = new MenuItem(chart.getName());
        menuItem.setOnAction(getBindingManager().registerHandler(e -> addToCurrentWorksheet(items, chart)));
        contextMenu.getItems().add(menuItem);
    }
    MenuItem newChart = new MenuItem("Add to new chart");
    // Snapshot the selection: the live collection may change before the action fires.
    newChart.setOnAction(getBindingManager().registerHandler(event -> addToNewChart(new ArrayList<>(items))));
    contextMenu.getItems().addAll(new SeparatorMenuItem(), newChart);
    return contextMenu;
}
/**
 * Releases everything this controller holds: chart state, crosshair preference
 * bindings, viewports and the time range picker, then drops the worksheet
 * reference. Guarded so the teardown runs at most once.
 */
@Override
public void close() {
    super.close();
    // compareAndSet ensures idempotence even under concurrent close() calls.
    if (closed.compareAndSet(false, true)) {
        logger.debug(() -> "Closing worksheetController " + this.toString());
        currentState.close();
        // Undo the bidirectional bindings to the user preferences so this
        // controller does not keep the prefs (and itself) alive.
        hCrosshair.selectedProperty().unbindBidirectional(userPrefs.horizontalMarkerOn.property());
        vCrosshair.selectedProperty().unbindBidirectional(userPrefs.verticalMarkerOn.property());
        currentState = null;
        IOUtils.closeAll(viewPorts);
        viewPorts = null;
        timeRangePicker.dispose();
        this.worksheet = null;
    }
}
/**
 * Returns the FXML resource path that defines this worksheet's view.
 */
@Override
public String getView() {
    return WORKSHEET_VIEW_FXML;
}
/**
 * Installs listeners that invoke {@code action} (typically a controller reload)
 * whenever a property change requires the worksheet to be rebuilt: chart layout,
 * per-chart unit prefixes or chart type, and additions/removals in the chart list.
 *
 * @param action the callback to run with this controller when a reload is required
 */
@Override
public void setReloadRequiredHandler(Consumer<WorksheetController> action) {
    ChangeListener<Object> controllerReloadListener = (observable, oldValue, newValue) -> {
        if (newValue != null) {
            logger.debug(() -> "Reloading worksheet controller because property changed from: " + oldValue + " to " + newValue);
            action.accept(this);
            // This controller is replaced by the reload; release its resources.
            this.close();
        }
    };
    getBindingManager().attachListener(worksheet.chartLayoutProperty(), controllerReloadListener);
    this.worksheet.getCharts().forEach(c -> {
        getBindingManager().attachListener(c.unitPrefixesProperty(), controllerReloadListener);
        getBindingManager().attachListener(c.chartTypeProperty(), controllerReloadListener);
    });
    ListChangeListener<Chart> chartListListener = c -> {
        boolean reloadNeeded = false;
        while (c.next()) {
            // Permutations (reordering) and in-place updates require no reload;
            // only additions and removals do. (Removed the original's empty
            // placeholder loop/branches for the former two cases.)
            if (c.wasPermutated() || c.wasUpdated()) {
                continue;
            }
            if (c.wasAdded()) {
                getParentController().getWorkspace().setPresentationMode(false);
                // Select the last chart added so the reload focuses on it.
                List<? extends Chart> added = c.getAddedSubList();
                Chart chart = added.get(added.size() - 1);
                int chartIndex = worksheet.getCharts().indexOf(chart);
                worksheet.setSelectedChart(chartIndex);
                reloadNeeded = true;
            }
            if (c.wasRemoved()) {
                // Keep the selection index within the shrunken list's bounds.
                worksheet.setSelectedChart(Math.min(c.getList().size() - 1, c.getFrom()));
                reloadNeeded = true;
            }
        }
        if (!preventReload && reloadNeeded) {
            logger.debug(() -> "Reloading worksheet controller because list changed: " + c + " in controller " + this);
            action.accept(this);
        } else {
            logger.debug(() -> "Reload explicitly prevented on change " + c);
        }
    };
    getBindingManager().attachListener(worksheet.getCharts(), chartListListener);
}
/**
 * Adds the {@code TimeSeriesBinding} elements among {@code sourceBindings} as new
 * series on {@code targetChart}, wiring re-plot and checkbox-sync listeners for
 * each one, then refreshes the worksheet. Other binding types are ignored.
 */
private void addBindings(Collection<SourceBinding> sourceBindings, Chart targetChart) {
    // Keep only time-series bindings.
    Collection<TimeSeriesBinding> timeSeriesBindings = new ArrayList<>();
    for (var sb : sourceBindings) {
        if (sb instanceof TimeSeriesBinding timeSeriesBinding) {
            timeSeriesBindings.add(timeSeriesBinding);
        }
    }
    // Ask for confirmation before adding a large number of series to one chart.
    if (timeSeriesBindings.size() >= userPrefs.maxSeriesPerChartBeforeWarning.get().intValue()) {
        if (Dialogs.confirmDialog(root,
                "This action will add " + timeSeriesBindings.size() + " series on a single chart.",
                "Are you sure you want to proceed?",
                ButtonType.YES, ButtonType.NO) != ButtonType.YES) {
            return;
        }
    }
    // Syncs the target chart's "show all" header checkbox with the individual
    // series' selected state (checked / unchecked / indeterminate).
    // NOTE(review): assumes the header checkbox is the graphic of the table's
    // first column, as set up in initTableViewPane() — confirm if columns change.
    InvalidationListener isVisibleListener = (observable) -> {
        viewPorts.stream().filter(v -> v.getDataStore().equals(targetChart)).findFirst().ifPresent(v -> {
            boolean andAll = true;
            boolean orAll = false;
            for (TimeSeriesInfo<Double> t : targetChart.getSeries()) {
                andAll &= t.isSelected();
                orAll |= t.isSelected();
            }
            CheckBox showAllCheckBox = (CheckBox) v.getSeriesTable().getColumns().get(0).getGraphic();
            showAllCheckBox.setIndeterminate(Boolean.logicalXor(andAll, orAll));
            showAllCheckBox.setSelected(andAll);
        });
    };
    for (TimeSeriesBinding b : timeSeriesBindings) {
        TimeSeriesInfo<Double> newSeries = TimeSeriesInfo.fromBinding(b);
        // Re-plot the owning viewport whenever this series is (de)selected.
        getBindingManager().attachListener(newSeries.selectedProperty(),
                (observable, oldValue, newValue) ->
                        viewPorts.stream()
                                .filter(v -> v.getDataStore().equals(targetChart))
                                .findFirst()
                                .ifPresent(v -> plotChart(v, false))
        );
        getBindingManager().attachListener(newSeries.selectedProperty(), isVisibleListener);
        targetChart.addSeries(newSeries);
        // Explicitly call the listener to initialize the proper status of the checkbox
        isVisibleListener.invalidated(null);
    }
    // Set the time range of the whole worksheet to accommodate the new bindings
    // if there are no other series present.
    if (worksheet.getTotalNumberOfSeries() == timeSeriesBindings.size()) {
        try {
            this.timeRangePicker.selectedRangeProperty().setValue(targetChart.getInitialTimeRange());
        } catch (DataAdapterException e) {
            logger.error("Failed to reset time range", e);
        }
    }
    invalidate(false, false, false);
}
/**
 * Removes every series currently selected in the given table from the chart,
 * clears the table selection and refreshes the worksheet.
 */
private void removeSelectedBinding(TableView<TimeSeriesInfo<Double>> seriesTable) {
    var selectionModel = seriesTable.getSelectionModel();
    // Snapshot the selection before mutating the items list it observes.
    var toRemove = new ArrayList<>(selectionModel.getSelectedItems());
    seriesTable.getItems().removeAll(toRemove);
    selectionModel.clearSelection();
    invalidate(false, false, false);
}
/**
 * Re-plots every chart, forcing data to be fetched again from the sources
 * (bypassing any cached samples).
 */
@Override
public void refresh() {
    // invalidate(saveToHistory=false, dontPlotChart=false, forceRefresh=true)
    invalidate(false, false, true);
}
/**
 * Restores the previous selection from the worksheet's history, if any.
 */
@Override
public void navigateBackward() {
    worksheet.getHistory().getPrevious().ifPresent(h -> currentState.setSelection(h, false));
}
/**
 * Restores the next selection from the worksheet's history, if any.
 */
@Override
public void navigateForward() {
    worksheet.getHistory().getNext().ifPresent(h -> currentState.setSelection(h, false));
}
// FXML action handler: delegates to navigateBackward().
@FXML
private void handleHistoryBack(ActionEvent actionEvent) {
    navigateBackward();
}
// FXML action handler: delegates to navigateForward().
@FXML
private void handleHistoryForward(ActionEvent actionEvent) {
    navigateForward();
}
// FXML action handler: delegates to refresh().
@FXML
private void handleRefresh(ActionEvent actionEvent) {
    this.refresh();
}
// FXML action handler: delegates to saveSnapshot().
@FXML
private void handleTakeSnapshot(ActionEvent actionEvent) {
    saveSnapshot();
}
/**
 * Records the current selection in the worksheet's history (optionally) and
 * re-plots every viewport.
 *
 * @param saveToHistory when true, pushes the current selection onto the history stack
 * @param dontPlotChart when true, only the history is updated; nothing is re-plotted
 * @param forceRefresh  when true, bypasses cached samples and re-fetches from sources
 * @return a future that completes once all per-viewport plots have completed
 * (already completed when {@code dontPlotChart} is true)
 */
public CompletableFuture<?> invalidate(boolean saveToHistory, boolean dontPlotChart, boolean forceRefresh) {
    var p = Profiler.start("Invalidate worksheet: " + getWorksheet().getName() +
            " [saveToHistory=" + saveToHistory + ", " +
            "dontPlotChart=" + dontPlotChart + ", " +
            "forceRefresh=" + forceRefresh + "]", logger::perf);
    worksheet.getHistory().setHead(currentState.asSelection(), saveToHistory);
    logger.debug(() -> worksheet.getHistory().backward().dump());
    if (dontPlotChart) {
        // FIX: close the profiler session on the early-return path too;
        // previously it was only closed once the plot futures completed,
        // so this path leaked the session.
        p.close();
        return CompletableFuture.completedFuture(null);
    }
    CompletableFuture<?>[] futurePlots = new CompletableFuture<?>[viewPorts.size()];
    for (int i = 0; i < viewPorts.size(); i++) {
        futurePlots[i] = plotChart(viewPorts.get(i), forceRefresh);
    }
    var invalidatedFuture = CompletableFuture.allOf(futurePlots);
    // End the profiling span once every plot has completed (or failed).
    invalidatedFuture.whenCompleteAsync((unused, throwable) -> p.close());
    return invalidatedFuture;
}
/**
 * Asynchronously fetches data for the given viewport over its current time
 * selection and replaces the JavaFX chart's series with the result.
 *
 * @param viewPort     the viewport whose chart to (re)plot
 * @param forceRefresh when true, bypasses cached samples and re-fetches from the source
 * @return a future for the background task (already completed when the viewport
 * has no selection state)
 */
public CompletableFuture<?> plotChart(ChartViewPort viewPort, boolean forceRefresh) {
    if (!currentState.get(viewPort.getDataStore()).isPresent()) {
        return CompletableFuture.completedFuture(null);
    }
    XYChartSelection<ZonedDateTime, Double> currentSelection = currentState.get(viewPort.getDataStore()).get().asSelection();
    logger.debug(() -> "currentSelection=" + (currentSelection == null ? "null" : currentSelection.toString()));
    // Busy counter is incremented here and decremented in BOTH the success and
    // failure callbacks below; keep these paths in sync.
    nbBusyPlotTasks.setValue(nbBusyPlotTasks.get() + 1);
    return AsyncTaskManager.getInstance().submit(() -> {
                // Background thread: fetch data, then build one XYChart series per
                // plottable (selected, non-empty) series info.
                viewPort.getDataStore().fetchDataFromSources(currentSelection.getStartX(), currentSelection.getEndX(), forceRefresh);
                return viewPort.getDataStore().getSeries()
                        .stream()
                        .filter(series -> {
                            if (series.getProcessor() == null) {
                                logger.warn("Series " + series.getDisplayName() + " does not contain any data to plot");
                                return false;
                            }
                            if (!series.isSelected()) {
                                logger.debug(() -> "Series " + series.getDisplayName() + " is not selected");
                                return false;
                            }
                            return true;
                        })
                        .map(ts -> makeXYChartSeries(viewPort.getDataStore(), ts))
                        .collect(Collectors.toList());
            },
            event -> {
                // Success callback (FX thread): install the new series on the chart.
                try {
                    if (!closed.get()) {
                        nbBusyPlotTasks.setValue(nbBusyPlotTasks.get() - 1);
                        viewPort.getChart().getData().setAll((Collection<? extends XYChart.Series<ZonedDateTime, Double>>) event.getSource().getValue());
                        // Recolor legend symbols to match each series' display color.
                        // NOTE(review): assumes legend items appear in the same order
                        // as the selected series — confirm against the chart skin.
                        for (Node n : viewPort.getChart().getChildrenUnmodifiable()) {
                            if (n instanceof Legend legend) {
                                int i = 0;
                                for (Legend.LegendItem legendItem : legend.getItems()) {
                                    legendItem.getSymbol().setStyle("-fx-background-color: " +
                                            colorToRgbaString(viewPort.getDataStore()
                                                    .getSeries()
                                                    .stream()
                                                    .filter(TimeSeriesInfo::isSelected)
                                                    .collect(Collectors.toList())
                                                    .get(i)
                                                    .getDisplayColor()));
                                    i++;
                                }
                            }
                        }
                        if (worksheet.getChartLayout() == ChartLayout.OVERLAID) {
                            // Force a redraw of the charts and their Y Axis considering their proper width.
                            new DelayedAction(() -> viewPort.getChart().resize(0.0, 0.0), Duration.millis(50)).submit();
                        }
                    }
                } catch (Exception e) {
                    Dialogs.notifyException("Unexpected error while plotting data", e, root);
                }
            },
            event -> {
                // Failure callback: release the busy counter and report the error.
                if (!closed.get()) {
                    nbBusyPlotTasks.setValue(nbBusyPlotTasks.get() - 1);
                    Dialogs.notifyException("Failed to retrieve data from source", event.getSource().getException(), root);
                }
            });
}
/**
 * Builds a JavaFX {@code XYChart.Series} from the given series info and styles it
 * according to the chart type: circle nodes for scatter plots, stroke/fill color
 * and width bindings for area, stacked and line plots.
 *
 * @param currentChart the chart model providing type, stroke width and opacity
 * @param series       the series info providing data points and display color
 * @return the populated, styled series
 */
private XYChart.Series<ZonedDateTime, Double> makeXYChartSeries(Chart currentChart, TimeSeriesInfo<Double> series) {
    try (Profiler p = Profiler.start("Building XYChart.Series data for" + series.getDisplayName(), logger::perf)) {
        XYChart.Series<ZonedDateTime, Double> newSeries = new XYChart.Series<>();
        newSeries.setName(series.getDisplayName());
        // Legend swatch: a fixed 10x10 region filled with the series' display color.
        var r = new Region();
        r.setPrefSize(10, 10);
        r.setMaxSize(10, 10);
        r.setMinSize(10, 10);
        r.setBackground(new Background(new BackgroundFill(series.getDisplayColor(), null, null)));
        newSeries.setNode(r);
        newSeries.getData().setAll(series.getProcessor().getData());
        if (currentChart.getChartType() == ChartType.SCATTER) {
            // Scatter: one circle per data point, radius/color bound to chart settings.
            for (var data : newSeries.getData()) {
                var c = new Circle();
                getBindingManager().bind(c.radiusProperty(), currentChart.strokeWidthProperty());
                getBindingManager().bind(c.fillProperty(), series.displayColorProperty());
                data.setNode(c);
            }
        } else {
            // The series node is created by the chart skin later; style it when it appears.
            getBindingManager().attachListener(newSeries.nodeProperty(), (ChangeListener<Node>) (node, oldNode, newNode) -> {
                if (newNode != null) {
                    switch (currentChart.getChartType()) {
                        case AREA:
                        case STACKED:
                            ObservableList<Node> children = ((Group) newNode).getChildren();
                            // FIX: require at least two children before indexing —
                            // the original guarded `size() >= 1` but then read both
                            // get(0) and get(1), which would throw on a single child.
                            if (children != null && children.size() >= 2) {
                                Path stroke = (Path) children.get(1);
                                Path fill = (Path) children.get(0);
                                logger.trace(() -> "Setting color of series " + series.getBinding().getLabel() + " to " + series.getDisplayColor());
                                // NOTE(review): these binds bypass getBindingManager(),
                                // unlike the scatter branch above — confirm intentional.
                                stroke.visibleProperty().bind(currentChart.showAreaOutlineProperty());
                                stroke.strokeWidthProperty().bind(currentChart.strokeWidthProperty());
                                stroke.strokeProperty().bind(series.displayColorProperty());
                                fill.fillProperty().bind(Bindings.createObjectBinding(
                                        () -> series.getDisplayColor().deriveColor(0.0, 1.0, 1.0, currentChart.getGraphOpacity()),
                                        series.displayColorProperty(),
                                        currentChart.graphOpacityProperty()));
                            }
                            break;
                        case LINE:
                            Path stroke = (Path) newNode;
                            logger.trace(() -> "Setting color of series " + series.getBinding().getLabel() + " to " + series.getDisplayColor());
                            stroke.strokeWidthProperty().bind(currentChart.strokeWidthProperty());
                            stroke.strokeProperty().bind(series.displayColorProperty());
                            break;
                        default:
                            break;
                    }
                }
            });
        }
        return newSeries;
    }
}
/**
 * Captures a scaled PNG snapshot of the worksheet and prompts the user to save
 * it to disk. Temporarily hides the navigation toolbar, shows the title block
 * and disables edit mode while capturing; the original state is restored in
 * the finally block.
 */
@Override
public void saveSnapshot() {
    WritableImage snapImg;
    boolean wasModeEdit = worksheet.isEditModeEnabled();
    try {
        // Invalidate chart nodes cache so that it is re-rendered when scaled up
        // and not just stretched for snapshot
        viewPorts.forEach(v -> v.getChart().setCache(false));
        worksheet.setEditModeEnabled(false);
        worksheetTitleBlock.setManaged(true);
        worksheetTitleBlock.setVisible(true);
        navigationToolbar.setManaged(false);
        navigationToolbar.setVisible(false);
        // AUTO scale follows the display's output scale; otherwise use the
        // user-chosen fixed scale factor.
        var scaleX = userPrefs.snapshotOutputScale.get() == SnapshotOutputScale.AUTO ?
                Dialogs.getOutputScaleX(root) :
                userPrefs.snapshotOutputScale.get().getScaleFactor();
        var scaleY = userPrefs.snapshotOutputScale.get() == SnapshotOutputScale.AUTO ?
                Dialogs.getOutputScaleY(root) :
                userPrefs.snapshotOutputScale.get().getScaleFactor();
        snapImg = SnapshotUtils.scaledSnapshot(screenshotCanvas, scaleX, scaleY);
    } catch (Exception e) {
        Dialogs.notifyException("Failed to create snapshot", e, root);
        return;
    } finally {
        // Restore the UI exactly as it was before the capture.
        viewPorts.forEach(v -> v.getChart().setCache(true));
        worksheet.setEditModeEnabled(wasModeEdit);
        navigationToolbar.setManaged(true);
        navigationToolbar.setVisible(true);
        worksheetTitleBlock.setManaged(false);
        worksheetTitleBlock.setVisible(false);
    }
    FileChooser fileChooser = new FileChooser();
    fileChooser.setTitle("Save SnapShot");
    fileChooser.getExtensionFilters().add(new FileChooser.ExtensionFilter("Image Files", "*.png"));
    Dialogs.getInitialDir(UserHistory.getInstance().mostRecentSaveFolders).ifPresent(fileChooser::setInitialDirectory);
    fileChooser.setInitialFileName(String.format("binjr_snapshot_%s.png", worksheet.getName()));
    File selectedFile = fileChooser.showSaveDialog(Dialogs.getStage(root));
    if (selectedFile != null) {
        try {
            // Remember the chosen folder for the next save dialog.
            if (selectedFile.getParent() != null) {
                UserHistory.getInstance().mostRecentSaveFolders.push(selectedFile.getParentFile().toPath());
            }
            ImageIO.write(
                    SwingFXUtils.fromFXImage(snapImg, null),
                    "png",
                    selectedFile);
        } catch (IOException e) {
            Dialogs.notifyException("Failed to save snapshot to disk", e, root);
        }
    }
}
/**
 * Returns the viewport for the worksheet's selected chart, clamping the
 * selected index into the valid range of the viewport list.
 *
 * @throws IllegalStateException if the resolved viewport slot holds null
 */
private ChartViewPort getSelectedViewPort() {
    int clampedIndex = Math.max(0, Math.min(viewPorts.size() - 1, worksheet.getSelectedChart()));
    var viewPort = viewPorts.get(clampedIndex);
    if (viewPort == null) {
        throw new IllegalStateException("Could not retrieve selected viewport on current worksheet");
    }
    return viewPort;
}
/**
 * Builds a table row for the series table supporting row re-ordering via drag
 * and drop: the dragged row's index is transferred on the dragboard under
 * {@code SERIALIZED_MIME_TYPE}, and dropping moves the series to the target
 * position before re-plotting.
 *
 * @param tv the table view the row belongs to
 * @return the configured {@link TableRow}
 */
private TableRow<TimeSeriesInfo<Double>> seriesTableRowFactory(TableView<TimeSeriesInfo<Double>> tv) {
    TableRow<TimeSeriesInfo<Double>> row = new TableRow<>();
    // Start a MOVE gesture carrying the row index; use a snapshot of the row as drag view.
    row.setOnDragDetected(getBindingManager().registerHandler(event -> {
        if (!row.isEmpty()) {
            Integer index = row.getIndex();
            Dragboard db = row.startDragAndDrop(TransferMode.MOVE);
            db.setDragView(SnapshotUtils.scaledSnapshot(row, Dialogs.getOutputScaleX(root), Dialogs.getOutputScaleY(root)));
            ClipboardContent cc = new ClipboardContent();
            cc.put(SERIALIZED_MIME_TYPE, index);
            db.setContent(cc);
            event.consume();
        }
    }));
    // Only accept the transfer when hovering over a row other than the dragged one.
    row.setOnDragOver(getBindingManager().registerHandler(event -> {
        Dragboard db = event.getDragboard();
        if (db.hasContent(SERIALIZED_MIME_TYPE) && row.getIndex() != (Integer) db.getContent(SERIALIZED_MIME_TYPE)) {
            event.acceptTransferModes(TransferMode.COPY_OR_MOVE);
            event.consume();
        }
    }));
    // Move the dragged series to the drop position, reselect it and re-plot.
    row.setOnDragDropped(getBindingManager().registerHandler(event -> {
        Dragboard db = event.getDragboard();
        if (db.hasContent(SERIALIZED_MIME_TYPE)) {
            int draggedIndex = (Integer) db.getContent(SERIALIZED_MIME_TYPE);
            TimeSeriesInfo<Double> draggedseries = tv.getItems().remove(draggedIndex);
            int dropIndex;
            if (row.isEmpty()) {
                // Dropped below the last populated row: append at the end.
                dropIndex = tv.getItems().size();
            } else {
                dropIndex = row.getIndex();
            }
            tv.getItems().add(dropIndex, draggedseries);
            event.setDropCompleted(true);
            tv.getSelectionModel().clearAndSelect(dropIndex);
            invalidate(false, false, false);
            event.consume();
        }
    }));
    return row;
}
/**
 * Shows or hides the properties pane of the selected chart based on whether
 * no "settings" toggle is currently selected in {@code editButtonsGroup}.
 */
@Override
public void toggleShowPropertiesPane() {
    var viewPort = getSelectedViewPort();
    if (viewPort == null) {
        return;
    }
    boolean noToggleSelected = (editButtonsGroup.getSelectedToggle() == null);
    viewPort.getDataStore().setShowProperties(noToggleSelected);
}
/**
 * Explicitly shows or hides the properties pane of the selected chart.
 *
 * @param value true to show the properties pane, false to hide it
 */
@Override
public void setShowPropertiesPane(boolean value) {
    var viewPort = getSelectedViewPort();
    if (viewPort != null) {
        viewPort.getDataStore().setShowProperties(value);
    }
}
/**
 * Returns the chart view ports of this worksheet, in display order.
 * <p>
 * NOTE(review): this exposes the internal mutable list directly — callers may
 * rely on that, so it is left as-is; confirm before wrapping it unmodifiable.
 *
 * @return the list of {@link ChartViewPort} instances backing this worksheet
 */
@Override
public List<ChartViewPort> getViewPorts() {
    return viewPorts;
}
}
|
binjr-core/src/main/java/eu/binjr/core/controllers/XYChartsWorksheetController.java
|
/*
* Copyright 2016-2021 Frederic Thevenet
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package eu.binjr.core.controllers;
import com.sun.javafx.charts.Legend;
import eu.binjr.common.io.IOUtils;
import eu.binjr.common.javafx.charts.*;
import eu.binjr.common.javafx.controls.*;
import eu.binjr.common.logging.Logger;
import eu.binjr.common.logging.Profiler;
import eu.binjr.common.text.NoopPrefixFormatter;
import eu.binjr.core.data.adapters.DataAdapter;
import eu.binjr.core.data.adapters.SourceBinding;
import eu.binjr.core.data.adapters.TimeSeriesBinding;
import eu.binjr.core.data.async.AsyncTaskManager;
import eu.binjr.core.data.exceptions.DataAdapterException;
import eu.binjr.core.data.exceptions.NoAdapterFoundException;
import eu.binjr.core.data.workspace.Chart;
import eu.binjr.core.data.workspace.*;
import eu.binjr.core.dialogs.Dialogs;
import eu.binjr.core.preferences.SnapshotOutputScale;
import eu.binjr.core.preferences.UserHistory;
import eu.binjr.core.preferences.UserPreferences;
import javafx.application.Platform;
import javafx.beans.InvalidationListener;
import javafx.beans.binding.Bindings;
import javafx.beans.binding.BooleanBinding;
import javafx.beans.binding.DoubleBinding;
import javafx.beans.property.*;
import javafx.beans.value.ChangeListener;
import javafx.collections.ListChangeListener;
import javafx.collections.ObservableList;
import javafx.css.PseudoClass;
import javafx.embed.swing.SwingFXUtils;
import javafx.event.ActionEvent;
import javafx.fxml.FXML;
import javafx.fxml.FXMLLoader;
import javafx.geometry.*;
import javafx.scene.CacheHint;
import javafx.scene.Group;
import javafx.scene.Node;
import javafx.scene.chart.*;
import javafx.scene.control.*;
import javafx.scene.control.cell.CheckBoxTableCell;
import javafx.scene.control.cell.PropertyValueFactory;
import javafx.scene.control.cell.TextFieldTableCell;
import javafx.scene.image.WritableImage;
import javafx.scene.input.*;
import javafx.scene.layout.*;
import javafx.scene.paint.Color;
import javafx.scene.shape.Circle;
import javafx.scene.shape.Path;
import javafx.scene.text.TextAlignment;
import javafx.stage.FileChooser;
import javafx.stage.Stage;
import javafx.util.Duration;
import org.controlsfx.control.MaskerPane;
import javax.imageio.ImageIO;
import java.io.File;
import java.io.IOException;
import java.net.URL;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
import java.util.*;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.stream.Collectors;
import static javafx.scene.layout.Region.USE_COMPUTED_SIZE;
/**
* The controller class for the time series view.
*
* @author Frederic Thevenet
*/
public class XYChartsWorksheetController extends WorksheetController {
    /** Path of the FXML resource backing this controller's view. */
    public static final String WORKSHEET_VIEW_FXML = "/eu/binjr/views/XYChartsWorksheetView.fxml";
    // Dragboard format used to move series rows between table positions.
    private static final DataFormat SERIALIZED_MIME_TYPE = new DataFormat("application/x-java-serialized-object");
    // Dragboard format used to reorder whole chart view ports.
    private static final DataFormat VIEWPORT_DRAG_FORMAT = new DataFormat("viewport_drag_format");
    private static final Logger logger = Logger.create(XYChartsWorksheetController.class);
    // Horizontal gap (pixels) between consecutive y-axes in the overlaid layout.
    private static final double Y_AXIS_SEPARATION = 10;
    // Pseudo-class applied to drop targets while a drag gesture hovers over them.
    private static final PseudoClass HOVER_PSEUDO_CLASS = PseudoClass.getPseudoClass("hover");
    private final UserPreferences userPrefs = UserPreferences.getInstance();
    // Toggle group whose selection state drives the chart properties pane visibility.
    private final ToggleGroup editButtonsGroup = new ToggleGroup();
    // Number of plot tasks currently in flight; > 0 shows the masker pane.
    private final IntegerProperty nbBusyPlotTasks = new SimpleIntegerProperty(0);
    @FXML
    public AnchorPane root;
    // Drop target used to create a new chart from a dragged tree binding.
    @FXML
    Pane newChartDropTarget;
    // One view port per chart in the worksheet, in display order.
    private List<ChartViewPort> viewPorts = new ArrayList<>();
    private XYChartsWorksheet worksheet;
    // When true, listeners that would normally trigger a reload are ignored.
    private volatile boolean preventReload = false;
    private final AtomicBoolean closed = new AtomicBoolean(false);
    @FXML
    private Pane chartParent;
    @FXML
    private AnchorPane chartViewport;
    @FXML
    private AnchorPane chartView;
    @FXML
    private TextField yMinRange;
    @FXML
    private TextField yMaxRange;
    @FXML
    private StackPane seriesTableContainer;
    @FXML
    private SplitPane splitPane;
    @FXML
    private Button backButton;
    @FXML
    private Button forwardButton;
    @FXML
    private Button refreshButton;
    @FXML
    private Button snapshotButton;
    @FXML
    private ToggleButton vCrosshair;
    @FXML
    private ToggleButton hCrosshair;
    @FXML
    private Button addChartButton;
    @FXML
    private MaskerPane worksheetMaskerPane;
    @FXML
    private ContextMenu seriesListMenu;
    @FXML
    private Button selectChartLayout;
    @FXML
    private TimeRangePicker timeRangePicker;
    @FXML
    private AnchorPane chartsLegendsPane;
    @FXML
    private DrawerPane chartProperties;
    @FXML
    private ToolBar chartsToolbar;
    @FXML
    private HBox navigationToolbar;
    // Shared zoom/time-range state applied to every chart of the worksheet.
    private ChartViewportsState currentState;
    // Title block (worksheet name + time range) shown only while taking snapshots.
    private Pane worksheetTitleBlock;
    // Off-screen container used to compose the snapshot image.
    private VBox screenshotCanvas;
    private Profiler worksheetRefreshProfiler = null;
/**
 * Initializes a new controller for the given worksheet and re-attaches each
 * series binding to its source {@link DataAdapter}, resolved by adapter id.
 *
 * @param parentController the parent {@link MainViewController}
 * @param worksheet        the worksheet to display
 * @param sourcesAdapters  the adapters available for resolving series bindings
 * @throws NoAdapterFoundException if a series references an adapter id not
 *                                 present in {@code sourcesAdapters}
 */
public XYChartsWorksheetController(MainViewController parentController, XYChartsWorksheet worksheet, Collection<DataAdapter<Double>> sourcesAdapters)
        throws NoAdapterFoundException {
    super(parentController);
    this.worksheet = worksheet;
    // Attach bindings
    for (Chart chart : worksheet.getCharts()) {
        for (TimeSeriesInfo<Double> s : chart.getSeries()) {
            UUID id = s.getBinding().getAdapterId();
            // Match strictly on adapter id; null ids/adapters never match.
            DataAdapter<Double> da = sourcesAdapters
                    .stream()
                    .filter(a -> (id != null && a != null && a.getId() != null) && id.equals(a.getId()))
                    .findAny()
                    .orElseThrow(() -> new NoAdapterFoundException("Failed to find a valid adapter with id " + (id != null ? id.toString() : "null")));
            s.getBinding().setAdapter(da);
        }
    }
}
/**
 * Formats a JavaFX {@link Color} as a CSS {@code rgba(r,g,b,a)} string.
 * <p>
 * Fix: formats with {@link java.util.Locale#ROOT} so the opacity always uses a
 * '.' decimal separator. With the default locale, {@code %f} can emit a ','
 * (e.g. in French or German locales), producing a string CSS cannot parse.
 *
 * @param c the color to format; must not be null
 * @return the {@code rgba(...)} CSS representation of the color
 */
private static String colorToRgbaString(Color c) {
    return String.format(Locale.ROOT, "rgba(%d,%d,%d,%f)",
            Math.round(c.getRed() * 255),
            Math.round(c.getGreen() * 255),
            Math.round(c.getBlue() * 255),
            c.getOpacity());
}
/**
 * Converts source-binding tree items into a list of charts: one chart per
 * sub-tree split above the leaves, with one series per leaf time-series binding.
 * <p>
 * If the total number of series reaches the user-configured warning threshold,
 * the user is asked to confirm; declining returns {@link Optional#empty()}.
 *
 * @param treeItems the tree items to convert
 * @param dlgRoot   the node used to anchor the confirmation dialog
 * @return the charts to add, or empty if the user cancelled
 */
private Optional<List<Chart>> treeItemsAsChartList(Collection<TreeItem<SourceBinding>> treeItems, Node dlgRoot) {
    var charts = new ArrayList<Chart>();
    var totalBindings = 0;
    for (var treeItem : treeItems) {
        for (var t : TreeViewUtils.splitAboveLeaves(treeItem, true)) {
            if (t.getValue() instanceof TimeSeriesBinding binding) {
                // One chart per split node, configured from that node's binding.
                Chart chart = new Chart(
                        binding.getLegend(),
                        binding.getGraphType(),
                        binding.getUnitName(),
                        binding.getUnitPrefix()
                );
                // Every leaf under this node becomes a series of the chart.
                for (var b : TreeViewUtils.flattenLeaves(t)) {
                    if (b instanceof TimeSeriesBinding leafBinding) {
                        chart.addSeries(TimeSeriesInfo.fromBinding(leafBinding));
                        totalBindings++;
                    }
                }
                charts.add(chart);
            }
        }
    }
    // Guard against accidentally adding a huge number of series at once.
    if (totalBindings >= UserPreferences.getInstance().maxSeriesPerChartBeforeWarning.get().intValue()) {
        if (Dialogs.confirmDialog(dlgRoot,
                "This action will add " + totalBindings + " series on a single worksheet.",
                "Are you sure you want to proceed?",
                ButtonType.YES, ButtonType.NO) != ButtonType.YES) {
            return Optional.empty();
        }
    }
    return Optional.of(charts);
}
/**
 * Loads the chart properties view, wires it to a new controller for the given
 * chart, anchors it to fill the properties drawer, and returns the controller.
 *
 * @param chart the chart whose properties the pane will edit
 * @return the new {@link ChartPropertiesController}
 * @throws IOException if the FXML resource cannot be loaded
 */
private ChartPropertiesController buildChartPropertiesController(Chart chart) throws IOException {
    var controller = new ChartPropertiesController(worksheet, chart);
    var fxmlLoader = new FXMLLoader(getClass().getResource("/eu/binjr/views/ChartPropertiesView.fxml"));
    fxmlLoader.setController(controller);
    Pane propertiesPane = fxmlLoader.load();
    // Anchor on all four sides so the pane fills the drawer.
    AnchorPane.setTopAnchor(propertiesPane, 0.0);
    AnchorPane.setBottomAnchor(propertiesPane, 0.0);
    AnchorPane.setLeftAnchor(propertiesPane, 0.0);
    AnchorPane.setRightAnchor(propertiesPane, 0.0);
    propertiesPane.getStyleClass().add("toolPane");
    chartProperties.getChildren().add(propertiesPane);
    return controller;
}
/**
 * Returns the worksheet displayed by this controller.
 *
 * @return the backing {@link XYChartsWorksheet}
 */
@Override
public Worksheet getWorksheet() {
    return worksheet;
}
/**
 * Initializes the controller after FXML injection: builds the chart view
 * ports, navigation pane and series tables, wires the down-sampling preference
 * listeners and the "new chart" drop target, then schedules the initial plot.
 *
 * @param location  the FXML location (passed through to the superclass)
 * @param resources the resource bundle (passed through to the superclass)
 */
@Override
public void initialize(URL location, ResourceBundle resources) {
    // Fail fast (with -ea) if FXML injection did not populate the expected controls.
    assert root != null : "fx:id\"root\" was not injected!";
    assert chartParent != null : "fx:id\"chartParent\" was not injected!";
    assert seriesTableContainer != null : "fx:id\"seriesTableContainer\" was not injected!";
    assert backButton != null : "fx:id\"backButton\" was not injected!";
    assert forwardButton != null : "fx:id\"forwardButton\" was not injected!";
    assert refreshButton != null : "fx:id\"refreshButton\" was not injected!";
    assert vCrosshair != null : "fx:id\"vCrosshair\" was not injected!";
    assert hCrosshair != null : "fx:id\"hCrosshair\" was not injected!";
    assert snapshotButton != null : "fx:id\"snapshotButton\" was not injected!";
    try {
        // Show the masker pane whenever at least one plot task is running.
        getBindingManager().bind(worksheetMaskerPane.visibleProperty(), nbBusyPlotTasks.greaterThan(0));
        initChartViewPorts();
        initNavigationPane();
        initTableViewPane();
        // Defer the initial plot until the scene graph is fully set up.
        Platform.runLater(() -> invalidate(false, false, false));
        // Re-plot whenever a down-sampling preference changes (threshold and
        // algorithm only matter while down-sampling is enabled).
        getBindingManager().attachListener(userPrefs.downSamplingEnabled.property(), ((observable, oldValue, newValue) -> refresh()));
        getBindingManager().attachListener(userPrefs.downSamplingThreshold.property(), ((observable, oldValue, newValue) -> {
            if (userPrefs.downSamplingEnabled.get())
                refresh();
        }));
        getBindingManager().attachListener(userPrefs.downSamplingAlgorithm.property(), ((observable, oldValue, newValue) -> {
            if (userPrefs.downSamplingEnabled.get())
                refresh();
        }));
        // Drop target for creating a new chart from a dragged tree binding;
        // it is only shown while a tree item drag is in progress.
        newChartDropTarget.setOnDragOver(getBindingManager().registerHandler(this::handleDragOverNewChartTarget));
        newChartDropTarget.setOnDragDropped(getBindingManager().registerHandler(this::handleDragDroppedONewChartTarget));
        newChartDropTarget.setOnDragEntered(getBindingManager().registerHandler(event -> newChartDropTarget.pseudoClassStateChanged(HOVER_PSEUDO_CLASS, true)));
        newChartDropTarget.setOnDragExited(getBindingManager().registerHandler(event -> newChartDropTarget.pseudoClassStateChanged(HOVER_PSEUDO_CLASS, false)));
        getBindingManager().bind(newChartDropTarget.managedProperty(), getParentController().treeItemDragAndDropInProgressProperty());
        getBindingManager().bind(newChartDropTarget.visibleProperty(), getParentController().treeItemDragAndDropInProgressProperty());
        setSelectedChart(worksheet.getSelectedChart());
    } catch (Exception e) {
        Platform.runLater(() -> Dialogs.notifyException("Error loading worksheet controller", e, root));
    }
    super.initialize(location, resources);
}
/**
 * Switches the worksheet between chart-edit mode and presentation mode.
 * <p>
 * Leaving edit mode collapses the legends pane and pins the divider to 1.0;
 * entering it restores the saved divider position.
 *
 * @param newValue true to enable edit mode, false to disable it
 */
@Override
protected void setEditChartMode(Boolean newValue) {
    if (!newValue) {
        // Suspend the binding first so pinning the divider to 1.0 does not
        // overwrite the divider position saved on the worksheet.
        getBindingManager().suspend(worksheet.dividerPositionProperty());
        splitPane.setDividerPositions(1.0);
        chartsLegendsPane.setVisible(false);
        chartsLegendsPane.setMaxHeight(0.0);
    } else {
        chartsLegendsPane.setMaxHeight(Double.MAX_VALUE);
        chartsLegendsPane.setVisible(true);
        // Restore the saved position before resuming the binding.
        splitPane.setDividerPositions(worksheet.getDividerPosition());
        getBindingManager().resume(worksheet.dividerPositionProperty());
    }
    setShowPropertiesPane(newValue);
    super.setEditChartMode(newValue);
}
/**
 * Creates a non-animated, bottom-side time axis whose zone id stays bound to
 * the worksheet's time zone.
 *
 * @return a new {@link ZonedDateTimeAxis}
 */
private ZonedDateTimeAxis buildTimeAxis() {
    var timeAxis = new ZonedDateTimeAxis(worksheet.getTimeZone());
    timeAxis.setSide(Side.BOTTOM);
    timeAxis.setAnimated(false);
    getBindingManager().bind(timeAxis.zoneIdProperty(), worksheet.timeZoneProperty());
    return timeAxis;
}
/**
 * Builds one {@link ChartViewPort} per chart of the worksheet: creates the
 * axes and the XYChart matching the chart's type, wires drag-and-drop for
 * chart selection and reordering, attaches the per-chart tool bar (close and
 * settings buttons), builds the layout-selection context menu, and finally
 * assembles the charts according to the worksheet's layout (overlaid/stacked).
 *
 * @throws IOException if a chart properties view cannot be loaded
 */
private void initChartViewPorts() throws IOException {
    // In the OVERLAID layout all charts share this x-axis; STACKED gets one each.
    ZonedDateTimeAxis defaultXAxis = buildTimeAxis();
    // Always have at least one (empty) chart to display.
    if (worksheet.getCharts().size() == 0) {
        worksheet.getCharts().add(new Chart());
    }
    for (int i = 0; i < worksheet.getCharts().size(); i++) {
        final int currentIndex = i;
        final Chart currentChart = worksheet.getCharts().get(i);
        ZonedDateTimeAxis xAxis;
        switch (worksheet.getChartLayout()) {
            case OVERLAID:
                xAxis = defaultXAxis;
                break;
            case STACKED:
            default:
                xAxis = buildTimeAxis();
                break;
        }
        // Pick the y-axis flavor matching the chart's unit prefix convention.
        StableTicksAxis<Double> yAxis;
        switch (currentChart.getUnitPrefixes()) {
            case BINARY:
                yAxis = new BinaryStableTicksAxis<>();
                break;
            case METRIC:
                yAxis = new MetricStableTicksAxis<>();
                break;
            case NONE:
            default:
                yAxis = new StableTicksAxis<>(new NoopPrefixFormatter(), 10, new double[]{1.0, 2.5, 5.0});
        }
        // NOTE(review): bound without the binding manager, unlike most other
        // bindings in this class — confirm it is released when the view closes.
        yAxis.autoRangingProperty().bindBidirectional(currentChart.autoScaleYAxisProperty());
        yAxis.setAnimated(false);
        yAxis.setTickSpacing(30);
        // Axis label mirrors "<chart name> - <unit>" and tracks both properties.
        getBindingManager().bind(yAxis.labelProperty(),
                Bindings.createStringBinding(
                        () -> String.format("%s - %s", currentChart.getName(), currentChart.getUnit()),
                        currentChart.nameProperty(),
                        currentChart.unitProperty()));
        // Instantiate the XYChart implementation matching the chart type.
        XYChart<ZonedDateTime, Double> viewPort;
        switch (currentChart.getChartType()) {
            case AREA:
                viewPort = new AreaChart<>(xAxis, yAxis);
                ((AreaChart) viewPort).setCreateSymbols(false);
                break;
            case STACKED:
                viewPort = new NaNStackedAreaChart<>(xAxis, yAxis);
                ((StackedAreaChart) viewPort).setCreateSymbols(false);
                break;
            case SCATTER:
                viewPort = new ScatterChart<>(xAxis, yAxis);
                break;
            case LINE:
            default:
                viewPort = new LineChart<>(xAxis, yAxis);
                ((LineChart) viewPort).setCreateSymbols(false);
        }
        viewPort.setCache(true);
        viewPort.setCacheHint(CacheHint.SPEED);
        viewPort.setCacheShape(true);
        viewPort.setFocusTraversable(true);
        // Legend only shown in STACKED layout while not editing.
        // NOTE(review): this binding also bypasses the binding manager.
        viewPort.legendVisibleProperty().bind(worksheet.editModeEnabledProperty()
                .not()
                .and(Bindings.equal(ChartLayout.STACKED, (ObjectProperty) worksheet.chartLayoutProperty())));
        viewPort.setLegendSide(Side.BOTTOM);
        viewPort.setAnimated(false);
        viewPorts.add(new ChartViewPort(currentChart, viewPort, buildChartPropertiesController(currentChart)));
        // Clicking a y-axis selects that chart (ctrl-click for multi-select).
        viewPort.getYAxis().addEventFilter(MouseEvent.MOUSE_CLICKED, getBindingManager().registerHandler(event -> {
            worksheet.setSelectedChart(currentIndex, event.isControlDown());
        }));
        getBindingManager().bind(((StableTicksAxis) viewPort.getYAxis()).selectionMarkerVisibleProperty(), worksheet.editModeEnabledProperty());
        // Dropping a tree binding on a chart adds series to it; hover feedback
        // differs between the two layouts.
        viewPort.setOnDragOver(getBindingManager().registerHandler(this::handleDragOverWorksheetView));
        viewPort.setOnDragDropped(getBindingManager().registerHandler(this::handleDragDroppedOnWorksheetView));
        viewPort.setOnDragEntered(getBindingManager().registerHandler(event -> {
            if (worksheet.getChartLayout() == ChartLayout.OVERLAID) {
                ((StableTicksAxis) viewPort.getYAxis()).getSelectionMarker().pseudoClassStateChanged(HOVER_PSEUDO_CLASS, true);
            } else {
                //FIXME WARNING: Caught 'java.lang.ClassCastException: class java.lang.String cannot be cast to class javafx.scene.paint.Paint
                // (java.lang.String is in module java.base of loader 'bootstrap'; javafx.scene.paint.Paint is in unnamed module of loader 'app')'
                // while converting value for '-fx-background-color' from inline style on StackedAreaChart@d8b1439[styleClass=chart]
                viewPort.setStyle("-fx-background-color: -fx-accent-translucide;");
            }
        }));
        viewPort.setOnDragExited(getBindingManager().registerHandler(event -> {
            if (worksheet.getChartLayout() == ChartLayout.OVERLAID) {
                ((StableTicksAxis) viewPort.getYAxis()).getSelectionMarker().pseudoClassStateChanged(HOVER_PSEUDO_CLASS, false);
            } else {
                viewPort.setStyle("-fx-background-color: -binjr-pane-background-color;");
            }
        }));
        // Add buttons to chart axis
        Button closeButton = new ToolButtonBuilder<Button>(getBindingManager())
                .setText("Close")
                .setTooltip("Remove this chart from the worksheet.")
                .setStyleClass("exit")
                .setIconStyleClass("cross-icon", "small-icon")
                .setAction(event -> warnAndRemoveChart(currentChart))
                // Disabled while this is the only chart on the worksheet.
                .bind(Button::disableProperty, Bindings.createBooleanBinding(() -> worksheet.getCharts().size() > 1, worksheet.getCharts()).not())
                .build(Button::new);
        ToggleButton editButton = new ToolButtonBuilder<ToggleButton>(getBindingManager())
                .setText("Settings")
                .setTooltip("Edit the chart's settings")
                .setStyleClass("dialog-button")
                .setIconStyleClass("settings-icon", "small-icon")
                .bindBidirectionnal(ToggleButton::selectedProperty, currentChart.showPropertiesProperty())
                .build(ToggleButton::new);
        var toolBar = new HBox(editButton, closeButton);
        toolBar.getStyleClass().add("worksheet-tool-bar");
        // Tool bar only appears while the mouse hovers the axis selection marker.
        // NOTE(review): unmanaged binding — confirm cleanup on close.
        toolBar.visibleProperty().bind(yAxis.getSelectionMarker().hoverProperty());
        // Dragging the axis selection marker reorders whole charts.
        yAxis.getSelectionMarker().setOnDragDetected(getBindingManager().registerHandler(event -> {
            Dragboard db = viewPort.startDragAndDrop(TransferMode.MOVE);
            db.setDragView(SnapshotUtils.scaledSnapshot(viewPort, Dialogs.getOutputScaleX(root), Dialogs.getOutputScaleY(root)));
            ClipboardContent cc = new ClipboardContent();
            cc.put(VIEWPORT_DRAG_FORMAT, currentIndex);
            db.setContent(cc);
            event.consume();
        }));
        yAxis.getSelectionMarker().setOnDragOver(getBindingManager().registerHandler(event -> {
            Dragboard db = event.getDragboard();
            if (db.hasContent(VIEWPORT_DRAG_FORMAT) && currentIndex != (Integer) db.getContent(VIEWPORT_DRAG_FORMAT)) {
                event.acceptTransferModes(TransferMode.COPY_OR_MOVE);
                event.consume();
            }
        }));
        yAxis.getSelectionMarker().setOnDragDropped(getBindingManager().registerHandler(event -> {
            Dragboard db = event.getDragboard();
            if (db.hasContent(VIEWPORT_DRAG_FORMAT)) {
                int draggedIndex = (Integer) db.getContent(VIEWPORT_DRAG_FORMAT);
                event.setDropCompleted(true);
                event.consume();
                moveChartOrder(viewPorts.get(draggedIndex).getDataStore(), currentIndex - draggedIndex);
            }
        }));
        yAxis.getSelectionMarker().getChildren().add(toolBar);
    }
    // Layout selection is only relevant with more than one chart.
    getBindingManager().bind(selectChartLayout.disableProperty(),
            Bindings.createBooleanBinding(() -> worksheet.getCharts().size() > 1, worksheet.getCharts()).not());
    // Context menu listing all available chart layouts.
    var contextMenu = new ContextMenu();
    contextMenu.getItems().setAll(Arrays.stream(ChartLayout.values()).map(chartLayout -> {
        MenuItem item = new MenuItem(chartLayout.toString());
        item.setOnAction(getBindingManager().registerHandler(event -> worksheet.setChartLayout(chartLayout)));
        return item;
    }).collect(Collectors.toList()));
    selectChartLayout.setOnAction(getBindingManager().registerHandler(event -> {
        contextMenu.show((Node) event.getSource(), Side.BOTTOM, 0, 0);
    }));
    // The screenshot canvas hosts the (normally hidden) title block plus the charts.
    this.worksheetTitleBlock = buildTitleBlock();
    screenshotCanvas = new VBox();
    screenshotCanvas.getStyleClass().add("chart-viewport-parent");
    screenshotCanvas.setAlignment(Pos.TOP_LEFT);
    screenshotCanvas.getChildren().add(worksheetTitleBlock);
    switch (worksheet.getChartLayout()) {
        case OVERLAID:
            setupOverlayChartLayout(screenshotCanvas);
            break;
        case STACKED:
            setupStackedChartLayout(screenshotCanvas);
            break;
    }
    if (viewPorts.size() > 0) {
        // Keep the UI selection in sync with the worksheet's selected chart.
        getBindingManager().attachListener(worksheet.selectedChartProperty(), (ChangeListener<Integer>) (observable, oldValue, newValue) -> setSelectedChart(newValue));
    }
}
/**
 * Applies the worksheet's chart selection to the UI: marks each chart's
 * y-axis according to the multi-selection, then ensures the chart at
 * {@code selectedChartIndex} is marked selected, shows its series details
 * pane, and — when a settings toggle is active — its properties pane.
 *
 * @param selectedChartIndex the index of the chart to select; out-of-range
 *                           values leave the details pane unchanged
 */
private void setSelectedChart(int selectedChartIndex) {
    // Mirror the worksheet's multi-selection onto every chart's y-axis marker.
    for (int i = 0; i < viewPorts.size(); i++) {
        var yAxis = (StableTicksAxis) viewPorts.get(i).getChart().getYAxis();
        yAxis.setSelected(worksheet.getMultiSelectedIndices().contains(i));
    }
    if (selectedChartIndex <= -1 || selectedChartIndex >= viewPorts.size()) {
        return;
    }
    var selected = viewPorts.get(selectedChartIndex);
    if (selected == null) {
        return;
    }
    ((StableTicksAxis) selected.getChart().getYAxis()).setSelected(true);
    // Swap in the series details pane of the newly selected chart.
    seriesTableContainer.getChildren().clear();
    seriesTableContainer.getChildren().add(selected.getSeriesDetailsPane());
    if (editButtonsGroup.getSelectedToggle() != null) {
        selected.getDataStore().setShowProperties(true);
    }
}
/**
 * Lays all chart view ports on top of each other in one plotting area: the
 * first chart keeps its y-axis on the left, subsequent charts have theirs
 * stacked to the right (offset by the widths of the axes before them), and a
 * single crosshair is shared by every chart.
 * <p>
 * Fix: the crosshair toggle buttons are now bound bidirectionally through the
 * binding manager — exactly as {@code setupStackedChartLayout} already does —
 * so the bindings are released when the controller is closed instead of
 * lingering after the worksheet is disposed.
 *
 * @param vBox the container that receives the overlaid charts
 */
private void setupOverlayChartLayout(VBox vBox) {
    var pane = new AnchorPane();
    for (int i = 0; i < viewPorts.size(); i++) {
        ChartViewPort v = viewPorts.get(i);
        XYChart<ZonedDateTime, Double> chart = v.getChart();
        int nbAdditionalCharts = worksheet.getCharts().size() - 1;
        // Combined width of all the *other* charts' y-axes (plus separations);
        // this chart's plot area is the parent width minus that amount.
        DoubleBinding n = Bindings.createDoubleBinding(
                () -> viewPorts.stream()
                        .filter(c -> !c.getChart().equals(chart))
                        .map(c -> c.getChart().getYAxis().getWidth())
                        .reduce(Double::sum).orElse(0.0) + (Y_AXIS_SEPARATION * nbAdditionalCharts),
                viewPorts.stream().map(c -> c.getChart().getYAxis().widthProperty()).toArray(ReadOnlyDoubleProperty[]::new)
        );
        HBox hBox = new HBox(chart);
        // Let mouse events fall through to the charts stacked underneath.
        hBox.setPickOnBounds(false);
        chart.setPickOnBounds(false);
        chart.getChildrenUnmodifiable()
                .stream()
                .filter(node -> node.getStyleClass().contains("chart-content"))
                .findFirst()
                .ifPresent(node -> node.setPickOnBounds(false));
        hBox.setAlignment(Pos.CENTER_LEFT);
        getBindingManager().bind(hBox.prefHeightProperty(), chartParent.heightProperty());
        getBindingManager().bind(hBox.prefWidthProperty(), chartParent.widthProperty());
        getBindingManager().bind(chart.minWidthProperty(), chartParent.widthProperty().subtract(n));
        getBindingManager().bind(chart.prefWidthProperty(), chartParent.widthProperty().subtract(n));
        getBindingManager().bind(chart.maxWidthProperty(), chartParent.widthProperty().subtract(n));
        if (i == 0) {
            chart.getYAxis().setSide(Side.LEFT);
        } else {
            // Secondary charts: axis on the right, no duplicate grid/zero lines.
            chart.getYAxis().setSide(Side.RIGHT);
            chart.setVerticalZeroLineVisible(false);
            chart.setHorizontalZeroLineVisible(false);
            chart.setVerticalGridLinesVisible(false);
            chart.setHorizontalGridLinesVisible(false);
            getBindingManager().bind(chart.translateXProperty(), viewPorts.get(0).getChart().getYAxis().widthProperty());
            // Offset each extra axis by the accumulated widths of the axes before it.
            getBindingManager().bind(chart.getYAxis().translateXProperty(), Bindings.createDoubleBinding(
                    () -> viewPorts.stream()
                            .filter(c -> viewPorts.indexOf(c) != 0 && viewPorts.indexOf(c) < viewPorts.indexOf(v))
                            .map(c -> c.getChart().getYAxis().getWidth())
                            .reduce(Double::sum).orElse(0.0) + Y_AXIS_SEPARATION * (viewPorts.indexOf(v) - 1),
                    viewPorts.stream().map(c -> c.getChart().getYAxis().widthProperty()).toArray(ReadOnlyDoubleProperty[]::new)));
        }
        pane.getChildren().add(hBox);
    }
    // One crosshair shared by all overlaid charts.
    DateTimeFormatter dateTimeFormatter = DateTimeFormatter.RFC_1123_DATE_TIME;
    LinkedHashMap<XYChart<ZonedDateTime, Double>, Function<Double, String>> map = new LinkedHashMap<>();
    viewPorts.forEach(v -> map.put(v.getChart(), v.getPrefixFormatter()::format));
    var crossHair = new XYChartCrosshair<>(map, pane, dateTimeFormatter::format);
    viewPorts.forEach(v -> v.setCrosshair(crossHair));
    crossHair.onSelectionDone(s -> {
        logger.debug(() -> "Applying zoom selection: " + s.toString());
        currentState.setSelection(convertSelection(s), true);
    });
    // Register through the binding manager so these are unbound on close
    // (consistent with setupStackedChartLayout).
    getBindingManager().bindBidirectional(hCrosshair.selectedProperty(), userPrefs.horizontalMarkerOn.property());
    getBindingManager().bindBidirectional(vCrosshair.selectedProperty(), userPrefs.verticalMarkerOn.property());
    // Markers show while the toggle is on or the matching modifier key is held.
    getBindingManager().bind(crossHair.horizontalMarkerVisibleProperty(),
            Bindings.createBooleanBinding(() -> userPrefs.shiftPressed.get() || hCrosshair.isSelected(),
                    hCrosshair.selectedProperty(),
                    userPrefs.shiftPressed.property()));
    getBindingManager().bind(crossHair.verticalMarkerVisibleProperty(),
            Bindings.createBooleanBinding(() -> userPrefs.ctrlPressed.get() || vCrosshair.isSelected(),
                    vCrosshair.selectedProperty(),
                    userPrefs.ctrlPressed.property()));
    vBox.getChildren().add(pane);
    chartParent.getChildren().add(vBox);
}
/**
 * Lays the chart view ports out vertically (one chart per row) inside a
 * scroll pane, aligns their y-axes to a fixed width, and attaches one
 * crosshair per chart, all sharing the same visibility rules.
 *
 * @param vBox the container that receives the stacked charts
 */
private void setupStackedChartLayout(VBox vBox) {
    getBindingManager().bind(vBox.prefHeightProperty(), chartParent.heightProperty());
    getBindingManager().bind(vBox.prefWidthProperty(), chartParent.widthProperty());
    for (int i = 0; i < viewPorts.size(); i++) {
        ChartViewPort v = viewPorts.get(i);
        XYChart<ZonedDateTime, Double> chart = v.getChart();
        vBox.getChildren().add(chart);
        chart.maxHeight(Double.MAX_VALUE);
        // Charts may shrink further in edit mode (80px floor vs 250px otherwise).
        // NOTE(review): this binding bypasses the binding manager — confirm cleanup.
        chart.minHeightProperty().bind(Bindings.createDoubleBinding(
                () -> worksheet.isEditModeEnabled() ?
                        Math.max(worksheet.minChartHeightProperty().doubleValue(), 80)
                        : Math.max(worksheet.minChartHeightProperty().doubleValue(), 250),
                worksheet.editModeEnabledProperty(),
                worksheet.minChartHeightProperty()
        ));
        VBox.setVgrow(chart, Priority.ALWAYS);
        chart.getYAxis().setSide(Side.LEFT);
        // Fixed axis width keeps all stacked plot areas horizontally aligned.
        chart.getYAxis().setPrefWidth(60.0);
        chart.getYAxis().setMinWidth(60.0);
        chart.getYAxis().setMaxWidth(60.0);
    }
    var scrollPane = new ScrollPane(vBox);
    scrollPane.setFitToWidth(true);
    scrollPane.getStyleClass().add("skinnable-pane-border");
    chartParent.getChildren().add(scrollPane);
    // setup crosshair
    DateTimeFormatter dateTimeFormatter = DateTimeFormatter.RFC_1123_DATE_TIME;
    LinkedHashMap<XYChart<ZonedDateTime, Double>, Function<Double, String>> map = new LinkedHashMap<>();
    map.put(viewPorts.get(0).getChart(), viewPorts.get(0).getPrefixFormatter()::format);
    var crossHair = new XYChartCrosshair<>(map, chartParent, dateTimeFormatter::format);
    var nbChartObs = new SimpleIntegerProperty(viewPorts.size());
    // Full-height markers only make sense with more than one stacked chart.
    var crosshairHeightBinding = BooleanBinding.booleanExpression(userPrefs.fullHeightCrosshairMarker.property())
            .and(Bindings.greaterThan(nbChartObs, 1));
    getBindingManager().bind(crossHair.displayFullHeightMarkerProperty(), crosshairHeightBinding);
    viewPorts.get(0).setCrosshair(crossHair);
    crossHair.onSelectionDone(s -> {
        logger.debug(() -> "Applying zoom selection: " + s.toString());
        currentState.setSelection(convertSelection(s), true);
    });
    getBindingManager().bindBidirectional(hCrosshair.selectedProperty(), userPrefs.horizontalMarkerOn.property());
    getBindingManager().bindBidirectional(vCrosshair.selectedProperty(), userPrefs.verticalMarkerOn.property());
    // Markers show while the toggle is on or the matching modifier key is held.
    getBindingManager().bind(crossHair.horizontalMarkerVisibleProperty(),
            Bindings.createBooleanBinding(() ->
                            userPrefs.shiftPressed.get() || hCrosshair.isSelected(),
                    hCrosshair.selectedProperty(),
                    userPrefs.shiftPressed.property()));
    getBindingManager().bind(crossHair.verticalMarkerVisibleProperty(),
            Bindings.createBooleanBinding(() ->
                            userPrefs.ctrlPressed.get() || vCrosshair.isSelected(),
                    vCrosshair.selectedProperty(),
                    userPrefs.ctrlPressed.property()));
    // Every additional chart gets its own crosshair with the same visibility rules.
    for (int i = 1; i < viewPorts.size(); i++) {
        LinkedHashMap<XYChart<ZonedDateTime, Double>, Function<Double, String>> m = new LinkedHashMap<>();
        m.put(viewPorts.get(i).getChart(), viewPorts.get(i).getPrefixFormatter()::format);
        XYChartCrosshair<ZonedDateTime, Double> ch = new XYChartCrosshair<>(m, chartParent, dateTimeFormatter::format);
        // NOTE(review): bound without the binding manager, unlike the first crosshair.
        ch.displayFullHeightMarkerProperty().bind(crosshairHeightBinding);
        ch.onSelectionDone(s -> {
            logger.debug(() -> "Applying zoom selection: " + s.toString());
            currentState.setSelection(convertSelection(s), true);
        });
        getBindingManager().bind(ch.horizontalMarkerVisibleProperty(),
                Bindings.createBooleanBinding(() ->
                                userPrefs.shiftPressed.get() || hCrosshair.isSelected(),
                        hCrosshair.selectedProperty(),
                        userPrefs.shiftPressed.property()));
        getBindingManager().bind(ch.verticalMarkerVisibleProperty(),
                Bindings.createBooleanBinding(() ->
                                userPrefs.ctrlPressed.get() || vCrosshair.isSelected(),
                        vCrosshair.selectedProperty(),
                        userPrefs.ctrlPressed.property()));
        viewPorts.get(i).setCrosshair(ch);
    }
}
/**
 * Builds the worksheet title block (name + time range labels). It is created
 * invisible and unmanaged; both labels stay bound to their source properties.
 *
 * @return the title block pane
 */
private Pane buildTitleBlock() {
    var titleBlock = new VBox();
    titleBlock.getStyleClass().add("worksheet-title-block");
    titleBlock.setManaged(false);
    titleBlock.setVisible(false);
    // Worksheet name, with a chart icon on the left.
    var titleLabel = new Label();
    titleLabel.getStyleClass().add("title-text");
    titleLabel.setGraphic(ToolButtonBuilder.makeIconNode(Pos.CENTER_LEFT, "chart-icon"));
    titleLabel.textProperty().bind(worksheet.nameProperty());
    // Current time range, with a clock icon on the left.
    var rangeLabel = new Label();
    rangeLabel.getStyleClass().add("range-text");
    rangeLabel.setGraphic(ToolButtonBuilder.makeIconNode(Pos.CENTER_LEFT, "time-icon"));
    rangeLabel.textProperty().bind(timeRangePicker.textProperty());
    titleBlock.getChildren().addAll(titleLabel, rangeLabel);
    return titleBlock;
}
/**
 * Exposes the currently selected time range, as managed by the picker.
 *
 * @return the selected {@link TimeRange} property
 */
@Override
public Property<TimeRange> selectedRangeProperty() {
    return timeRangePicker.selectedRangeProperty();
}
/**
 * Wires the navigation toolbar (history, refresh, snapshot, add-chart
 * buttons) and the time range picker, and creates the shared viewport state
 * from the worksheet's persisted time range.
 */
private void initNavigationPane() {
    backButton.setOnAction(getBindingManager().registerHandler(this::handleHistoryBack));
    forwardButton.setOnAction(getBindingManager().registerHandler(this::handleHistoryForward));
    refreshButton.setOnAction(getBindingManager().registerHandler(this::handleRefresh));
    snapshotButton.setOnAction(getBindingManager().registerHandler(this::handleTakeSnapshot));
    // History buttons are disabled when the corresponding stack is empty.
    getBindingManager().bind(backButton.disableProperty(), worksheet.getHistory().backward().emptyProperty());
    getBindingManager().bind(forwardButton.disableProperty(), worksheet.getHistory().forward().emptyProperty());
    addChartButton.setOnAction(getBindingManager().registerHandler(this::handleAddNewChart));
    currentState = new ChartViewportsState(this, worksheet.getFromDateTime(), worksheet.getToDateTime());
    // NOTE(review): the two bidirectional bindings and the listener added at the
    // bottom of this method are not registered with the binding manager —
    // confirm they are released when the controller closes.
    timeRangePicker.timeRangeLinkedProperty().bindBidirectional(worksheet.timeRangeLinkedProperty());
    timeRangePicker.zoneIdProperty().bindBidirectional(worksheet.timeZoneProperty());
    timeRangePicker.initSelectedRange(TimeRange.of(currentState.getStartX(), currentState.getEndX()));
    // A range picked by the user is applied as a selection across all charts.
    timeRangePicker.setOnSelectedRangeChanged((observable, oldValue, newValue) -> {
        currentState.setSelection(currentState.selectTimeRange(newValue.getBeginning(), newValue.getEnd()), true);
    });
    // "Reset interval" falls back to the first chart's initial range, or to
    // the last 24 hours when that cannot be determined.
    timeRangePicker.setOnResetInterval(() -> {
        try {
            return worksheet.getCharts().get(0).getInitialTimeRange();
        } catch (Exception e) {
            Dialogs.notifyException("Error resetting range", e);
        }
        return TimeRange.of(ZonedDateTime.now().minusHours(24), ZonedDateTime.now());
    });
    // Keep the picker's display in sync with programmatic range changes.
    currentState.timeRangeProperty().addListener((observable, oldValue, newValue) -> {
        if (newValue != null) {
            timeRangePicker.updateSelectedRange(newValue);
        }
    });
}
/**
 * Re-keys a per-XYChart selection map onto the worksheet's {@link Chart} data
 * stores, by matching each XYChart node to its owning view port. Charts with
 * no matching view port are silently dropped.
 *
 * @param selection the selection keyed by XYChart node
 * @return the same selections keyed by {@link Chart}
 */
private Map<Chart, XYChartSelection<ZonedDateTime, Double>> convertSelection(Map<XYChart<ZonedDateTime, Double>, XYChartSelection<ZonedDateTime, Double>> selection) {
    Map<Chart, XYChartSelection<ZonedDateTime, Double>> converted = new HashMap<>();
    for (var entry : selection.entrySet()) {
        // Find the first view port owning this XYChart node, if any.
        for (var viewPort : viewPorts) {
            if (viewPort.getChart().equals(entry.getKey())) {
                converted.put(viewPort.getDataStore(), entry.getValue());
                break;
            }
        }
    }
    return converted;
}
/**
 * Handles the "add chart" button: appends a new, empty {@link Chart} to the
 * worksheet's chart list.
 *
 * @param actionEvent the button action event (unused)
 */
private void handleAddNewChart(ActionEvent actionEvent) {
    worksheet.getCharts().add(new Chart());
}
/**
 * Builds the series legend/table pane for every viewport on the worksheet:
 * one {@link TitledPane} per chart, containing a {@link TableView} of its
 * series (visibility checkbox, color, name, min/max/avg/current value, path)
 * plus a title bar with chart-selection, edit, move and close controls.
 * <p>
 * Also wires the edit-toggle group to the chart-properties side pane and binds
 * the split-pane divider position to the worksheet model.
 */
private void initTableViewPane() {
    for (ChartViewPort currentViewPort : viewPorts) {
        currentViewPort.getSeriesTable().getSelectionModel().setSelectionMode(SelectionMode.MULTIPLE);
        // --- "show all" visibility column, headed by a tri-state checkbox ---
        CheckBox showAllCheckBox = new CheckBox();
        TableColumn<TimeSeriesInfo<Double>, Boolean> visibleColumn = new TableColumn<>();
        visibleColumn.setGraphic(showAllCheckBox);
        visibleColumn.setSortable(false);
        visibleColumn.setResizable(false);
        visibleColumn.setPrefWidth(32);
        // Keeps the header checkbox in sync with the individual series selections:
        // indeterminate when only some series are selected, checked when all are.
        InvalidationListener isVisibleListener = (observable) -> {
            boolean andAll = true;
            boolean orAll = false;
            for (TimeSeriesInfo<Double> t : currentViewPort.getDataStore().getSeries()) {
                andAll &= t.isSelected();
                orAll |= t.isSelected();
            }
            showAllCheckBox.setIndeterminate(Boolean.logicalXor(andAll, orAll));
            showAllCheckBox.setSelected(andAll);
        };
        // Re-plot when a series' selected state changes; overlaid layouts need a
        // full invalidate because all charts share the same plot area.
        ChangeListener<Boolean> refreshListener = (observable, oldValue, newValue) -> {
            if (worksheet.getChartLayout() == ChartLayout.OVERLAID) {
                invalidate(false, false, false);
            } else {
                plotChart(currentViewPort, false);
            }
        };
        currentViewPort.getDataStore().getSeries().forEach(doubleTimeSeriesInfo -> {
            getBindingManager().attachListener(doubleTimeSeriesInfo.selectedProperty(), refreshListener);
            getBindingManager().attachListener(doubleTimeSeriesInfo.selectedProperty(), isVisibleListener);
            // Explicitly call the listener to initialize the proper status of the checkbox
            isVisibleListener.invalidated(null);
        });
        visibleColumn.setCellValueFactory(p -> p.getValue().selectedProperty());
        visibleColumn.setCellFactory(CheckBoxTableCell.forTableColumn(visibleColumn));
        // "Show all" toggles every series at once; listeners are detached first so a
        // single refresh is triggered instead of one per series, then re-attached.
        showAllCheckBox.setOnAction(getBindingManager().registerHandler(event -> {
            ChangeListener<Boolean> r = (observable, oldValue, newValue) -> {
                if (worksheet.getChartLayout() == ChartLayout.OVERLAID) {
                    invalidate(false, false, false);
                } else {
                    plotChart(currentViewPort, false);
                }
            };
            boolean b = ((CheckBox) event.getSource()).isSelected();
            currentViewPort.getDataStore().getSeries().forEach(s -> getBindingManager().detachAllChangeListeners(s.selectedProperty()));
            currentViewPort.getDataStore().getSeries().forEach(t -> t.setSelected(b));
            r.changed(null, null, null);
            currentViewPort.getDataStore().getSeries().forEach(s -> getBindingManager().attachListener(s.selectedProperty(), r));
        }));
        // --- remaining table columns ---
        DecimalFormatTableCellFactory<TimeSeriesInfo<Double>, String> alignRightCellFactory = new DecimalFormatTableCellFactory<>();
        alignRightCellFactory.setAlignment(TextAlignment.RIGHT);
        TableColumn<TimeSeriesInfo<Double>, Color> colorColumn = new TableColumn<>();
        colorColumn.setSortable(false);
        colorColumn.setResizable(false);
        colorColumn.setPrefWidth(32);
        TableColumn<TimeSeriesInfo<Double>, String> nameColumn = new TableColumn<>("Name");
        nameColumn.setSortable(false);
        nameColumn.setPrefWidth(160);
        // Display name is only editable while the chart's properties pane is shown.
        getBindingManager().bind(nameColumn.editableProperty(), currentViewPort.getDataStore().showPropertiesProperty());
        nameColumn.setCellValueFactory(new PropertyValueFactory<>("displayName"));
        nameColumn.setCellFactory(TextFieldTableCell.forTableColumn());
        nameColumn.setOnEditCommit(getBindingManager().registerHandler(
                t -> t.getTableView().getItems().get(
                        t.getTablePosition().getRow()).setDisplayName(t.getNewValue()))
        );
        TableColumn<TimeSeriesInfo<Double>, String> minColumn = new TableColumn<>("Min.");
        minColumn.setSortable(false);
        minColumn.setPrefWidth(75);
        minColumn.setCellFactory(alignRightCellFactory);
        TableColumn<TimeSeriesInfo<Double>, String> maxColumn = new TableColumn<>("Max.");
        maxColumn.setSortable(false);
        maxColumn.setPrefWidth(75);
        maxColumn.setCellFactory(alignRightCellFactory);
        TableColumn<TimeSeriesInfo<Double>, String> avgColumn = new TableColumn<>("Avg.");
        avgColumn.setSortable(false);
        avgColumn.setPrefWidth(75);
        avgColumn.setCellFactory(alignRightCellFactory);
        TableColumn<TimeSeriesInfo<Double>, String> currentColumn = new TableColumn<>("Current");
        currentColumn.setSortable(false);
        currentColumn.setPrefWidth(75);
        currentColumn.setCellFactory(alignRightCellFactory);
        currentColumn.getStyleClass().add("column-bold-text");
        TableColumn<TimeSeriesInfo<Double>, String> pathColumn = new TableColumn<>("Path");
        pathColumn.setSortable(false);
        pathColumn.setPrefWidth(400);
        // "Current" column is only relevant while the vertical crosshair marker is shown.
        currentColumn.setVisible(getSelectedViewPort().getCrosshair().isVerticalMarkerVisible());
        getBindingManager().attachListener(getSelectedViewPort().getCrosshair().verticalMarkerVisibleProperty(),
                (ChangeListener<Boolean>) (observable, oldValue, newValue) -> currentColumn.setVisible(newValue));
        pathColumn.setCellValueFactory(p -> new SimpleStringProperty(p.getValue().getBinding().getTreeHierarchy()));
        colorColumn.setCellFactory(param -> new ColorTableCell<>(colorColumn));
        colorColumn.setCellValueFactory(p -> p.getValue().displayColorProperty());
        // Stats cells recompute whenever the underlying processor is replaced (i.e. on re-plot).
        avgColumn.setCellValueFactory(p -> Bindings.createStringBinding(
                () -> p.getValue().getProcessor() == null ? "NaN" : currentViewPort.getPrefixFormatter().format(p.getValue().getProcessor().getAverageValue()),
                p.getValue().processorProperty()));
        minColumn.setCellValueFactory(p -> Bindings.createStringBinding(
                () -> p.getValue().getProcessor() == null ? "NaN" : currentViewPort.getPrefixFormatter().format(p.getValue().getProcessor().getMinValue()),
                p.getValue().processorProperty()));
        maxColumn.setCellValueFactory(p -> Bindings.createStringBinding(
                () -> p.getValue().getProcessor() == null ? "NaN" : currentViewPort.getPrefixFormatter().format(p.getValue().getProcessor().getMaxValue()),
                p.getValue().processorProperty()));
        // "Current" tracks the crosshair's X position and shows the nearest sample value.
        currentColumn.setCellValueFactory(p -> Bindings.createStringBinding(
                () -> {
                    if (p.getValue().getProcessor() == null) {
                        return "NaN";
                    }
                    return currentViewPort.getPrefixFormatter().format(p.getValue()
                            .getProcessor()
                            .tryGetNearestValue(getSelectedViewPort().getCrosshair().getCurrentXValue())
                            .orElse(Double.NaN));
                }, getSelectedViewPort().getCrosshair().currentXValueProperty()));
        currentViewPort.getSeriesTable().setRowFactory(this::seriesTableRowFactory);
        // DELETE key removes the selected series from the chart.
        currentViewPort.getSeriesTable().setOnKeyReleased(getBindingManager().registerHandler(event -> {
            if (event.getCode().equals(KeyCode.DELETE)) {
                removeSelectedBinding(currentViewPort.getSeriesTable());
            }
        }));
        currentViewPort.getSeriesTable().setItems(currentViewPort.getDataStore().getSeries());
        currentViewPort.getSeriesTable().getColumns().addAll(visibleColumn, colorColumn, nameColumn, minColumn, maxColumn, avgColumn, currentColumn, pathColumn);
        TableViewUtils.autoFillTableWidthWithLastColumn(currentViewPort.getSeriesTable());
        // --- titled pane wrapping the table; its graphic region hosts the chart's title bar ---
        TitledPane newPane = new TitledPane(currentViewPort.getDataStore().getName(), currentViewPort.getSeriesTable());
        newPane.setMinHeight(90.0);
        newPane.setMaxHeight(Double.MAX_VALUE);
        newPane.setOnDragOver(getBindingManager().registerHandler(this::handleDragOverWorksheetView));
        newPane.setOnDragDropped(getBindingManager().registerHandler(this::handleDragDroppedOnLegendTitledPane));
        // The viewport is stashed as user data so drop handlers can find their target chart.
        newPane.setUserData(currentViewPort);
        GridPane titleRegion = new GridPane();
        titleRegion.setHgap(5);
        titleRegion.getColumnConstraints().add(new ColumnConstraints(USE_COMPUTED_SIZE, USE_COMPUTED_SIZE, USE_COMPUTED_SIZE, Priority.NEVER, HPos.LEFT, false));
        titleRegion.getColumnConstraints().add(new ColumnConstraints(USE_COMPUTED_SIZE, USE_COMPUTED_SIZE, USE_COMPUTED_SIZE, Priority.ALWAYS, HPos.LEFT, true));
        titleRegion.getColumnConstraints().add(new ColumnConstraints(USE_COMPUTED_SIZE, USE_COMPUTED_SIZE, USE_COMPUTED_SIZE, Priority.NEVER, HPos.RIGHT, false));
        getBindingManager().bind(titleRegion.minWidthProperty(), newPane.widthProperty().subtract(15));
        getBindingManager().bind(titleRegion.maxWidthProperty(), newPane.widthProperty().subtract(15));
        // Chart name label, hidden while the inline edit fields are shown.
        Label label = new Label();
        getBindingManager().bind(label.textProperty(), currentViewPort.getDataStore().nameProperty());
        getBindingManager().bind(label.visibleProperty(), currentViewPort.getDataStore().showPropertiesProperty().not());
        // --- inline edit fields (name, unit, unit prefix), collapsed to zero height when hidden ---
        HBox editFieldsGroup = new HBox();
        DoubleBinding db = Bindings.createDoubleBinding(() -> editFieldsGroup.isVisible() ? USE_COMPUTED_SIZE : 0.0, editFieldsGroup.visibleProperty());
        getBindingManager().bind(editFieldsGroup.prefHeightProperty(), db);
        getBindingManager().bind(editFieldsGroup.maxHeightProperty(), db);
        getBindingManager().bind(editFieldsGroup.minHeightProperty(), db);
        getBindingManager().bind(editFieldsGroup.visibleProperty(), currentViewPort.getDataStore().showPropertiesProperty());
        editFieldsGroup.setSpacing(5);
        TextField chartNameField = new TextField();
        chartNameField.textProperty().bindBidirectional(currentViewPort.getDataStore().nameProperty());
        TextField unitNameField = new TextField();
        unitNameField.textProperty().bindBidirectional(currentViewPort.getDataStore().unitProperty());
        ChoiceBox<UnitPrefixes> unitPrefixChoiceBox = new ChoiceBox<>();
        unitPrefixChoiceBox.getItems().setAll(UnitPrefixes.values());
        unitPrefixChoiceBox.getSelectionModel().select(currentViewPort.getDataStore().getUnitPrefixes());
        getBindingManager().bind(currentViewPort.getDataStore().unitPrefixesProperty(), unitPrefixChoiceBox.getSelectionModel().selectedItemProperty());
        HBox.setHgrow(chartNameField, Priority.ALWAYS);
        // Double-clicking the title bar switches to edit mode and focuses the name field.
        titleRegion.setOnMouseClicked(getBindingManager().registerHandler(event -> {
            if (event.getClickCount() == 2) {
                chartNameField.selectAll();
                chartNameField.requestFocus();
                currentViewPort.getDataStore().setShowProperties(true);
            }
        }));
        editFieldsGroup.getChildren().addAll(chartNameField, unitNameField, unitPrefixChoiceBox);
        // *** Toolbar ***
        HBox toolbar = new HBox();
        toolbar.getStyleClass().add("title-pane-tool-bar");
        toolbar.setAlignment(Pos.CENTER);
        // Hamburger button opening a context menu to pick the selected chart.
        Button selectChartButton = new ToolButtonBuilder<Button>(getBindingManager())
                .setText("Select")
                .setTooltip("Select a chart")
                .setStyleClass("dialog-button")
                .setIconStyleClass("hamburger-icon", "small-icon")
                .setAction(event -> {
                    var btn = (Button) event.getSource();
                    Bounds bounds = btn.getBoundsInLocal();
                    Bounds screenBounds = btn.localToScreen(bounds);
                    int x = (int) screenBounds.getMinX();
                    int y = (int) screenBounds.getMinY();
                    if (btn.getContextMenu() != null) {
                        btn.getContextMenu().show(btn, x, y + btn.getHeight());
                    }
                })
                .build(Button::new);
        ContextMenu menu = new ContextMenu();
        selectChartButton.setContextMenu(menu);
        // One radio item per viewport; selecting it makes that chart the worksheet's
        // selected chart, and model changes are reflected back into the toggle group.
        ToggleGroup group = new ToggleGroup();
        for (int i = 0; i < viewPorts.size(); i++) {
            var m = new RadioMenuItem();
            final int chartIdx = i;
            getBindingManager().bind(m.textProperty(), viewPorts.get(i).getDataStore().nameProperty());
            m.setToggleGroup(group);
            m.setOnAction(getBindingManager().registerHandler(event -> {
                worksheet.setSelectedChart(chartIdx);
            }));
            menu.getItems().add(m);
            if (worksheet.getSelectedChart() == i) {
                group.selectToggle(m);
            }
        }
        getBindingManager().attachListener(worksheet.selectedChartProperty(), (ChangeListener<Integer>) (obs, oldVal, newVal) -> {
            if (newVal >= 0 && newVal < group.getToggles().size()) {
                group.selectToggle(group.getToggles().get(newVal));
            }
        });
        // Close button is disabled while this is the worksheet's only chart.
        Button closeButton = new ToolButtonBuilder<Button>(getBindingManager())
                .setText("Close")
                .setTooltip("Remove this chart from the worksheet.")
                .setStyleClass("exit")
                .setIconStyleClass("cross-icon", "small-icon")
                .setAction(event -> warnAndRemoveChart(currentViewPort.getDataStore()))
                .bind(Button::disableProperty, Bindings.createBooleanBinding(() -> worksheet.getCharts().size() > 1, worksheet.getCharts()).not())
                .build(Button::new);
        ToggleButton editButton = new ToolButtonBuilder<ToggleButton>(getBindingManager())
                .setText("Settings")
                .setTooltip("Edit the chart's settings")
                .setStyleClass("dialog-button")
                .setIconStyleClass("settings-icon", "small-icon")
                .bindBidirectionnal(ToggleButton::selectedProperty, currentViewPort.getDataStore().showPropertiesProperty())
                .build(ToggleButton::new);
        editButtonsGroup.getToggles().add(editButton);
        // Up/down buttons reorder the chart; only visible while in edit mode.
        Button moveUpButton = new ToolButtonBuilder<Button>(getBindingManager())
                .setText("Up")
                .setTooltip("Move the chart up the list.")
                .setStyleClass("dialog-button")
                .setIconStyleClass("upArrow-icon")
                .bind(Node::visibleProperty, currentViewPort.getDataStore().showPropertiesProperty())
                .setAction(event -> moveChartOrder(currentViewPort.getDataStore(), -1))
                .build(Button::new);
        Button moveDownButton = new ToolButtonBuilder<Button>(getBindingManager())
                .setText("Down")
                .setTooltip("Move the chart down the list.")
                .setStyleClass("dialog-button")
                .setIconStyleClass("downArrow-icon")
                .bind(Node::visibleProperty, currentViewPort.getDataStore().showPropertiesProperty())
                .setAction(event -> moveChartOrder(currentViewPort.getDataStore(), 1))
                .build(Button::new);
        toolbar.getChildren().addAll(moveUpButton, moveDownButton, editButton, closeButton);
        titleRegion.getChildren().addAll(selectChartButton, label, editFieldsGroup, toolbar);
        HBox hBox = new HBox();
        hBox.setAlignment(Pos.CENTER);
        // Label and edit fields occupy the same grid cell; visibility decides which shows.
        GridPane.setConstraints(selectChartButton, 0, 0, 1, 1, HPos.LEFT, VPos.CENTER);
        GridPane.setConstraints(label, 1, 0, 1, 1, HPos.LEFT, VPos.CENTER);
        GridPane.setConstraints(editFieldsGroup, 1, 0, 1, 1, HPos.LEFT, VPos.CENTER);
        GridPane.setConstraints(toolbar, 2, 0, 1, 1, HPos.RIGHT, VPos.CENTER);
        newPane.setCollapsible(false);
        newPane.setGraphic(titleRegion);
        newPane.setContentDisplay(ContentDisplay.GRAPHIC_ONLY);
        newPane.setAnimated(false);
        currentViewPort.setSeriesDetailsPane(newPane);
    }
    // Expand/collapse the chart properties side pane when any edit toggle is (de)selected.
    getBindingManager().attachListener(editButtonsGroup.selectedToggleProperty(), (ChangeListener<Toggle>) (observable, oldValue, newValue) -> {
        if (newValue != null) {
            chartProperties.expand();
        } else {
            chartProperties.collapse();
        }
    });
    chartProperties.setSibling(chartView);
    if (editButtonsGroup.getSelectedToggle() != null) {
        chartProperties.expand();
    }
    // Persist the divider position into the worksheet model.
    splitPane.setDividerPositions(worksheet.getDividerPosition());
    getBindingManager().bind(worksheet.dividerPositionProperty(), splitPane.getDividers().get(0).positionProperty());
}
/**
 * Returns the {@link ChartViewPort} stashed in the given pane's user data, if any.
 *
 * @param pane the pane whose attached viewport should be retrieved (may be {@code null})
 * @return the attached viewport, or an empty {@link Optional}
 */
@Override
public Optional<ChartViewPort> getAttachedViewport(TitledPane pane) {
    if (pane == null) {
        return Optional.empty();
    }
    return (pane.getUserData() instanceof ChartViewPort viewPort)
            ? Optional.of(viewPort)
            : Optional.empty();
}
/**
 * Asks the user to confirm removal of the given chart — or, when the chart is
 * part of the current multi-selection, of every selected chart — and removes
 * them from the worksheet on approval.
 *
 * @param currentChart the chart the remove action was invoked on
 */
private void warnAndRemoveChart(Chart currentChart) {
    var selectedCharts = new ArrayList<Chart>();
    for (int idx = 0; idx < viewPorts.size(); idx++) {
        if (worksheet.getMultiSelectedIndices().contains(idx)) {
            selectedCharts.add(viewPorts.get(idx).getDataStore());
        }
    }
    // If the targeted chart belongs to the multi-selection, remove the whole selection.
    List<Chart> chartsToRemove = selectedCharts.contains(currentChart) ? selectedCharts : List.of(currentChart);
    String chartNames = chartsToRemove.stream().map(Chart::getName).collect(Collectors.joining("\", \""));
    if (Dialogs.confirmDialog(root, "Are you sure you want to remove chart \"" + chartNames + "\"?",
            "", ButtonType.YES, ButtonType.NO) == ButtonType.YES) {
        worksheet.getCharts().removeAll(chartsToRemove);
    }
}
/**
 * Moves a chart up or down in the worksheet's chart list.
 *
 * @param chart the chart to move
 * @param pos   the offset to move the chart by (-1 moves it up, +1 moves it down)
 */
private void moveChartOrder(Chart chart, int pos) {
    int idx = worksheet.getCharts().indexOf(chart);
    // Suppress the controller reload normally triggered by chart-list changes while
    // the chart is temporarily removed; the re-insertion below triggers the reload.
    this.preventReload = true;
    try {
        worksheet.getCharts().remove(chart);
    } finally {
        this.preventReload = false;
    }
    worksheet.getCharts().add(idx + pos, chart);
}
/**
 * Accepts MOVE transfers for time series bindings dragged over the worksheet view.
 * <p>
 * Does nothing once this controller has been closed, so that handlers still
 * registered on a disposed worksheet cannot act on drag gestures (the fields
 * they rely on are nulled out in {@link #close()}).
 *
 * @param event the drag event
 */
private void handleDragOverWorksheetView(DragEvent event) {
    if (closed.get()) {
        // Inhibit drag & drop handlers on closed worksheets.
        return;
    }
    Dragboard db = event.getDragboard();
    if (db.hasContent(DataFormat.lookupMimeType(TimeSeriesBinding.MIME_TYPE))) {
        event.acceptTransferModes(TransferMode.MOVE);
        event.consume();
    }
}
/**
 * Accepts COPY transfers for time series bindings dragged over the
 * "new chart" drop target.
 * <p>
 * Does nothing once this controller has been closed, so that handlers still
 * registered on a disposed worksheet cannot act on drag gestures.
 *
 * @param event the drag event
 */
private void handleDragOverNewChartTarget(DragEvent event) {
    if (closed.get()) {
        // Inhibit drag & drop handlers on closed worksheets.
        return;
    }
    Dragboard db = event.getDragboard();
    if (db.hasContent(DataFormat.lookupMimeType(TimeSeriesBinding.MIME_TYPE))) {
        event.acceptTransferModes(TransferMode.COPY);
        event.consume();
    }
}
/**
 * Handles a drop on a chart's legend titled pane: flattens the selected tree
 * item's leaves into time series bindings and adds them to the chart attached
 * to the pane (via its user data).
 * <p>
 * Does nothing once this controller has been closed, so that handlers still
 * registered on a disposed worksheet cannot act on drop events ({@code worksheet}
 * and {@code viewPorts} are nulled out in {@link #close()}).
 *
 * @param event the drag event
 */
private void handleDragDroppedOnLegendTitledPane(DragEvent event) {
    if (closed.get()) {
        // Inhibit drag & drop handlers on closed worksheets.
        return;
    }
    Dragboard db = event.getDragboard();
    if (db.hasContent(DataFormat.lookupMimeType(TimeSeriesBinding.MIME_TYPE))) {
        TreeView<SourceBinding> treeView = getParentController().getSelectedTreeView();
        if (treeView != null) {
            TreeItem<SourceBinding> item = treeView.getSelectionModel().getSelectedItem();
            if (item != null) {
                // Bring the receiving window to the front before showing any dialog.
                Stage targetStage = (Stage) ((Node) event.getSource()).getScene().getWindow();
                if (targetStage != null) {
                    targetStage.requestFocus();
                }
                try {
                    TitledPane droppedPane = (TitledPane) event.getSource();
                    ChartViewPort viewPort = (ChartViewPort) droppedPane.getUserData();
                    addBindings(TreeViewUtils.flattenLeaves(item, true).stream()
                            .filter(b -> b instanceof TimeSeriesBinding)
                            .map(b -> (TimeSeriesBinding) b).collect(Collectors.toList()), viewPort.getDataStore());
                } catch (Exception e) {
                    Dialogs.notifyException("Error adding bindings to existing worksheet", e, root);
                }
                logger.debug("dropped to " + event.toString());
            } else {
                logger.warn("Cannot complete drag and drop operation: selected TreeItem is null");
            }
        } else {
            logger.warn("Cannot complete drag and drop operation: selected TreeView is null");
        }
        event.consume();
    }
}
/**
 * Handles a drop on the worksheet view: if the drop landed on a specific chart,
 * adds the selected bindings to it directly; otherwise shows a context menu
 * letting the user pick the target chart (or a new one).
 * <p>
 * Does nothing once this controller has been closed, so that handlers still
 * registered on a disposed worksheet cannot act on drop events.
 *
 * @param event the drag event
 */
private void handleDragDroppedOnWorksheetView(DragEvent event) {
    if (closed.get()) {
        // Inhibit drag & drop handlers on closed worksheets.
        return;
    }
    Dragboard db = event.getDragboard();
    if (db.hasContent(DataFormat.lookupMimeType(TimeSeriesBinding.MIME_TYPE))) {
        TreeView<SourceBinding> treeView = getParentController().getSelectedTreeView();
        if (treeView != null) {
            TreeItem<SourceBinding> item = treeView.getSelectionModel().getSelectedItem();
            if (item != null) {
                Stage targetStage = (Stage) ((Node) event.getSource()).getScene().getWindow();
                if (targetStage != null) {
                    targetStage.requestFocus();
                }
                // Resolve the chart under the drop location, if the source is a chart node.
                Chart targetChart = null;
                if (event.getSource() instanceof XYChart<?, ?>) {
                    for (var v : viewPorts) {
                        if (v.getChart().equals(event.getSource())) {
                            targetChart = v.getDataStore();
                        }
                    }
                }
                var items = treeView.getSelectionModel().getSelectedItems();
                if (targetChart == null) {
                    // FIX: ContextMenu.show expects screen coordinates for both axes;
                    // the previous code mixed getScreenX() with getSceneY().
                    getChartListContextMenu(items).show((Node) event.getTarget(), event.getScreenX(), event.getScreenY());
                } else {
                    addToCurrentWorksheet(items, targetChart);
                }
            } else {
                logger.warn("Cannot complete drag and drop operation: selected TreeItem is null");
            }
        } else {
            logger.warn("Cannot complete drag and drop operation: selected TreeView is null");
        }
        event.consume();
    }
}
/**
 * Handles a drop on the "new chart" target: schedules the selected tree items
 * to be added to a brand new chart on the worksheet.
 * <p>
 * Does nothing once this controller has been closed, so that handlers still
 * registered on a disposed worksheet cannot act on drop events.
 * (NOTE(review): method name is missing an 'n' — "ONewChart" — kept as-is
 * because it may be referenced externally, e.g. from FXML.)
 *
 * @param event the drag event
 */
private void handleDragDroppedONewChartTarget(DragEvent event) {
    if (closed.get()) {
        // Inhibit drag & drop handlers on closed worksheets.
        return;
    }
    Dragboard db = event.getDragboard();
    if (db.hasContent(DataFormat.lookupMimeType(TimeSeriesBinding.MIME_TYPE))) {
        TreeView<SourceBinding> treeView = getParentController().getSelectedTreeView();
        if (treeView != null) {
            Collection<TreeItem<SourceBinding>> items = treeView.getSelectionModel().getSelectedItems();
            if (items != null && !items.isEmpty()) {
                Stage targetStage = (Stage) ((Node) event.getSource()).getScene().getWindow();
                if (targetStage != null) {
                    targetStage.requestFocus();
                }
                // Schedule for later execution in order to let other drag and dropped event to complete before modal dialog gets displayed
                Platform.runLater(() -> addToNewChart(items));
            } else {
                logger.warn("Cannot complete drag and drop operation: selected TreeItem is null");
            }
        } else {
            logger.warn("Cannot complete drag and drop operation: selected TreeView is null");
        }
        event.consume();
    }
}
/**
 * Creates new charts from the given tree items (after user confirmation inside
 * {@code treeItemsAsChartList}) and appends them to the worksheet.
 *
 * @param treeItems the tree items whose leaves should populate the new charts
 */
private void addToNewChart(Collection<TreeItem<SourceBinding>> treeItems) {
    try {
        treeItemsAsChartList(treeItems, root).ifPresent(charts -> {
            // Set the time range of the whole worksheet to accommodate the new bindings
            // if there are no other series present.
            if (worksheet.getTotalNumberOfSeries() == 0) {
                try {
                    this.timeRangePicker.selectedRangeProperty().setValue(charts.get(0).getInitialTimeRange());
                } catch (DataAdapterException e) {
                    logger.error("Failed to reset time range", e);
                }
            }
            worksheet.getCharts().addAll(charts);
        });
    } catch (Exception e) {
        Dialogs.notifyException("Error adding bindings to new chart", e, null);
    }
}
/**
 * Adds all leaf bindings found under the given tree items to an existing chart
 * of the current worksheet.
 *
 * @param treeItems   the tree items whose leaves should be added
 * @param targetChart the chart the bindings are added to
 */
private void addToCurrentWorksheet(Collection<TreeItem<SourceBinding>> treeItems, Chart targetChart) {
    try {
        // Schedule for later execution in order to let other drag and dropped event to complete before modal dialog gets displayed
        Platform.runLater(() -> {
            if (treeItems == null || treeItems.isEmpty()) {
                return;
            }
            List<SourceBinding> leaves = new ArrayList<>();
            for (TreeItem<SourceBinding> item : treeItems) {
                leaves.addAll(TreeViewUtils.flattenLeaves(item, true));
            }
            addBindings(leaves, targetChart);
        });
    } catch (Exception e) {
        Dialogs.notifyException("Error adding bindings to existing worksheet", e, root);
    }
}
/**
 * Builds a context menu listing every chart of the worksheet as a drop target
 * for the given tree items, plus an entry to create a new chart.
 *
 * @param items the tree items to add when a menu entry is picked
 * @return the populated context menu
 */
@Override
public ContextMenu getChartListContextMenu(final Collection<TreeItem<SourceBinding>> items) {
    List<MenuItem> chartEntries = new ArrayList<>();
    for (Chart chart : worksheet.getCharts()) {
        MenuItem entry = new MenuItem(chart.getName());
        entry.setOnAction(getBindingManager().registerHandler(e -> addToCurrentWorksheet(items, chart)));
        chartEntries.add(entry);
    }
    ContextMenu contextMenu = new ContextMenu(chartEntries.toArray(MenuItem[]::new));
    MenuItem newChart = new MenuItem("Add to new chart");
    newChart.setOnAction((getBindingManager().registerHandler(event -> addToNewChart(new ArrayList<>(items)))));
    contextMenu.getItems().addAll(new SeparatorMenuItem(), newChart);
    return contextMenu;
}
/**
 * Closes this controller exactly once: tears down the chart state, unbinds the
 * crosshair preferences, disposes every viewport and drops references so the
 * worksheet can be garbage collected.
 */
@Override
public void close() {
    super.close();
    // compareAndSet guarantees the teardown below runs at most once.
    if (closed.compareAndSet(false, true)) {
        logger.debug(() -> "Closing worksheetController " + this.toString());
        currentState.close();
        hCrosshair.selectedProperty().unbindBidirectional(userPrefs.horizontalMarkerOn.property());
        vCrosshair.selectedProperty().unbindBidirectional(userPrefs.verticalMarkerOn.property());
        currentState = null;
        IOUtils.closeAll(viewPorts);
        viewPorts = null;
        timeRangePicker.dispose();
        this.worksheet = null;
    }
}
/**
 * Returns the path of the FXML resource backing this controller's view.
 *
 * @return the FXML resource path
 */
@Override
public String getView() {
    return WORKSHEET_VIEW_FXML;
}
/**
 * Registers the listeners that require this controller to be rebuilt: changes
 * to the chart layout, to a chart's unit prefixes or type, and additions or
 * removals in the chart list all invoke {@code action} with this controller.
 *
 * @param action the callback that reloads the worksheet controller
 */
@Override
public void setReloadRequiredHandler(Consumer<WorksheetController> action) {
    // Property changes that invalidate the whole view: reload and dispose this controller.
    ChangeListener<Object> controllerReloadListener = (observable, oldValue, newValue) -> {
        if (newValue != null) {
            logger.debug(() -> "Reloading worksheet controller because property changed from: " + oldValue + " to " + newValue);
            action.accept(this);
            this.close();
        }
    };
    getBindingManager().attachListener(worksheet.chartLayoutProperty(), controllerReloadListener);
    this.worksheet.getCharts().forEach(c -> {
        getBindingManager().attachListener(c.unitPrefixesProperty(), controllerReloadListener);
        getBindingManager().attachListener(c.chartTypeProperty(), controllerReloadListener);
    });
    // Structural changes to the chart list: select the affected chart, then reload
    // unless preventReload is set (see moveChartOrder, which removes + re-adds).
    ListChangeListener<Chart> chartListListener = c -> {
        boolean reloadNeeded = false;
        while (c.next()) {
            if (c.wasPermutated()) {
                for (int i = c.getFrom(); i < c.getTo(); ++i) {
                    // nothing for now
                }
            } else if (c.wasUpdated()) {
                // nothing for now
            } else {
                if (c.wasAdded()) {
                    getParentController().getWorkspace().setPresentationMode(false);
                    // Select the most recently added chart.
                    List<? extends Chart> added = c.getAddedSubList();
                    Chart chart = added.get(added.size() - 1);
                    int chartIndex = worksheet.getCharts().indexOf(chart);
                    worksheet.setSelectedChart(chartIndex);
                    reloadNeeded = true;
                }
                if (c.wasRemoved()) {
                    // Clamp the selection to the shrunken list.
                    worksheet.setSelectedChart(Math.min(c.getList().size() - 1, c.getFrom()));
                    reloadNeeded = true;
                }
            }
        }
        if (!preventReload && reloadNeeded) {
            logger.debug(() -> "Reloading worksheet controller because list changed: " + c + " in controller " + this);
            action.accept(this);
        } else {
            logger.debug(() -> "Reload explicitly prevented on change " + c);
        }
    };
    getBindingManager().attachListener(worksheet.getCharts(), chartListListener);
}
/**
 * Adds the time series bindings among {@code sourceBindings} to the given chart:
 * warns if the chart would exceed the configured series count, wires selection
 * listeners (re-plot + "show all" checkbox sync) for each new series, and
 * finally re-plots. Resets the worksheet's time range when these are the first
 * series on the worksheet.
 *
 * @param sourceBindings the bindings to add; non-time-series entries are ignored
 * @param targetChart    the chart the series are added to
 */
private void addBindings(Collection<SourceBinding> sourceBindings, Chart targetChart) {
    // Keep only time series bindings.
    Collection<TimeSeriesBinding> timeSeriesBindings = new ArrayList<>();
    for (var sb : sourceBindings) {
        if (sb instanceof TimeSeriesBinding timeSeriesBinding) {
            timeSeriesBindings.add(timeSeriesBinding);
        }
    }
    // Warn before piling a large number of series onto a single chart.
    if (timeSeriesBindings.size() >= userPrefs.maxSeriesPerChartBeforeWarning.get().intValue()) {
        if (Dialogs.confirmDialog(root,
                "This action will add " + timeSeriesBindings.size() + " series on a single chart.",
                "Are you sure you want to proceed?",
                ButtonType.YES, ButtonType.NO) != ButtonType.YES) {
            return;
        }
    }
    // Keeps the header "show all" checkbox of the target chart's series table in
    // sync: indeterminate when only some series are selected, checked when all are.
    InvalidationListener isVisibleListener = (observable) -> {
        viewPorts.stream().filter(v -> v.getDataStore().equals(targetChart)).findFirst().ifPresent(v -> {
            boolean andAll = true;
            boolean orAll = false;
            for (TimeSeriesInfo<Double> t : targetChart.getSeries()) {
                andAll &= t.isSelected();
                orAll |= t.isSelected();
            }
            CheckBox showAllCheckBox = (CheckBox) v.getSeriesTable().getColumns().get(0).getGraphic();
            showAllCheckBox.setIndeterminate(Boolean.logicalXor(andAll, orAll));
            showAllCheckBox.setSelected(andAll);
        });
    };
    for (TimeSeriesBinding b : timeSeriesBindings) {
        TimeSeriesInfo<Double> newSeries = TimeSeriesInfo.fromBinding(b);
        // Re-plot the owning viewport whenever this series is (de)selected.
        getBindingManager().attachListener(newSeries.selectedProperty(),
                (observable, oldValue, newValue) ->
                        viewPorts.stream()
                                .filter(v -> v.getDataStore().equals(targetChart))
                                .findFirst()
                                .ifPresent(v -> plotChart(v, false))
        );
        getBindingManager().attachListener(newSeries.selectedProperty(), isVisibleListener);
        targetChart.addSeries(newSeries);
        // Explicitly call the listener to initialize the proper status of the checkbox
        isVisibleListener.invalidated(null);
    }
    // Set the time range of the whole worksheet to accommodate the new bindings
    // if there are no other series present.
    if (worksheet.getTotalNumberOfSeries() == timeSeriesBindings.size()) {
        try {
            this.timeRangePicker.selectedRangeProperty().setValue(targetChart.getInitialTimeRange());
        } catch (DataAdapterException e) {
            logger.error("Failed to reset time range", e);
        }
    }
    invalidate(false, false, false);
}
/**
 * Removes the series currently selected in the given table from its chart and
 * refreshes the plot.
 *
 * @param seriesTable the series table whose selected rows should be removed
 */
private void removeSelectedBinding(TableView<TimeSeriesInfo<Double>> seriesTable) {
    // Snapshot the selection first: removing items mutates the selection model.
    var toRemove = new ArrayList<>(seriesTable.getSelectionModel().getSelectedItems());
    seriesTable.getItems().removeAll(toRemove);
    seriesTable.getSelectionModel().clearSelection();
    invalidate(false, false, false);
}
/**
 * Re-plots every chart, forcing data to be re-fetched from the sources.
 */
@Override
public void refresh() {
    invalidate(false, false, true);
}
/**
 * Restores the previous selection from the worksheet's navigation history, if any.
 */
@Override
public void navigateBackward() {
    worksheet.getHistory().getPrevious().ifPresent(h -> currentState.setSelection(h, false));
}
/**
 * Restores the next selection from the worksheet's navigation history, if any.
 */
@Override
public void navigateForward() {
    worksheet.getHistory().getNext().ifPresent(h -> currentState.setSelection(h, false));
}
/** FXML action handler: navigates backward in the selection history. */
@FXML
private void handleHistoryBack(ActionEvent actionEvent) {
    navigateBackward();
}
/** FXML action handler: navigates forward in the selection history. */
@FXML
private void handleHistoryForward(ActionEvent actionEvent) {
    navigateForward();
}
/** FXML action handler: refreshes all charts from their sources. */
@FXML
private void handleRefresh(ActionEvent actionEvent) {
    this.refresh();
}
/** FXML action handler: captures a snapshot image of the worksheet. */
@FXML
private void handleTakeSnapshot(ActionEvent actionEvent) {
    saveSnapshot();
}
/**
 * Records the current selection into the navigation history and, unless
 * {@code dontPlotChart} is set, re-plots every viewport.
 *
 * @param saveToHistory whether the current selection should be pushed onto the history
 * @param dontPlotChart when true, only the history is updated; no plotting occurs
 * @param forceRefresh  when true, data is re-fetched from the sources instead of the cache
 * @return a future completed once all viewports have been (re)plotted
 */
public CompletableFuture<?> invalidate(boolean saveToHistory, boolean dontPlotChart, boolean forceRefresh) {
    var p = Profiler.start("Invalidate worksheet: " + getWorksheet().getName() +
            " [saveToHistory=" + saveToHistory + ", " +
            "dontPlotChart=" + dontPlotChart + ", " +
            "forceRefresh=" + forceRefresh + "]", logger::perf);
    worksheet.getHistory().setHead(currentState.asSelection(), saveToHistory);
    logger.debug(() -> worksheet.getHistory().backward().dump());
    if (dontPlotChart) {
        return CompletableFuture.completedFuture(null);
    }
    // Plot every viewport concurrently and complete when all of them are done.
    CompletableFuture<?>[] futurePlots = new CompletableFuture<?>[viewPorts.size()];
    for (int i = 0; i < viewPorts.size(); i++) {
        futurePlots[i] = plotChart(viewPorts.get(i), forceRefresh);
    }
    var invalidatedFuture = CompletableFuture.allOf(futurePlots);
    // Close the profiler span only after every plot task has finished.
    invalidatedFuture.whenCompleteAsync((unused, throwable) -> p.close());
    return invalidatedFuture;
}
/**
 * Asynchronously fetches the data for the given viewport's chart over the
 * currently selected time range, builds the XYChart series off the UI thread,
 * then installs them on the chart and restyles the legend on success.
 *
 * @param viewPort     the viewport to plot
 * @param forceRefresh when true, data is re-fetched from the sources instead of the cache
 * @return a future tracking the async plot task (completed immediately when the
 *         viewport has no current selection state)
 */
public CompletableFuture<?> plotChart(ChartViewPort viewPort, boolean forceRefresh) {
    if (!currentState.get(viewPort.getDataStore()).isPresent()) {
        return CompletableFuture.completedFuture(null);
    }
    XYChartSelection<ZonedDateTime, Double> currentSelection = currentState.get(viewPort.getDataStore()).get().asSelection();
    logger.debug(() -> "currentSelection=" + (currentSelection == null ? "null" : currentSelection.toString()));
    // Track the number of in-flight plot tasks (drives the busy indicator).
    nbBusyPlotTasks.setValue(nbBusyPlotTasks.get() + 1);
    return AsyncTaskManager.getInstance().submit(() -> {
                // Background task: fetch data and build one XYChart series per
                // selected series that actually has data.
                viewPort.getDataStore().fetchDataFromSources(currentSelection.getStartX(), currentSelection.getEndX(), forceRefresh);
                return viewPort.getDataStore().getSeries()
                        .stream()
                        .filter(series -> {
                            if (series.getProcessor() == null) {
                                logger.warn("Series " + series.getDisplayName() + " does not contain any data to plot");
                                return false;
                            }
                            if (!series.isSelected()) {
                                logger.debug(() -> "Series " + series.getDisplayName() + " is not selected");
                                return false;
                            }
                            return true;
                        })
                        .map(ts -> makeXYChartSeries(viewPort.getDataStore(), ts))
                        .collect(Collectors.toList());
            },
            event -> {
                // Success handler (UI thread): install the series and recolor legend symbols.
                try {
                    if (!closed.get()) {
                        nbBusyPlotTasks.setValue(nbBusyPlotTasks.get() - 1);
                        viewPort.getChart().getData().setAll((Collection<? extends XYChart.Series<ZonedDateTime, Double>>) event.getSource().getValue());
                        // Match each legend symbol's color to its (selected) series, in order.
                        for (Node n : viewPort.getChart().getChildrenUnmodifiable()) {
                            if (n instanceof Legend legend) {
                                int i = 0;
                                for (Legend.LegendItem legendItem : legend.getItems()) {
                                    legendItem.getSymbol().setStyle("-fx-background-color: " +
                                            colorToRgbaString(viewPort.getDataStore()
                                                    .getSeries()
                                                    .stream()
                                                    .filter(TimeSeriesInfo::isSelected)
                                                    .collect(Collectors.toList())
                                                    .get(i)
                                                    .getDisplayColor()));
                                    i++;
                                }
                            }
                        }
                        if (worksheet.getChartLayout() == ChartLayout.OVERLAID) {
                            // Force a redraw of the charts and their Y Axis considering their proper width.
                            new DelayedAction(() -> viewPort.getChart().resize(0.0, 0.0), Duration.millis(50)).submit();
                        }
                    }
                } catch (Exception e) {
                    Dialogs.notifyException("Unexpected error while plotting data", e, root);
                }
            },
            event -> {
                // Failure handler (UI thread): decrement the busy counter and report.
                if (!closed.get()) {
                    nbBusyPlotTasks.setValue(nbBusyPlotTasks.get() - 1);
                    Dialogs.notifyException("Failed to retrieve data from source", event.getSource().getException(), root);
                }
            });
}
/**
 * Builds a JavaFX {@link XYChart.Series} from a {@link TimeSeriesInfo}: copies
 * the processed data points, sets a colored legend swatch as the series node,
 * and binds stroke/fill/size styling to the chart's and series' properties
 * according to the chart type (scatter, area/stacked, line).
 *
 * @param currentChart the chart model providing type and styling properties
 * @param series       the series whose data and display settings are used
 * @return the styled {@code XYChart.Series} ready to be added to the chart
 */
private XYChart.Series<ZonedDateTime, Double> makeXYChartSeries(Chart currentChart, TimeSeriesInfo<Double> series) {
    try (Profiler p = Profiler.start("Building XYChart.Series data for" + series.getDisplayName(), logger::perf)) {
        XYChart.Series<ZonedDateTime, Double> newSeries = new XYChart.Series<>();
        newSeries.setName(series.getDisplayName());
        // 10x10 colored square used as the series' legend symbol.
        var r = new Region();
        r.setPrefSize(10, 10);
        r.setMaxSize(10, 10);
        r.setMinSize(10, 10);
        r.setBackground(new Background(new BackgroundFill(series.getDisplayColor(), null, null)));
        newSeries.setNode(r);
        newSeries.getData().setAll(series.getProcessor().getData());
        if (currentChart.getChartType() == ChartType.SCATTER) {
            // Scatter plots get one circle per data point, bound to stroke width and color.
            for (var data : newSeries.getData()) {
                var c = new Circle();
                getBindingManager().bind(c.radiusProperty(), currentChart.strokeWidthProperty());
                getBindingManager().bind(c.fillProperty(), series.displayColorProperty());
                data.setNode(c);
            }
        } else {
            // The series node is created lazily by the chart; style it once it appears.
            getBindingManager().attachListener(newSeries.nodeProperty(), (ChangeListener<Node>) (node, oldNode, newNode) -> {
                if (newNode != null) {
                    switch (currentChart.getChartType()) {
                        case AREA:
                        case STACKED:
                            // Area charts expose a Group of [fill path, stroke path].
                            ObservableList<Node> children = ((Group) newNode).getChildren();
                            if (children != null && children.size() >= 1) {
                                Path stroke = (Path) children.get(1);
                                Path fill = (Path) children.get(0);
                                logger.trace(() -> "Setting color of series " + series.getBinding().getLabel() + " to " + series.getDisplayColor());
                                stroke.visibleProperty().bind(currentChart.showAreaOutlineProperty());
                                stroke.strokeWidthProperty().bind(currentChart.strokeWidthProperty());
                                stroke.strokeProperty().bind(series.displayColorProperty());
                                // Fill derives its opacity from the chart's graph opacity setting.
                                fill.fillProperty().bind(Bindings.createObjectBinding(
                                        () -> series.getDisplayColor().deriveColor(0.0, 1.0, 1.0, currentChart.getGraphOpacity()),
                                        series.displayColorProperty(),
                                        currentChart.graphOpacityProperty()));
                            }
                            break;
                        case LINE:
                            Path stroke = (Path) newNode;
                            logger.trace(() -> "Setting color of series " + series.getBinding().getLabel() + " to " + series.getDisplayColor());
                            stroke.strokeWidthProperty().bind(currentChart.strokeWidthProperty());
                            stroke.strokeProperty().bind(series.displayColorProperty());
                            break;
                        default:
                            break;
                    }
                }
            });
        }
        return newSeries;
    }
}
/**
 * Captures a scaled snapshot of the worksheet and prompts the user to save it
 * as a PNG file. The UI is temporarily switched into a "snapshot" layout
 * (title block shown, navigation toolbar hidden, edit mode off) and restored
 * afterwards, whatever the outcome.
 */
@Override
public void saveSnapshot() {
    WritableImage snapImg;
    boolean wasModeEdit = worksheet.isEditModeEnabled();
    try {
        // Invalidate chart nodes cache so that it is re-rendered when scaled up
        // and not just stretched for snapshot
        viewPorts.forEach(v -> v.getChart().setCache(false));
        worksheet.setEditModeEnabled(false);
        worksheetTitleBlock.setManaged(true);
        worksheetTitleBlock.setVisible(true);
        navigationToolbar.setManaged(false);
        navigationToolbar.setVisible(false);
        // AUTO uses the display's actual output scale; otherwise the user's fixed factor.
        var scaleX = userPrefs.snapshotOutputScale.get() == SnapshotOutputScale.AUTO ?
                Dialogs.getOutputScaleX(root) :
                userPrefs.snapshotOutputScale.get().getScaleFactor();
        var scaleY = userPrefs.snapshotOutputScale.get() == SnapshotOutputScale.AUTO ?
                Dialogs.getOutputScaleY(root) :
                userPrefs.snapshotOutputScale.get().getScaleFactor();
        snapImg = SnapshotUtils.scaledSnapshot(screenshotCanvas, scaleX, scaleY);
    } catch (Exception e) {
        Dialogs.notifyException("Failed to create snapshot", e, root);
        return;
    } finally {
        // Always restore the normal (non-snapshot) UI layout.
        viewPorts.forEach(v -> v.getChart().setCache(true));
        worksheet.setEditModeEnabled(wasModeEdit);
        navigationToolbar.setManaged(true);
        navigationToolbar.setVisible(true);
        worksheetTitleBlock.setManaged(false);
        worksheetTitleBlock.setVisible(false);
    }
    FileChooser fileChooser = new FileChooser();
    fileChooser.setTitle("Save SnapShot");
    fileChooser.getExtensionFilters().add(new FileChooser.ExtensionFilter("Image Files", "*.png"));
    Dialogs.getInitialDir(UserHistory.getInstance().mostRecentSaveFolders).ifPresent(fileChooser::setInitialDirectory);
    fileChooser.setInitialFileName(String.format("binjr_snapshot_%s.png", worksheet.getName()));
    File selectedFile = fileChooser.showSaveDialog(Dialogs.getStage(root));
    if (selectedFile != null) {
        try {
            // Remember the chosen folder for the next save dialog.
            if (selectedFile.getParent() != null) {
                UserHistory.getInstance().mostRecentSaveFolders.push(selectedFile.getParentFile().toPath());
            }
            ImageIO.write(
                    SwingFXUtils.fromFXImage(snapImg, null),
                    "png",
                    selectedFile);
        } catch (IOException e) {
            Dialogs.notifyException("Failed to save snapshot to disk", e, root);
        }
    }
}
/**
 * Returns the viewport for the worksheet's currently selected chart, clamping
 * the selected index into the valid range of {@code viewPorts}.
 *
 * @return the currently selected viewport
 * @throws IllegalStateException if no viewport could be retrieved
 */
private ChartViewPort getSelectedViewPort() {
    int clampedIdx = Math.max(0, Math.min(viewPorts.size() - 1, worksheet.getSelectedChart()));
    ChartViewPort selected = viewPorts.get(clampedIdx);
    if (selected == null) {
        throw new IllegalStateException("Could not retrieve selected viewport on current worksheet");
    }
    return selected;
}
    /**
     * Builds a table row that supports reordering the series list via drag and drop.
     * A drag serializes the source row index onto the dragboard under
     * SERIALIZED_MIME_TYPE; a drop moves the dragged series to the target position,
     * reselects it, and refreshes the worksheet.
     *
     * @param tv the table view whose items are reordered by the drop handler
     * @return a row wired with drag-detected, drag-over and drag-dropped handlers
     */
    private TableRow<TimeSeriesInfo<Double>> seriesTableRowFactory(TableView<TimeSeriesInfo<Double>> tv) {
        TableRow<TimeSeriesInfo<Double>> row = new TableRow<>();
        // Handlers are registered through the binding manager so they can be
        // released in bulk when the worksheet is disposed.
        row.setOnDragDetected(getBindingManager().registerHandler(event -> {
            if (!row.isEmpty()) {
                Integer index = row.getIndex();
                Dragboard db = row.startDragAndDrop(TransferMode.MOVE);
                // Use a display-scaled snapshot of the row as the drag image.
                db.setDragView(SnapshotUtils.scaledSnapshot(row, Dialogs.getOutputScaleX(root), Dialogs.getOutputScaleY(root)));
                ClipboardContent cc = new ClipboardContent();
                cc.put(SERIALIZED_MIME_TYPE, index);
                db.setContent(cc);
                event.consume();
            }
        }));
        row.setOnDragOver(getBindingManager().registerHandler(event -> {
            Dragboard db = event.getDragboard();
            // Accept only row-index payloads, and never a drop onto the dragged row itself.
            if (db.hasContent(SERIALIZED_MIME_TYPE) && row.getIndex() != (Integer) db.getContent(SERIALIZED_MIME_TYPE)) {
                event.acceptTransferModes(TransferMode.COPY_OR_MOVE);
                event.consume();
            }
        }));
        row.setOnDragDropped(getBindingManager().registerHandler(event -> {
            Dragboard db = event.getDragboard();
            if (db.hasContent(SERIALIZED_MIME_TYPE)) {
                int draggedIndex = (Integer) db.getContent(SERIALIZED_MIME_TYPE);
                TimeSeriesInfo<Double> draggedseries = tv.getItems().remove(draggedIndex);
                int dropIndex;
                if (row.isEmpty()) {
                    // Dropped past the last populated row: append at the end.
                    dropIndex = tv.getItems().size();
                } else {
                    dropIndex = row.getIndex();
                }
                tv.getItems().add(dropIndex, draggedseries);
                event.setDropCompleted(true);
                tv.getSelectionModel().clearAndSelect(dropIndex);
                // Refresh the worksheet so the new ordering is rendered.
                // NOTE(review): the three false flags match other call sites — confirm semantics.
                invalidate(false, false, false);
                event.consume();
            }
        }));
        return row;
    }
@Override
public void toggleShowPropertiesPane() {
ChartViewPort currentViewport = getSelectedViewPort();
if (currentViewport != null) {
currentViewport.getDataStore().setShowProperties((editButtonsGroup.getSelectedToggle() == null));
}
}
@Override
public void setShowPropertiesPane(boolean value) {
ChartViewPort currentViewport = getSelectedViewPort();
if (currentViewport != null) {
currentViewport.getDataStore().setShowProperties(value);
}
}
    @Override
    public List<ChartViewPort> getViewPorts() {
        // NOTE(review): returns the internal list itself (no defensive copy),
        // so callers share — and can mutate — the controller's view-port collection.
        return viewPorts;
    }
}
|
Inhibit drag&drop event handlers on closed worksheets
|
binjr-core/src/main/java/eu/binjr/core/controllers/XYChartsWorksheetController.java
|
Inhibit drag&drop event handlers on closed worksheets
|
|
Java
|
apache-2.0
|
f8b61f130eaa3612167c3b441d1c44c0b6ac07d9
| 0
|
bd-dev-mobileum/presto,bd-dev-mobileum/presto,bd-dev-mobileum/presto,bd-dev-mobileum/presto,bd-dev-mobileum/presto
|
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.execution;
import com.facebook.presto.util.SetThreadName;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Objects;
import com.google.common.base.Ticker;
import com.google.common.collect.Sets;
import com.google.common.primitives.Ints;
import com.google.common.primitives.Longs;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.SettableFuture;
import io.airlift.concurrent.ThreadPoolExecutorMBean;
import io.airlift.log.Logger;
import io.airlift.stats.DistributionStat;
import io.airlift.units.Duration;
import org.weakref.jmx.Managed;
import org.weakref.jmx.Nested;
import javax.annotation.PostConstruct;
import javax.annotation.PreDestroy;
import javax.annotation.concurrent.GuardedBy;
import javax.annotation.concurrent.NotThreadSafe;
import javax.annotation.concurrent.ThreadSafe;
import javax.inject.Inject;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Queue;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.PriorityBlockingQueue;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicLongArray;
import static com.facebook.presto.util.Threads.threadsNamed;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;
@ThreadSafe
public class TaskExecutor
{
    private static final Logger log = Logger.get(TaskExecutor.class);
    // each task is guaranteed this minimum number of concurrently running splits
    private static final int GUARANTEED_SPLITS_PER_TASK = 3;
    // each time we run a split, run it for this length before returning to the pool
    private static final Duration SPLIT_RUN_QUANTA = new Duration(1, TimeUnit.SECONDS);
    // monotonically increasing ids for runner threads and prioritized splits
    private static final AtomicLong NEXT_RUNNER_ID = new AtomicLong();
    private static final AtomicLong NEXT_WORKER_ID = new AtomicLong();
    // unbounded pool; the number of live Runner loops is managed explicitly (see addRunnerThread)
    private final ExecutorService executor;
    private final ThreadPoolExecutorMBean executorMBean;
    private final int runnerThreads;
    private final int minimumNumberOfTasks;
    // time source for measuring split execution; injectable for tests
    private final Ticker ticker;
    @GuardedBy("this")
    private final List<TaskHandle> tasks;
    // every split admitted to scheduling (pending, running or blocked)
    private final Set<PrioritizedSplitRunner> allSplits = new HashSet<>();
    // splits waiting for a runner thread, ordered by PrioritizedSplitRunner.compareTo
    private final PriorityBlockingQueue<PrioritizedSplitRunner> pendingSplits;
    private final Set<PrioritizedSplitRunner> runningSplits = Sets.newSetFromMap(new ConcurrentHashMap<PrioritizedSplitRunner, Boolean>());
    private final Set<PrioritizedSplitRunner> blockedSplits = Sets.newSetFromMap(new ConcurrentHashMap<PrioritizedSplitRunner, Boolean>());
    // one counter per priority level (calculatePriorityLevel defines 5 levels)
    private final AtomicLongArray completedTasksPerLevel = new AtomicLongArray(5);
    private final DistributionStat queuedTime = new DistributionStat();
    private final DistributionStat wallTime = new DistributionStat();
    // set by stop(); checked by start() and by the Runner loop
    private boolean closed;
    /** Creates an executor sized from {@link TaskManagerConfig#getMaxShardProcessorThreads()}. */
    @Inject
    public TaskExecutor(TaskManagerConfig config)
    {
        this(checkNotNull(config, "config is null").getMaxShardProcessorThreads());
    }
    /** Creates an executor with the given number of runner threads and the system ticker. */
    public TaskExecutor(int runnerThreads)
    {
        this(runnerThreads, Ticker.systemTicker());
    }
    /**
     * @param runnerThreads number of split-processing threads; must be at least 1
     * @param ticker time source used to measure split execution time (injectable for tests)
     */
    @VisibleForTesting
    public TaskExecutor(int runnerThreads, Ticker ticker)
    {
        checkArgument(runnerThreads > 0, "runnerThreads must be at least 1");
        // we manage the thread pool size directly, so create an unlimited pool
        this.executor = Executors.newCachedThreadPool(threadsNamed("task-processor-%d"));
        this.executorMBean = new ThreadPoolExecutorMBean((ThreadPoolExecutor) executor);
        this.runnerThreads = runnerThreads;
        this.ticker = checkNotNull(ticker, "ticker is null");
        // we assume we need at least two tasks per runner thread to keep the system busy
        this.minimumNumberOfTasks = 2 * this.runnerThreads;
        this.pendingSplits = new PriorityBlockingQueue<>(Runtime.getRuntime().availableProcessors() * 10);
        this.tasks = new LinkedList<>();
    }
    /** Launches the configured number of runner threads; fails if stop() was already called. */
    @PostConstruct
    public synchronized void start()
    {
        checkState(!closed, "TaskExecutor is closed");
        for (int i = 0; i < runnerThreads; i++) {
            addRunnerThread();
        }
    }
    /** Marks the executor closed and interrupts all runner threads via shutdownNow. */
    @PreDestroy
    public synchronized void stop()
    {
        closed = true;
        executor.shutdownNow();
    }
    @Override
    public synchronized String toString()
    {
        return Objects.toStringHelper(this)
                .add("runnerThreads", runnerThreads)
                .add("allSplits", allSplits.size())
                .add("pendingSplits", pendingSplits.size())
                .add("runningSplits", runningSplits.size())
                .add("blockedSplits", blockedSplits.size())
                .toString();
    }
    /** Submits one more Runner loop to the pool; a no-op if the pool has been shut down. */
    private synchronized void addRunnerThread()
    {
        try {
            executor.execute(new Runner());
        }
        catch (RejectedExecutionException ignored) {
            // executor was shut down concurrently (see stop()); intentionally swallowed
        }
    }
    /** Registers a new task; its splits are added later via {@link #enqueueSplit}. */
    public synchronized TaskHandle addTask(TaskId taskId)
    {
        TaskHandle taskHandle = new TaskHandle(checkNotNull(taskId, "taskId is null"));
        tasks.add(taskHandle);
        return taskHandle;
    }
    /** Destroys the task's remaining splits, deregisters it, and records its completion level. */
    public synchronized void removeTask(TaskHandle taskHandle)
    {
        taskHandle.destroy();
        tasks.remove(taskHandle);
        // record completed stats
        long threadUsageNanos = taskHandle.getThreadUsageNanos();
        int priorityLevel = calculatePriorityLevel(threadUsageNanos);
        completedTasksPerLevel.incrementAndGet(priorityLevel);
    }
    /**
     * Queues a split for the given task, starting it immediately if the task is below
     * its guaranteed split count or global capacity is free. The returned future
     * completes when the split finishes (or fails).
     */
    public synchronized ListenableFuture<?> enqueueSplit(TaskHandle taskHandle, SplitRunner taskSplit)
    {
        PrioritizedSplitRunner prioritizedSplitRunner = new PrioritizedSplitRunner(taskHandle, taskSplit, ticker);
        taskHandle.addSplit(prioritizedSplitRunner);
        scheduleTaskIfNecessary(taskHandle);
        addNewEntrants();
        return prioritizedSplitRunner.getFinishedFuture();
    }
    /** Starts a split immediately, bypassing per-task and global admission limits. */
    public synchronized ListenableFuture<?> forceRunSplit(TaskHandle taskHandle, SplitRunner taskSplit)
    {
        PrioritizedSplitRunner prioritizedSplitRunner = new PrioritizedSplitRunner(taskHandle, taskSplit, ticker);
        // Note: we do not record queued time for forced splits
        startSplit(prioritizedSplitRunner);
        return prioritizedSplitRunner.getFinishedFuture();
    }
    /** Bookkeeping after a split completes: record wall time and admit queued work. */
    private synchronized void splitFinished(PrioritizedSplitRunner split)
    {
        allSplits.remove(split);
        pendingSplits.remove(split);
        TaskHandle taskHandle = split.getTaskHandle();
        taskHandle.splitComplete(split);
        wallTime.add(System.nanoTime() - split.createdNanos);
        scheduleTaskIfNecessary(taskHandle);
        addNewEntrants();
    }
    private synchronized void scheduleTaskIfNecessary(TaskHandle taskHandle)
    {
        // if task has less than the minimum guaranteed splits running,
        // immediately schedule a new split for this task. This assures
        // that a task gets its fair amount of consideration (you have to
        // have splits to be considered for running on a thread).
        if (taskHandle.getRunningSplits() < GUARANTEED_SPLITS_PER_TASK) {
            PrioritizedSplitRunner split = taskHandle.pollNextSplit();
            if (split != null) {
                startSplit(split);
                queuedTime.add(System.nanoTime() - split.createdNanos);
            }
        }
    }
    /**
     * Tops up global concurrency: while fewer than minimumNumberOfTasks splits are
     * admitted, pulls queued splits from the tasks in round-robin order.
     */
    private synchronized void addNewEntrants()
    {
        int running = allSplits.size();
        for (int i = 0; i < minimumNumberOfTasks - running; i++) {
            PrioritizedSplitRunner split = pollNextSplitWorker();
            if (split == null) {
                break;
            }
            queuedTime.add(System.nanoTime() - split.createdNanos);
            startSplit(split);
        }
    }
    /** Admits a split: tracked in allSplits and offered to the runner threads. */
    private synchronized void startSplit(PrioritizedSplitRunner split)
    {
        allSplits.add(split);
        pendingSplits.put(split);
    }
    /** Returns the next queued split across all tasks (round-robin), or null if none. */
    private synchronized PrioritizedSplitRunner pollNextSplitWorker()
    {
        // todo find a better algorithm for this
        // find the first task that produces a split, then move that task to the
        // end of the task list, so we get round robin
        for (Iterator<TaskHandle> iterator = tasks.iterator(); iterator.hasNext(); ) {
            TaskHandle task = iterator.next();
            PrioritizedSplitRunner split = task.pollNextSplit();
            if (split != null) {
                // move task to end of list
                iterator.remove();
                // CAUTION: we are modifying the list in the loop which would normally
                // cause a ConcurrentModificationException but we exit immediately
                tasks.add(task);
                return split;
            }
        }
        return null;
    }
    /**
     * Per-task scheduling state: the splits still queued for the task, the splits
     * currently admitted to the executor, and the task's accumulated thread usage
     * (which drives its priority level). Not internally synchronized; callers in
     * TaskExecutor access it from synchronized methods.
     */
    @NotThreadSafe
    public static class TaskHandle
    {
        private final TaskId taskId;
        // splits waiting for admission, FIFO
        private final Queue<PrioritizedSplitRunner> queuedSplits = new ArrayDeque<>(10);
        // splits admitted to the executor (pending, running or blocked)
        private final List<PrioritizedSplitRunner> runningSplits = new ArrayList<>(10);
        private final AtomicLong taskThreadUsageNanos = new AtomicLong();
        private TaskHandle(TaskId taskId)
        {
            this.taskId = taskId;
        }
        /** Adds the measured duration to the task total and returns the new total. */
        private long addThreadUsageNanos(long durationNanos)
        {
            return taskThreadUsageNanos.addAndGet(durationNanos);
        }
        private TaskId getTaskId()
        {
            return taskId;
        }
        /** Destroys every queued and admitted split and clears both collections. */
        private void destroy()
        {
            for (PrioritizedSplitRunner runningSplit : runningSplits) {
                runningSplit.destroy();
            }
            runningSplits.clear();
            for (PrioritizedSplitRunner queuedSplit : queuedSplits) {
                queuedSplit.destroy();
            }
            queuedSplits.clear();
        }
        private void addSplit(PrioritizedSplitRunner split)
        {
            queuedSplits.add(split);
        }
        private int getRunningSplits()
        {
            return runningSplits.size();
        }
        private long getThreadUsageNanos()
        {
            return taskThreadUsageNanos.get();
        }
        /** Moves the next queued split (if any) into the admitted set and returns it. */
        private PrioritizedSplitRunner pollNextSplit()
        {
            PrioritizedSplitRunner split = queuedSplits.poll();
            if (split != null) {
                runningSplits.add(split);
            }
            return split;
        }
        private void splitComplete(PrioritizedSplitRunner split)
        {
            runningSplits.remove(split);
            split.destroy();
        }
        @Override
        public String toString()
        {
            return Objects.toStringHelper(this)
                    .add("taskId", taskId)
                    .toString();
        }
    }
private static class PrioritizedSplitRunner
implements Comparable<PrioritizedSplitRunner>
{
private final long createdNanos = System.nanoTime();
private final TaskHandle taskHandle;
private final long workerId;
private final SplitRunner split;
private final Ticker ticker;
private final SettableFuture<?> finishedFuture = SettableFuture.create();
private final AtomicBoolean initialized = new AtomicBoolean();
private final AtomicBoolean destroyed = new AtomicBoolean();
private final AtomicInteger priorityLevel = new AtomicInteger();
private final AtomicLong threadUsageNanos = new AtomicLong();
private final AtomicLong lastRun = new AtomicLong();
private PrioritizedSplitRunner(TaskHandle taskHandle, SplitRunner split, Ticker ticker)
{
this.taskHandle = taskHandle;
this.split = split;
this.ticker = ticker;
this.workerId = NEXT_WORKER_ID.getAndIncrement();
}
private TaskHandle getTaskHandle()
{
return taskHandle;
}
private SettableFuture<?> getFinishedFuture()
{
return finishedFuture;
}
public void initializeIfNecessary()
{
if (initialized.compareAndSet(false, true)) {
split.initialize();
}
}
public void destroy()
{
try {
split.close();
}
catch (RuntimeException e) {
log.error(e, "Error closing split for task %s", taskHandle.getTaskId());
}
destroyed.set(true);
}
public boolean isFinished()
{
boolean finished = split.isFinished();
if (finished) {
finishedFuture.set(null);
}
return finished || destroyed.get();
}
public ListenableFuture<?> process()
throws Exception
{
try {
long start = ticker.read();
ListenableFuture<?> blocked = split.processFor(SPLIT_RUN_QUANTA);
long endTime = ticker.read();
// update priority level base on total thread usage of task
long durationNanos = endTime - start;
long threadUsageNanos = taskHandle.addThreadUsageNanos(durationNanos);
this.threadUsageNanos.set(threadUsageNanos);
priorityLevel.set(calculatePriorityLevel(threadUsageNanos));
// record last run for prioritization within a level
lastRun.set(endTime);
return blocked;
}
catch (Throwable e) {
finishedFuture.setException(e);
throw e;
}
}
public boolean updatePriorityLevel()
{
int newPriority = calculatePriorityLevel(taskHandle.getThreadUsageNanos());
if (newPriority == priorityLevel.getAndSet(newPriority)) {
return false;
}
// update thread usage while if level changed
threadUsageNanos.set(taskHandle.getThreadUsageNanos());
return true;
}
@Override
public int compareTo(PrioritizedSplitRunner o)
{
int level = priorityLevel.get();
int result = Ints.compare(level, o.priorityLevel.get());
if (result != 0) {
return result;
}
if (level < 4) {
result = Long.compare(threadUsageNanos.get(), threadUsageNanos.get());
}
else {
result = Long.compare(lastRun.get(), o.lastRun.get());
}
if (result != 0) {
return result;
}
return Longs.compare(workerId, o.workerId);
}
@Override
public String toString()
{
return String.format("Split %-15s %s %s",
taskHandle.getTaskId(),
priorityLevel,
new Duration(threadUsageNanos.get(), TimeUnit.NANOSECONDS).convertToMostSuccinctTimeUnit());
}
}
private static int calculatePriorityLevel(long threadUsageNanos)
{
long millis = TimeUnit.NANOSECONDS.toMillis(threadUsageNanos);
int priorityLevel;
if (millis < 1000) {
priorityLevel = 0;
}
else if (millis < 10_000) {
priorityLevel = 1;
}
else if (millis < 60_000) {
priorityLevel = 2;
}
else if (millis < 300_000) {
priorityLevel = 3;
}
else {
priorityLevel = 4;
}
return priorityLevel;
}
    /**
     * The loop executed by each split-processing thread: repeatedly takes the
     * highest-priority pending split, runs it for one quantum, then re-queues,
     * parks (blocked), or finalizes it depending on the outcome. If the loop dies
     * for any reason while the executor is open, it replaces itself.
     */
    private class Runner
            implements Runnable
    {
        private final long runnerId = NEXT_RUNNER_ID.getAndIncrement();
        @Override
        public void run()
        {
            try (SetThreadName runnerName = new SetThreadName("SplitRunner-%s", runnerId)) {
                // NOTE(review): 'closed' is read without holding the TaskExecutor
                // monitor; loop exit also relies on shutdownNow's interrupt — confirm.
                while (!closed && !Thread.currentThread().isInterrupted()) {
                    // select next worker
                    final PrioritizedSplitRunner split;
                    try {
                        split = pendingSplits.take();
                        if (split.updatePriorityLevel()) {
                            // priority level changed, return split to queue for re-prioritization
                            pendingSplits.put(split);
                            continue;
                        }
                    }
                    catch (InterruptedException e) {
                        // restore the interrupt flag and let this runner exit
                        Thread.currentThread().interrupt();
                        return;
                    }
                    try (SetThreadName splitName = new SetThreadName(split.toString())) {
                        runningSplits.add(split);
                        boolean finished;
                        ListenableFuture<?> blocked;
                        try {
                            split.initializeIfNecessary();
                            blocked = split.process();
                            finished = split.isFinished();
                        }
                        finally {
                            // always drop from the running set, even if process() threw
                            runningSplits.remove(split);
                        }
                        if (finished) {
                            log.debug("%s is finished", split);
                            splitFinished(split);
                        }
                        else {
                            if (blocked.isDone()) {
                                // not blocked after all: requeue for another quantum
                                pendingSplits.put(split);
                            }
                            else {
                                // park until the blocking future fires, then requeue
                                blockedSplits.add(split);
                                blocked.addListener(new Runnable()
                                {
                                    @Override
                                    public void run()
                                    {
                                        blockedSplits.remove(split);
                                        split.updatePriorityLevel();
                                        pendingSplits.put(split);
                                    }
                                }, executor);
                            }
                        }
                    }
                    catch (Throwable t) {
                        // a failing split is treated as finished so its task can make progress
                        log.error(t, "Error processing %s", split);
                        splitFinished(split);
                    }
                }
            }
            finally {
                // unless we have been closed, we need to replace this thread
                if (!closed) {
                    addRunnerThread();
                }
            }
        }
    }
    //
    // STATS
    //
    // JMX-exported gauges over the executor's internal collections and counters.
    @Managed
    public int getTasks()
    {
        return tasks.size();
    }
    @Managed
    public int getRunnerThreads()
    {
        return runnerThreads;
    }
    @Managed
    public int getMinimumNumberOfTasks()
    {
        return minimumNumberOfTasks;
    }
    @Managed
    public int getTotalSplits()
    {
        return allSplits.size();
    }
    @Managed
    public int getPendingSplits()
    {
        return pendingSplits.size();
    }
    @Managed
    public int getRunningSplits()
    {
        return runningSplits.size();
    }
    @Managed
    public int getBlockedSplits()
    {
        return blockedSplits.size();
    }
    @Managed
    public long getCompletedTasksLevel0()
    {
        return completedTasksPerLevel.get(0);
    }
    @Managed
    public long getCompletedTasksLevel1()
    {
        return completedTasksPerLevel.get(1);
    }
    @Managed
    public long getCompletedTasksLevel2()
    {
        return completedTasksPerLevel.get(2);
    }
    @Managed
    public long getCompletedTasksLevel3()
    {
        return completedTasksPerLevel.get(3);
    }
    @Managed
    public long getCompletedTasksLevel4()
    {
        return completedTasksPerLevel.get(4);
    }
    @Managed
    public long getRunningTasksLevel0()
    {
        return calculateRunningTasksForLevel(0);
    }
    @Managed
    public long getRunningTasksLevel1()
    {
        return calculateRunningTasksForLevel(1);
    }
    @Managed
    public long getRunningTasksLevel2()
    {
        return calculateRunningTasksForLevel(2);
    }
    @Managed
    public long getRunningTasksLevel3()
    {
        return calculateRunningTasksForLevel(3);
    }
    @Managed
    public long getRunningTasksLevel4()
    {
        return calculateRunningTasksForLevel(4);
    }
    @Managed
    @Nested
    public DistributionStat getQueuedTime()
    {
        return queuedTime;
    }
    @Managed
    @Nested
    public DistributionStat getWallTime()
    {
        return wallTime;
    }
    /** Counts registered tasks whose current thread usage maps to the given level. */
    private synchronized int calculateRunningTasksForLevel(int level)
    {
        int count = 0;
        for (TaskHandle task : tasks) {
            if (calculatePriorityLevel(task.getThreadUsageNanos()) == level) {
                count++;
            }
        }
        return count;
    }
    @Managed(description = "Task processor executor")
    @Nested
    public ThreadPoolExecutorMBean getProcessorExecutor()
    {
        return executorMBean;
    }
}
|
presto-main/src/main/java/com/facebook/presto/execution/TaskExecutor.java
|
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.execution;
import com.facebook.presto.util.SetThreadName;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Objects;
import com.google.common.base.Ticker;
import com.google.common.collect.Sets;
import com.google.common.primitives.Ints;
import com.google.common.primitives.Longs;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.SettableFuture;
import io.airlift.concurrent.ThreadPoolExecutorMBean;
import io.airlift.log.Logger;
import io.airlift.stats.DistributionStat;
import io.airlift.units.Duration;
import org.weakref.jmx.Managed;
import org.weakref.jmx.Nested;
import javax.annotation.PostConstruct;
import javax.annotation.PreDestroy;
import javax.annotation.concurrent.GuardedBy;
import javax.annotation.concurrent.NotThreadSafe;
import javax.annotation.concurrent.ThreadSafe;
import javax.inject.Inject;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Queue;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.PriorityBlockingQueue;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicLongArray;
import static com.facebook.presto.util.Threads.threadsNamed;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;
@ThreadSafe
public class TaskExecutor
{
private static final Logger log = Logger.get(TaskExecutor.class);
// each task is guaranteed a minimum number of tasks
private static final int GUARANTEED_SPLITS_PER_TASK = 3;
// each time we run a split, run it for this length before returning to the pool
private static final Duration SPLIT_RUN_QUANTA = new Duration(1, TimeUnit.SECONDS);
private static final AtomicLong NEXT_RUNNER_ID = new AtomicLong();
private static final AtomicLong NEXT_WORKER_ID = new AtomicLong();
private final ExecutorService executor;
private final ThreadPoolExecutorMBean executorMBean;
private final int runnerThreads;
private final int minimumNumberOfTasks;
private final Ticker ticker;
@GuardedBy("this")
private final List<TaskHandle> tasks;
private final Set<PrioritizedSplitRunner> allSplits = new HashSet<>();
private final PriorityBlockingQueue<PrioritizedSplitRunner> pendingSplits;
private final Set<PrioritizedSplitRunner> runningSplits = Sets.newSetFromMap(new ConcurrentHashMap<PrioritizedSplitRunner, Boolean>());
private final Set<PrioritizedSplitRunner> blockedSplits = Sets.newSetFromMap(new ConcurrentHashMap<PrioritizedSplitRunner, Boolean>());
private final AtomicLongArray completedTasksPerLevel = new AtomicLongArray(5);
private final DistributionStat queuedTime = new DistributionStat();
private final DistributionStat wallTime = new DistributionStat();
private boolean closed;
@Inject
public TaskExecutor(TaskManagerConfig config)
{
this(checkNotNull(config, "config is null").getMaxShardProcessorThreads());
}
public TaskExecutor(int runnerThreads)
{
this(runnerThreads, Ticker.systemTicker());
}
@VisibleForTesting
public TaskExecutor(int runnerThreads, Ticker ticker)
{
checkArgument(runnerThreads > 0, "runnerThreads must be at least 1");
// we manages thread pool size directly, so create an unlimited pool
this.executor = Executors.newCachedThreadPool(threadsNamed("task-processor-%d"));
this.executorMBean = new ThreadPoolExecutorMBean((ThreadPoolExecutor) executor);
this.runnerThreads = runnerThreads;
this.ticker = checkNotNull(ticker, "ticker is null");
// we assume we need at least two tasks per runner thread to keep the system busy
this.minimumNumberOfTasks = 2 * this.runnerThreads;
this.pendingSplits = new PriorityBlockingQueue<>(Runtime.getRuntime().availableProcessors() * 10);
this.tasks = new LinkedList<>();
}
@PostConstruct
public synchronized void start()
{
checkState(!closed, "TaskExecutor is closed");
for (int i = 0; i < runnerThreads; i++) {
addRunnerThread();
}
}
@PreDestroy
public synchronized void stop()
{
closed = true;
executor.shutdownNow();
}
@Override
public synchronized String toString()
{
return Objects.toStringHelper(this)
.add("runnerThreads", runnerThreads)
.add("allSplits", allSplits.size())
.add("pendingSplits", pendingSplits.size())
.add("runningSplits", runningSplits.size())
.add("blockedSplits", blockedSplits.size())
.toString();
}
private synchronized void addRunnerThread()
{
try {
executor.execute(new Runner());
}
catch (RejectedExecutionException ignored) {
}
}
public synchronized TaskHandle addTask(TaskId taskId)
{
TaskHandle taskHandle = new TaskHandle(checkNotNull(taskId, "taskId is null"));
tasks.add(taskHandle);
return taskHandle;
}
public synchronized void removeTask(TaskHandle taskHandle)
{
taskHandle.destroy();
tasks.remove(taskHandle);
// record completed stats
long threadUsageNanos = taskHandle.getThreadUsageNanos();
int priorityLevel = calculatePriorityLevel(threadUsageNanos);
completedTasksPerLevel.incrementAndGet(priorityLevel);
}
public synchronized ListenableFuture<?> enqueueSplit(TaskHandle taskHandle, SplitRunner taskSplit)
{
PrioritizedSplitRunner prioritizedSplitRunner = new PrioritizedSplitRunner(taskHandle, taskSplit, ticker);
taskHandle.addSplit(prioritizedSplitRunner);
scheduleTaskIfNecessary(taskHandle);
return prioritizedSplitRunner.getFinishedFuture();
}
public synchronized ListenableFuture<?> forceRunSplit(TaskHandle taskHandle, SplitRunner taskSplit)
{
PrioritizedSplitRunner prioritizedSplitRunner = new PrioritizedSplitRunner(taskHandle, taskSplit, ticker);
// Note: we do not record queued time for forced splits
startSplit(prioritizedSplitRunner);
return prioritizedSplitRunner.getFinishedFuture();
}
private synchronized void splitFinished(PrioritizedSplitRunner split)
{
allSplits.remove(split);
pendingSplits.remove(split);
TaskHandle taskHandle = split.getTaskHandle();
taskHandle.splitComplete(split);
wallTime.add(System.nanoTime() - split.createdNanos);
scheduleTaskIfNecessary(taskHandle);
addNewEntrants();
}
private synchronized void scheduleTaskIfNecessary(TaskHandle taskHandle)
{
// if task has less than the minimum guaranteed splits running,
// immediately schedule a new split for this task. This assures
// that a task gets its fair amount of consideration (you have to
// have splits to be considered for running on a thread).
if (taskHandle.getRunningSplits() < GUARANTEED_SPLITS_PER_TASK) {
PrioritizedSplitRunner split = taskHandle.pollNextSplit();
if (split != null) {
startSplit(split);
queuedTime.add(System.nanoTime() - split.createdNanos);
}
}
}
private synchronized void addNewEntrants()
{
int running = allSplits.size();
for (int i = 0; i < minimumNumberOfTasks - running; i++) {
PrioritizedSplitRunner split = pollNextSplitWorker();
if (split == null) {
break;
}
queuedTime.add(System.nanoTime() - split.createdNanos);
startSplit(split);
}
}
private synchronized void startSplit(PrioritizedSplitRunner split)
{
allSplits.add(split);
pendingSplits.put(split);
}
private synchronized PrioritizedSplitRunner pollNextSplitWorker()
{
// todo find a better algorithm for this
// find the first task that produces a split, then move that task to the
// end of the task list, so we get round robin
for (Iterator<TaskHandle> iterator = tasks.iterator(); iterator.hasNext(); ) {
TaskHandle task = iterator.next();
PrioritizedSplitRunner split = task.pollNextSplit();
if (split != null) {
// move task to end of list
iterator.remove();
// CAUTION: we are modifying the list in the loop which would normally
// cause a ConcurrentModificationException but we exit immediately
tasks.add(task);
return split;
}
}
return null;
}
@NotThreadSafe
public static class TaskHandle
{
private final TaskId taskId;
private final Queue<PrioritizedSplitRunner> queuedSplits = new ArrayDeque<>(10);
private final List<PrioritizedSplitRunner> runningSplits = new ArrayList<>(10);
private final AtomicLong taskThreadUsageNanos = new AtomicLong();
private TaskHandle(TaskId taskId)
{
this.taskId = taskId;
}
private long addThreadUsageNanos(long durationNanos)
{
return taskThreadUsageNanos.addAndGet(durationNanos);
}
private TaskId getTaskId()
{
return taskId;
}
private void destroy()
{
for (PrioritizedSplitRunner runningSplit : runningSplits) {
runningSplit.destroy();
}
runningSplits.clear();
for (PrioritizedSplitRunner queuedSplit : queuedSplits) {
queuedSplit.destroy();
}
queuedSplits.clear();
}
private void addSplit(PrioritizedSplitRunner split)
{
queuedSplits.add(split);
}
private int getRunningSplits()
{
return runningSplits.size();
}
private long getThreadUsageNanos()
{
return taskThreadUsageNanos.get();
}
private PrioritizedSplitRunner pollNextSplit()
{
PrioritizedSplitRunner split = queuedSplits.poll();
if (split != null) {
runningSplits.add(split);
}
return split;
}
private void splitComplete(PrioritizedSplitRunner split)
{
runningSplits.remove(split);
split.destroy();
}
@Override
public String toString()
{
return Objects.toStringHelper(this)
.add("taskId", taskId)
.toString();
}
}
private static class PrioritizedSplitRunner
implements Comparable<PrioritizedSplitRunner>
{
private final long createdNanos = System.nanoTime();
private final TaskHandle taskHandle;
private final long workerId;
private final SplitRunner split;
private final Ticker ticker;
private final SettableFuture<?> finishedFuture = SettableFuture.create();
private final AtomicBoolean initialized = new AtomicBoolean();
private final AtomicBoolean destroyed = new AtomicBoolean();
private final AtomicInteger priorityLevel = new AtomicInteger();
private final AtomicLong threadUsageNanos = new AtomicLong();
private final AtomicLong lastRun = new AtomicLong();
private PrioritizedSplitRunner(TaskHandle taskHandle, SplitRunner split, Ticker ticker)
{
this.taskHandle = taskHandle;
this.split = split;
this.ticker = ticker;
this.workerId = NEXT_WORKER_ID.getAndIncrement();
}
private TaskHandle getTaskHandle()
{
return taskHandle;
}
private SettableFuture<?> getFinishedFuture()
{
return finishedFuture;
}
public void initializeIfNecessary()
{
if (initialized.compareAndSet(false, true)) {
split.initialize();
}
}
public void destroy()
{
try {
split.close();
}
catch (RuntimeException e) {
log.error(e, "Error closing split for task %s", taskHandle.getTaskId());
}
destroyed.set(true);
}
public boolean isFinished()
{
boolean finished = split.isFinished();
if (finished) {
finishedFuture.set(null);
}
return finished || destroyed.get();
}
public ListenableFuture<?> process()
throws Exception
{
try {
long start = ticker.read();
ListenableFuture<?> blocked = split.processFor(SPLIT_RUN_QUANTA);
long endTime = ticker.read();
// update priority level base on total thread usage of task
long durationNanos = endTime - start;
long threadUsageNanos = taskHandle.addThreadUsageNanos(durationNanos);
this.threadUsageNanos.set(threadUsageNanos);
priorityLevel.set(calculatePriorityLevel(threadUsageNanos));
// record last run for prioritization within a level
lastRun.set(endTime);
return blocked;
}
catch (Throwable e) {
finishedFuture.setException(e);
throw e;
}
}
public boolean updatePriorityLevel()
{
int newPriority = calculatePriorityLevel(taskHandle.getThreadUsageNanos());
if (newPriority == priorityLevel.getAndSet(newPriority)) {
return false;
}
// update thread usage while if level changed
threadUsageNanos.set(taskHandle.getThreadUsageNanos());
return true;
}
@Override
public int compareTo(PrioritizedSplitRunner o)
{
int level = priorityLevel.get();
int result = Ints.compare(level, o.priorityLevel.get());
if (result != 0) {
return result;
}
if (level < 4) {
result = Long.compare(threadUsageNanos.get(), threadUsageNanos.get());
}
else {
result = Long.compare(lastRun.get(), o.lastRun.get());
}
if (result != 0) {
return result;
}
return Longs.compare(workerId, o.workerId);
}
// Human-readable identity used for thread naming and log messages:
// task id, current priority level, and accumulated CPU time.
@Override
public String toString()
{
return String.format("Split %-15s %s %s",
taskHandle.getTaskId(),
priorityLevel,
new Duration(threadUsageNanos.get(), TimeUnit.NANOSECONDS).convertToMostSuccinctTimeUnit());
}
}
// Maps a split's total thread usage to one of five priority levels:
// 0 for under 1 s of CPU time, 1 for under 10 s, 2 for under 1 min,
// 3 for under 5 min, and 4 for everything beyond.
private static int calculatePriorityLevel(long threadUsageNanos)
{
    long millis = TimeUnit.NANOSECONDS.toMillis(threadUsageNanos);
    if (millis < 1000) {
        return 0;
    }
    if (millis < 10_000) {
        return 1;
    }
    if (millis < 60_000) {
        return 2;
    }
    if (millis < 300_000) {
        return 3;
    }
    return 4;
}
// Worker loop run by each executor thread. Repeatedly takes the
// highest-priority split from the pending queue, runs it for one quantum,
// and then either re-queues it, parks it in blockedSplits until its blocking
// future fires, or reports it finished. If the loop exits for any reason
// while the executor is still open, the thread replaces itself.
private class Runner
implements Runnable
{
private final long runnerId = NEXT_RUNNER_ID.getAndIncrement();
@Override
public void run()
{
try (SetThreadName runnerName = new SetThreadName("SplitRunner-%s", runnerId)) {
while (!closed && !Thread.currentThread().isInterrupted()) {
// select next worker
final PrioritizedSplitRunner split;
try {
split = pendingSplits.take();
if (split.updatePriorityLevel()) {
// priority level changed, return split to queue for re-prioritization
pendingSplits.put(split);
continue;
}
}
catch (InterruptedException e) {
// restore the interrupt flag and exit the loop cleanly
Thread.currentThread().interrupt();
return;
}
try (SetThreadName splitName = new SetThreadName(split.toString())) {
runningSplits.add(split);
boolean finished;
ListenableFuture<?> blocked;
try {
split.initializeIfNecessary();
blocked = split.process();
finished = split.isFinished();
}
finally {
runningSplits.remove(split);
}
if (finished) {
log.debug("%s is finished", split);
splitFinished(split);
}
else {
if (blocked.isDone()) {
pendingSplits.put(split);
}
else {
// park until the blocking future completes, then re-prioritize
// and return the split to the pending queue
blockedSplits.add(split);
blocked.addListener(new Runnable()
{
@Override
public void run()
{
blockedSplits.remove(split);
split.updatePriorityLevel();
pendingSplits.put(split);
}
}, executor);
}
}
}
catch (Throwable t) {
// a failed split is treated as finished so its task can make progress
log.error(t, "Error processing %s", split);
splitFinished(split);
}
}
}
finally {
// unless we have been closed, we need to replace this thread
if (!closed) {
addRunnerThread();
}
}
}
}
//
// STATS
//
// Simple counters exposed over JMX via @Managed.

// Number of tasks currently registered with this executor.
@Managed
public int getTasks()
{
return tasks.size();
}

// Configured number of runner threads.
@Managed
public int getRunnerThreads()
{
return runnerThreads;
}

// Configured guaranteed minimum number of concurrently-scheduled tasks.
@Managed
public int getMinimumNumberOfTasks()
{
return minimumNumberOfTasks;
}

// Total splits known to the executor (pending + running + blocked).
@Managed
public int getTotalSplits()
{
return allSplits.size();
}

@Managed
public int getPendingSplits()
{
return pendingSplits.size();
}

@Managed
public int getRunningSplits()
{
return runningSplits.size();
}

@Managed
public int getBlockedSplits()
{
return blockedSplits.size();
}
// Per-priority-level counters: completed counts are maintained incrementally
// in completedTasksPerLevel, while running counts are computed on demand by
// scanning the current task list (see calculateRunningTasksForLevel).
@Managed
public long getCompletedTasksLevel0()
{
return completedTasksPerLevel.get(0);
}

@Managed
public long getCompletedTasksLevel1()
{
return completedTasksPerLevel.get(1);
}

@Managed
public long getCompletedTasksLevel2()
{
return completedTasksPerLevel.get(2);
}

@Managed
public long getCompletedTasksLevel3()
{
return completedTasksPerLevel.get(3);
}

@Managed
public long getCompletedTasksLevel4()
{
return completedTasksPerLevel.get(4);
}

@Managed
public long getRunningTasksLevel0()
{
return calculateRunningTasksForLevel(0);
}

@Managed
public long getRunningTasksLevel1()
{
return calculateRunningTasksForLevel(1);
}

@Managed
public long getRunningTasksLevel2()
{
return calculateRunningTasksForLevel(2);
}

@Managed
public long getRunningTasksLevel3()
{
return calculateRunningTasksForLevel(3);
}

@Managed
public long getRunningTasksLevel4()
{
return calculateRunningTasksForLevel(4);
}
// Distribution of time splits spend waiting in the pending queue.
@Managed
@Nested
public DistributionStat getQueuedTime()
{
return queuedTime;
}

// Distribution of total wall time per split.
@Managed
@Nested
public DistributionStat getWallTime()
{
return wallTime;
}
// Counts tasks whose current priority level (derived from accumulated thread
// usage) equals the given level. Synchronized because it iterates the shared
// task list; O(n) per call, which is acceptable for occasional JMX polling.
private synchronized int calculateRunningTasksForLevel(int level)
{
int count = 0;
for (TaskHandle task : tasks) {
if (calculatePriorityLevel(task.getThreadUsageNanos()) == level) {
count++;
}
}
return count;
}
// JMX view of the underlying thread pool used to run splits.
@Managed(description = "Task processor executor")
@Nested
public ThreadPoolExecutorMBean getProcessorExecutor()
{
return executorMBean;
}
}
|
Check for new entrants after adding new splits
|
presto-main/src/main/java/com/facebook/presto/execution/TaskExecutor.java
|
Check for new entrants after adding new splits
|
|
Java
|
apache-2.0
|
cfcc6e016044791c94ff114d296c11f50873c044
| 0
|
datametica/calcite,datametica/calcite,datametica/calcite,datametica/calcite,datametica/calcite,datametica/calcite
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to you under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.calcite.rel.rel2sql;
import org.apache.calcite.config.NullCollation;
import org.apache.calcite.plan.RelOptPlanner;
import org.apache.calcite.plan.RelOptRule;
import org.apache.calcite.plan.RelTraitDef;
import org.apache.calcite.plan.hep.HepPlanner;
import org.apache.calcite.plan.hep.HepProgramBuilder;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.rel.core.JoinRelType;
import org.apache.calcite.rel.logical.LogicalAggregate;
import org.apache.calcite.rel.logical.LogicalFilter;
import org.apache.calcite.rel.rules.AggregateJoinTransposeRule;
import org.apache.calcite.rel.rules.AggregateProjectMergeRule;
import org.apache.calcite.rel.rules.CoreRules;
import org.apache.calcite.rel.rules.FilterExtractInnerJoinRule;
import org.apache.calcite.rel.rules.FilterJoinRule;
import org.apache.calcite.rel.rules.ProjectToWindowRule;
import org.apache.calcite.rel.rules.PruneEmptyRules;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rel.type.RelDataTypeFactory;
import org.apache.calcite.rel.type.RelDataTypeField;
import org.apache.calcite.rel.type.RelDataTypeFieldImpl;
import org.apache.calcite.rel.type.RelDataTypeSystem;
import org.apache.calcite.rel.type.RelDataTypeSystemImpl;
import org.apache.calcite.rel.type.RelRecordType;
import org.apache.calcite.rex.RexBuilder;
import org.apache.calcite.rex.RexLiteral;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.rex.RexSubQuery;
import org.apache.calcite.runtime.FlatLists;
import org.apache.calcite.runtime.Hook;
import org.apache.calcite.schema.SchemaPlus;
import org.apache.calcite.sql.SqlCall;
import org.apache.calcite.sql.SqlDialect;
import org.apache.calcite.sql.SqlDialect.Context;
import org.apache.calcite.sql.SqlDialect.DatabaseProduct;
import org.apache.calcite.sql.SqlFunction;
import org.apache.calcite.sql.SqlFunctionCategory;
import org.apache.calcite.sql.SqlIntervalQualifier;
import org.apache.calcite.sql.SqlKind;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.SqlSelect;
import org.apache.calcite.sql.SqlWriter;
import org.apache.calcite.sql.SqlWriterConfig;
import org.apache.calcite.sql.dialect.CalciteSqlDialect;
import org.apache.calcite.sql.dialect.HiveSqlDialect;
import org.apache.calcite.sql.dialect.JethroDataSqlDialect;
import org.apache.calcite.sql.dialect.MssqlSqlDialect;
import org.apache.calcite.sql.dialect.MysqlSqlDialect;
import org.apache.calcite.sql.dialect.OracleSqlDialect;
import org.apache.calcite.sql.dialect.PostgresqlSqlDialect;
import org.apache.calcite.sql.dialect.SparkSqlDialect;
import org.apache.calcite.sql.fun.SqlLibraryOperators;
import org.apache.calcite.sql.fun.SqlStdOperatorTable;
import org.apache.calcite.sql.parser.SqlParser;
import org.apache.calcite.sql.parser.SqlParserPos;
import org.apache.calcite.sql.type.OperandTypes;
import org.apache.calcite.sql.type.ReturnTypes;
import org.apache.calcite.sql.type.SqlTypeFactoryImpl;
import org.apache.calcite.sql.type.SqlTypeName;
import org.apache.calcite.sql.util.SqlShuttle;
import org.apache.calcite.sql.validate.SqlConformance;
import org.apache.calcite.sql2rel.SqlToRelConverter;
import org.apache.calcite.test.CalciteAssert;
import org.apache.calcite.test.MockSqlOperatorTable;
import org.apache.calcite.test.RelBuilderTest;
import org.apache.calcite.tools.FrameworkConfig;
import org.apache.calcite.tools.Frameworks;
import org.apache.calcite.tools.Planner;
import org.apache.calcite.tools.Program;
import org.apache.calcite.tools.Programs;
import org.apache.calcite.tools.RelBuilder;
import org.apache.calcite.tools.RuleSet;
import org.apache.calcite.tools.RuleSets;
import org.apache.calcite.util.TestUtil;
import org.apache.calcite.util.TimestampString;
import org.apache.calcite.util.Util;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import java.util.function.UnaryOperator;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import static org.apache.calcite.avatica.util.TimeUnit.DAY;
import static org.apache.calcite.avatica.util.TimeUnit.MICROSECOND;
import static org.apache.calcite.sql.fun.SqlLibraryOperators.FALSE;
import static org.apache.calcite.sql.fun.SqlLibraryOperators.TRUE;
import static org.apache.calcite.test.Matchers.isLinux;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.notNullValue;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
/**
* Tests for {@link RelToSqlConverter}.
*/
class RelToSqlConverterTest {
/** Initiates a test case with a given SQL query. */
private Sql sql(String sql) {
return new Sql(CalciteAssert.SchemaSpec.JDBC_FOODMART, sql,
CalciteSqlDialect.DEFAULT, SqlParser.Config.DEFAULT,
UnaryOperator.identity(), null, ImmutableList.of());
}

/** Like {@link #sql}, but against the FOODMART_TEST schema. */
private Sql sqlTest(String sql) {
return new Sql(CalciteAssert.SchemaSpec.FOODMART_TEST, sql,
CalciteSqlDialect.DEFAULT, SqlParser.Config.DEFAULT,
UnaryOperator.identity(), null, ImmutableList.of());
}

/** Initiates a test case with a given {@link RelNode} supplier. */
private Sql relFn(Function<RelBuilder, RelNode> relFn) {
// The "?" query is a placeholder; the relational expression supplied by
// relFn is used instead of parsing SQL.
return sql("?").relFn(relFn);
}
/** Creates a planner configured with the mock operator table (including the
 * RAMP test functions) and the given traits, parser config and programs. */
private static Planner getPlanner(List<RelTraitDef> traitDefs,
SqlParser.Config parserConfig, SchemaPlus schema,
SqlToRelConverter.Config sqlToRelConf, Program... programs) {
final MockSqlOperatorTable operatorTable =
new MockSqlOperatorTable(SqlStdOperatorTable.instance());
MockSqlOperatorTable.addRamp(operatorTable);
final FrameworkConfig config = Frameworks.newConfigBuilder()
.parserConfig(parserConfig)
.defaultSchema(schema)
.traitDefs(traitDefs)
.sqlToRelConverterConfig(sqlToRelConf)
.programs(programs)
.operatorTable(operatorTable)
.build();
return Frameworks.getPlanner(config);
}
/** Builds a JethroData dialect with a dummy context, since Jethro has no
 * predefined default dialect instance. */
private static JethroDataSqlDialect jethroDataSqlDialect() {
Context dummyContext = SqlDialect.EMPTY_CONTEXT
.withDatabaseProduct(SqlDialect.DatabaseProduct.JETHRO)
.withDatabaseMajorVersion(1)
.withDatabaseMinorVersion(0)
.withDatabaseVersion("1.0")
.withIdentifierQuoteString("\"")
.withNullCollation(NullCollation.HIGH)
.withJethroInfo(JethroDataSqlDialect.JethroInfo.EMPTY);
return new JethroDataSqlDialect(dummyContext);
}

/** Builds a MySQL dialect with the given null collation. */
private static MysqlSqlDialect mySqlDialect(NullCollation nullCollation) {
return new MysqlSqlDialect(MysqlSqlDialect.DEFAULT_CONTEXT
.withNullCollation(nullCollation));
}
/** Returns a collection of common dialects, and the database products they
 * represent. Note that MYSQL appears twice: once with its default collation
 * and once with {@link NullCollation#HIGH}. */
private static Map<SqlDialect, DatabaseProduct> dialects() {
return ImmutableMap.<SqlDialect, DatabaseProduct>builder()
.put(SqlDialect.DatabaseProduct.BIG_QUERY.getDialect(),
SqlDialect.DatabaseProduct.BIG_QUERY)
.put(SqlDialect.DatabaseProduct.CALCITE.getDialect(),
SqlDialect.DatabaseProduct.CALCITE)
.put(SqlDialect.DatabaseProduct.DB2.getDialect(),
SqlDialect.DatabaseProduct.DB2)
.put(SqlDialect.DatabaseProduct.HIVE.getDialect(),
SqlDialect.DatabaseProduct.HIVE)
.put(jethroDataSqlDialect(),
SqlDialect.DatabaseProduct.JETHRO)
.put(SqlDialect.DatabaseProduct.MSSQL.getDialect(),
SqlDialect.DatabaseProduct.MSSQL)
.put(SqlDialect.DatabaseProduct.MYSQL.getDialect(),
SqlDialect.DatabaseProduct.MYSQL)
.put(mySqlDialect(NullCollation.HIGH),
SqlDialect.DatabaseProduct.MYSQL)
.put(SqlDialect.DatabaseProduct.ORACLE.getDialect(),
SqlDialect.DatabaseProduct.ORACLE)
.put(SqlDialect.DatabaseProduct.POSTGRESQL.getDialect(),
SqlDialect.DatabaseProduct.POSTGRESQL)
.put(DatabaseProduct.PRESTO.getDialect(),
DatabaseProduct.PRESTO)
.build();
}
/** Creates a RelBuilder configured with the standard test schema. */
private static RelBuilder relBuilder() {
return RelBuilder.create(RelBuilderTest.config().build());
}
/** Converts a relational expression to SQL in the default (Calcite) dialect.
 *
 * <p>Made {@code static} for consistency with the other {@code toSql}
 * overloads; it uses no instance state, and callers are unaffected. */
private static String toSql(RelNode root) {
return toSql(root, SqlDialect.DatabaseProduct.CALCITE.getDialect());
}
/** Converts a relational expression to SQL in a given dialect, using a
 * compact writer configuration (no extra parentheses, single-line select
 * lists, no indentation). */
private static String toSql(RelNode root, SqlDialect dialect) {
return toSql(root, dialect, c ->
c.withAlwaysUseParentheses(false)
.withSelectListItemsOnSeparateLines(false)
.withUpdateSetListNewline(false)
.withIndentation(0));
}
/** Converts a relational expression to SQL in a given dialect
 * and with a particular writer configuration. All other {@code toSql}
 * overloads delegate to this one. */
private static String toSql(RelNode root, SqlDialect dialect,
UnaryOperator<SqlWriterConfig> transform) {
final RelToSqlConverter converter = new RelToSqlConverter(dialect);
final SqlNode sqlNode = converter.visitRoot(root).asStatement();
return sqlNode.toSqlString(c -> transform.apply(c.withDialect(dialect)))
.getSql();
}
// --- Tests: ORDER BY on select-list aliases, FILTER clauses, and PIVOT. ---
// BigQuery and Hive emit explicit "IS NULL" sort keys to control null ordering.
@Test public void testSimpleSelectWithOrderByAliasAsc() {
final String query = "select sku+1 as a from \"product\" order by a";
final String bigQueryExpected = "SELECT SKU + 1 AS A\nFROM foodmart.product\n"
+ "ORDER BY A IS NULL, A";
final String hiveExpected = "SELECT SKU + 1 A\nFROM foodmart.product\n"
+ "ORDER BY A IS NULL, A";
sql(query)
.withBigQuery()
.ok(bigQueryExpected)
.withHive()
.ok(hiveExpected);
}

@Test public void testSimpleSelectWithOrderByAliasDesc() {
final String query = "select sku+1 as a from \"product\" order by a desc";
final String bigQueryExpected = "SELECT SKU + 1 AS A\nFROM foodmart.product\n"
+ "ORDER BY A IS NULL DESC, A DESC";
final String hiveExpected = "SELECT SKU + 1 A\nFROM foodmart.product\n"
+ "ORDER BY A IS NULL DESC, A DESC";
sql(query)
.withBigQuery()
.ok(bigQueryExpected)
.withHive()
.ok(hiveExpected);
}

@Test void testSimpleSelectStarFromProductTable() {
String query = "select * from \"product\"";
sql(query).ok("SELECT *\nFROM \"foodmart\".\"product\"");
}

// Dialects with FILTER support keep the FILTER (WHERE ...) syntax.
@Test void testAggregateFilterWhereToSqlFromProductTable() {
String query = "select\n"
+ " sum(\"shelf_width\") filter (where \"net_weight\" > 0),\n"
+ " sum(\"shelf_width\")\n"
+ "from \"foodmart\".\"product\"\n"
+ "where \"product_id\" > 0\n"
+ "group by \"product_id\"";
final String expected = "SELECT"
+ " SUM(\"shelf_width\") FILTER (WHERE \"net_weight\" > 0 IS TRUE),"
+ " SUM(\"shelf_width\")\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "WHERE \"product_id\" > 0\n"
+ "GROUP BY \"product_id\"";
sql(query).ok(expected);
}

// BigQuery lacks FILTER, so the filter becomes CASE inside the aggregate.
@Test void testAggregateFilterWhereToBigQuerySqlFromProductTable() {
String query = "select\n"
+ " sum(\"shelf_width\") filter (where \"net_weight\" > 0),\n"
+ " sum(\"shelf_width\")\n"
+ "from \"foodmart\".\"product\"\n"
+ "where \"product_id\" > 0\n"
+ "group by \"product_id\"";
final String expected = "SELECT SUM(CASE WHEN net_weight > 0 IS TRUE"
+ " THEN shelf_width ELSE NULL END), "
+ "SUM(shelf_width)\n"
+ "FROM foodmart.product\n"
+ "WHERE product_id > 0\n"
+ "GROUP BY product_id";
sql(query).withBigQuery().ok(expected);
}

@Test void testPivotToSqlFromProductTable() {
String query = "select * from (\n"
+ " select \"shelf_width\", \"net_weight\", \"product_id\"\n"
+ " from \"foodmart\".\"product\")\n"
+ " pivot (sum(\"shelf_width\") as w, count(*) as c\n"
+ " for (\"product_id\") in (10, 20))";
final String expected = "SELECT \"net_weight\","
+ " SUM(\"shelf_width\") FILTER (WHERE \"product_id\" = 10) AS \"10_W\","
+ " COUNT(*) FILTER (WHERE \"product_id\" = 10) AS \"10_C\","
+ " SUM(\"shelf_width\") FILTER (WHERE \"product_id\" = 20) AS \"20_W\","
+ " COUNT(*) FILTER (WHERE \"product_id\" = 20) AS \"20_C\"\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "GROUP BY \"net_weight\"";
// BigQuery does not support FILTER, so we generate CASE around the
// arguments to the aggregate functions.
final String expectedBigQuery = "SELECT net_weight,"
+ " SUM(CASE WHEN product_id = 10 "
+ "THEN shelf_width ELSE NULL END) AS `10_W`,"
+ " COUNT(CASE WHEN product_id = 10 THEN 1 ELSE NULL END) AS `10_C`,"
+ " SUM(CASE WHEN product_id = 20 "
+ "THEN shelf_width ELSE NULL END) AS `20_W`,"
+ " COUNT(CASE WHEN product_id = 20 THEN 1 ELSE NULL END) AS `20_C`\n"
+ "FROM foodmart.product\n"
+ "GROUP BY net_weight";
sql(query).ok(expected)
.withBigQuery().ok(expectedBigQuery);
}
// --- Tests: simple projections, WHERE clauses, and GROUP BY variants. ---
@Test void testSimpleSelectQueryFromProductTable() {
String query = "select \"product_id\", \"product_class_id\" from \"product\"";
final String expected = "SELECT \"product_id\", \"product_class_id\"\n"
+ "FROM \"foodmart\".\"product\"";
sql(query).ok(expected);
}

@Test void testSelectQueryWithWhereClauseOfLessThan() {
String query = "select \"product_id\", \"shelf_width\"\n"
+ "from \"product\" where \"product_id\" < 10";
final String expected = "SELECT \"product_id\", \"shelf_width\"\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "WHERE \"product_id\" < 10";
sql(query).ok(expected);
}

@Test void testSelectWhereNotEqualsOrNull() {
String query = "select \"product_id\", \"shelf_width\"\n"
+ "from \"product\"\n"
+ "where \"net_weight\" <> 10 or \"net_weight\" is null";
final String expected = "SELECT \"product_id\", \"shelf_width\"\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "WHERE \"net_weight\" <> 10 OR \"net_weight\" IS NULL";
sql(query).ok(expected);
}

@Test void testSelectQueryWithWhereClauseOfBasicOperators() {
String query = "select * from \"product\" "
+ "where (\"product_id\" = 10 OR \"product_id\" <= 5) "
+ "AND (80 >= \"shelf_width\" OR \"shelf_width\" > 30)";
final String expected = "SELECT *\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "WHERE (\"product_id\" = 10 OR \"product_id\" <= 5) "
+ "AND (80 >= \"shelf_width\" OR \"shelf_width\" > 30)";
sql(query).ok(expected);
}

@Test void testSelectQueryWithGroupBy() {
String query = "select count(*) from \"product\" group by \"product_class_id\", \"product_id\"";
final String expected = "SELECT COUNT(*)\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "GROUP BY \"product_class_id\", \"product_id\"";
sql(query).ok(expected);
}

// Hive renders CUBE/ROLLUP as "GROUP BY ... WITH CUBE/ROLLUP".
@Test void testSelectQueryWithHiveCube() {
String query = "select \"product_class_id\", \"product_id\", count(*) "
+ "from \"product\" group by cube(\"product_class_id\", \"product_id\")";
String expected = "SELECT product_class_id, product_id, COUNT(*)\n"
+ "FROM foodmart.product\n"
+ "GROUP BY product_class_id, product_id WITH CUBE";
sql(query).withHive().ok(expected);
SqlDialect sqlDialect = sql(query).withHive().dialect;
assertTrue(sqlDialect.supportsGroupByWithCube());
}

@Test void testSelectQueryWithHiveRollup() {
String query = "select \"product_class_id\", \"product_id\", count(*) "
+ "from \"product\" group by rollup(\"product_class_id\", \"product_id\")";
String expected = "SELECT product_class_id, product_id, COUNT(*)\n"
+ "FROM foodmart.product\n"
+ "GROUP BY product_class_id, product_id WITH ROLLUP";
sql(query).withHive().ok(expected);
SqlDialect sqlDialect = sql(query).withHive().dialect;
assertTrue(sqlDialect.supportsGroupByWithRollup());
}

// "GROUP BY ()" with no other keys is dropped entirely.
@Test void testSelectQueryWithGroupByEmpty() {
final String sql0 = "select count(*) from \"product\" group by ()";
final String sql1 = "select count(*) from \"product\"";
final String expected = "SELECT COUNT(*)\n"
+ "FROM \"foodmart\".\"product\"";
final String expectedMySql = "SELECT COUNT(*)\n"
+ "FROM `foodmart`.`product`";
final String expectedPresto = "SELECT COUNT(*)\n"
+ "FROM \"foodmart\".\"product\"";
sql(sql0)
.ok(expected)
.withMysql()
.ok(expectedMySql)
.withPresto()
.ok(expectedPresto);
sql(sql1)
.ok(expected)
.withMysql()
.ok(expectedMySql)
.withPresto()
.ok(expectedPresto);
}

// But "GROUP BY ()" is preserved when the select list has no aggregate input.
@Test void testSelectQueryWithGroupByEmpty2() {
final String query = "select 42 as c from \"product\" group by ()";
final String expected = "SELECT 42 AS \"C\"\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "GROUP BY ()";
final String expectedMySql = "SELECT 42 AS `C`\n"
+ "FROM `foodmart`.`product`\n"
+ "GROUP BY ()";
final String expectedPresto = "SELECT 42 AS \"C\"\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "GROUP BY ()";
sql(query)
.ok(expected)
.withMysql()
.ok(expectedMySql)
.withPresto()
.ok(expectedPresto);
}
/** Test case for
 * <a href="https://issues.apache.org/jira/browse/CALCITE-3097">[CALCITE-3097]
 * GROUPING SETS breaks on sets of size > 1 due to precedence issues</a>,
 * in particular, that we maintain proper precedence around nested lists. */
@Test void testGroupByGroupingSets() {
final String query = "select \"product_class_id\", \"brand_name\"\n"
+ "from \"product\"\n"
+ "group by GROUPING SETS ((\"product_class_id\", \"brand_name\"),"
+ " (\"product_class_id\"))\n"
+ "order by 2, 1";
final String expected = "SELECT \"product_class_id\", \"brand_name\"\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "GROUP BY GROUPING SETS((\"product_class_id\", \"brand_name\"),"
+ " \"product_class_id\")\n"
+ "ORDER BY \"brand_name\", \"product_class_id\"";
sql(query)
.withPostgresql()
.ok(expected);
}

/** Tests GROUP BY ROLLUP of two columns. The SQL for MySQL has
 * "GROUP BY ... ROLLUP" but no "ORDER BY". */
@Test void testSelectQueryWithGroupByRollup() {
final String query = "select \"product_class_id\", \"brand_name\"\n"
+ "from \"product\"\n"
+ "group by rollup(\"product_class_id\", \"brand_name\")\n"
+ "order by 1, 2";
final String expected = "SELECT \"product_class_id\", \"brand_name\"\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "GROUP BY ROLLUP(\"product_class_id\", \"brand_name\")\n"
+ "ORDER BY \"product_class_id\", \"brand_name\"";
final String expectedMySql = "SELECT `product_class_id`, `brand_name`\n"
+ "FROM `foodmart`.`product`\n"
+ "GROUP BY `product_class_id`, `brand_name` WITH ROLLUP";
final String expectedMySql8 = "SELECT `product_class_id`, `brand_name`\n"
+ "FROM `foodmart`.`product`\n"
+ "GROUP BY ROLLUP(`product_class_id`, `brand_name`)\n"
+ "ORDER BY `product_class_id` NULLS LAST, `brand_name` NULLS LAST";
final String expectedHive = "SELECT product_class_id, brand_name\n"
+ "FROM foodmart.product\n"
+ "GROUP BY product_class_id, brand_name WITH ROLLUP";
sql(query)
.ok(expected)
.withMysql()
.ok(expectedMySql)
.withMysql8()
.ok(expectedMySql8)
.withHive()
.ok(expectedHive);
}

/** As {@link #testSelectQueryWithGroupByRollup()},
 * but ORDER BY columns reversed. */
@Test void testSelectQueryWithGroupByRollup2() {
final String query = "select \"product_class_id\", \"brand_name\"\n"
+ "from \"product\"\n"
+ "group by rollup(\"product_class_id\", \"brand_name\")\n"
+ "order by 2, 1";
final String expected = "SELECT \"product_class_id\", \"brand_name\"\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "GROUP BY ROLLUP(\"product_class_id\", \"brand_name\")\n"
+ "ORDER BY \"brand_name\", \"product_class_id\"";
final String expectedMySql = "SELECT `product_class_id`, `brand_name`\n"
+ "FROM `foodmart`.`product`\n"
+ "GROUP BY `brand_name`, `product_class_id` WITH ROLLUP";
final String expectedHive = "SELECT product_class_id, brand_name\n"
+ "FROM foodmart.product\n"
+ "GROUP BY brand_name, product_class_id WITH ROLLUP";
sql(query)
.ok(expected)
.withMysql()
.ok(expectedMySql)
.withHive()
.ok(expectedHive);
}

// BigQuery groups literals by ordinal position and aliases by name.
@Test public void testSimpleSelectWithGroupByAlias() {
final String query = "select 'literal' as \"a\", sku + 1 as b from"
+ " \"product\" group by 'literal', sku + 1";
final String bigQueryExpected = "SELECT 'literal' AS a, SKU + 1 AS B\n"
+ "FROM foodmart.product\n"
+ "GROUP BY 1, B";
sql(query)
.withBigQuery()
.ok(bigQueryExpected);
}

@Test public void testSimpleSelectWithGroupByAliasAndAggregate() {
final String query = "select 'literal' as \"a\", sku + 1 as \"b\", sum(\"product_id\") from"
+ " \"product\" group by sku + 1, 'literal'";
final String bigQueryExpected = "SELECT 'literal' AS a, SKU + 1 AS b, SUM(product_id)\n"
+ "FROM foodmart.product\n"
+ "GROUP BY b, 1";
sql(query)
.withBigQuery()
.ok(bigQueryExpected);
}

@Test public void testDuplicateLiteralInSelectForGroupBy() {
final String query = "select '1' as \"a\", sku + 1 as b, '1' as \"d\" from"
+ " \"product\" group by '1', sku + 1";
final String expectedSql = "SELECT '1' a, SKU + 1 B, '1' d\n"
+ "FROM foodmart.product\n"
+ "GROUP BY '1', SKU + 1";
final String bigQueryExpected = "SELECT '1' AS a, SKU + 1 AS B, '1' AS d\n"
+ "FROM foodmart.product\n"
+ "GROUP BY 1, B";
sql(query)
.withHive()
.ok(expectedSql)
.withSpark()
.ok(expectedSql)
.withBigQuery()
.ok(bigQueryExpected);
}
/** Tests a query with GROUP BY and a sub-query which is also with GROUP BY.
 * If we flatten sub-queries, the number of rows going into AVG becomes
 * incorrect. */
@Test void testSelectQueryWithGroupBySubQuery1() {
final String query = "select \"product_class_id\", avg(\"product_id\")\n"
+ "from (select \"product_class_id\", \"product_id\", avg(\"product_class_id\")\n"
+ "from \"product\"\n"
+ "group by \"product_class_id\", \"product_id\") as t\n"
+ "group by \"product_class_id\"";
final String expected = "SELECT \"product_class_id\", AVG(\"product_id\")\n"
+ "FROM (SELECT \"product_class_id\", \"product_id\"\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "GROUP BY \"product_class_id\", \"product_id\") AS \"t1\"\n"
+ "GROUP BY \"product_class_id\"";
sql(query).ok(expected);
}

/** Tests query without GROUP BY but an aggregate function
 * and a sub-query which is with GROUP BY. */
@Test void testSelectQueryWithGroupBySubQuery2() {
final String query = "select sum(\"product_id\")\n"
+ "from (select \"product_class_id\", \"product_id\"\n"
+ "from \"product\"\n"
+ "group by \"product_class_id\", \"product_id\") as t";
final String expected = "SELECT SUM(\"product_id\")\n"
+ "FROM (SELECT \"product_id\"\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "GROUP BY \"product_class_id\", \"product_id\") AS \"t1\"";
final String expectedMysql = "SELECT SUM(`product_id`)\n"
+ "FROM (SELECT `product_id`\n"
+ "FROM `foodmart`.`product`\n"
+ "GROUP BY `product_class_id`, `product_id`) AS `t1`";
sql(query)
.ok(expected)
.withMysql()
.ok(expectedMysql);
// Equivalent sub-query that uses SELECT DISTINCT
final String query2 = "select sum(\"product_id\")\n"
+ "from (select distinct \"product_class_id\", \"product_id\"\n"
+ " from \"product\") as t";
sql(query2)
.ok(expected)
.withMysql()
.ok(expectedMysql);
}

/** CUBE of one column is equivalent to ROLLUP, and Calcite recognizes
 * this. */
@Test void testSelectQueryWithSingletonCube() {
final String query = "select \"product_class_id\", count(*) as c\n"
+ "from \"product\"\n"
+ "group by cube(\"product_class_id\")\n"
+ "order by 1, 2";
final String expected = "SELECT \"product_class_id\", COUNT(*) AS \"C\"\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "GROUP BY ROLLUP(\"product_class_id\")\n"
+ "ORDER BY \"product_class_id\", \"C\"";
final String expectedMySql = "SELECT `product_class_id`, COUNT(*) AS `C`\n"
+ "FROM `foodmart`.`product`\n"
+ "GROUP BY `product_class_id` WITH ROLLUP\n"
+ "ORDER BY `product_class_id` IS NULL, `product_class_id`,"
+ " `C` IS NULL, `C`";
final String expectedHive = "SELECT product_class_id, COUNT(*) C\n"
+ "FROM foodmart.product\n"
+ "GROUP BY product_class_id WITH ROLLUP\n"
+ "ORDER BY product_class_id IS NULL, product_class_id,"
+ " C IS NULL, C";
final String expectedPresto = "SELECT \"product_class_id\", COUNT(*) AS \"C\"\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "GROUP BY ROLLUP(\"product_class_id\")\n"
+ "ORDER BY \"product_class_id\" IS NULL, \"product_class_id\", "
+ "COUNT(*) IS NULL, COUNT(*)";
sql(query)
.ok(expected)
.withMysql()
.ok(expectedMySql)
.withPresto()
.ok(expectedPresto)
.withHive()
.ok(expectedHive);
}

/** As {@link #testSelectQueryWithSingletonCube()}, but no ORDER BY
 * clause. */
@Test void testSelectQueryWithSingletonCubeNoOrderBy() {
final String query = "select \"product_class_id\", count(*) as c\n"
+ "from \"product\"\n"
+ "group by cube(\"product_class_id\")";
final String expected = "SELECT \"product_class_id\", COUNT(*) AS \"C\"\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "GROUP BY ROLLUP(\"product_class_id\")";
final String expectedMySql = "SELECT `product_class_id`, COUNT(*) AS `C`\n"
+ "FROM `foodmart`.`product`\n"
+ "GROUP BY `product_class_id` WITH ROLLUP";
final String expectedPresto = "SELECT \"product_class_id\", COUNT(*) AS \"C\"\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "GROUP BY ROLLUP(\"product_class_id\")";
final String expectedHive = "SELECT product_class_id, COUNT(*) C\n"
+ "FROM foodmart.product\n"
+ "GROUP BY product_class_id WITH ROLLUP";
sql(query)
.ok(expected)
.withMysql()
.ok(expectedMySql)
.withPresto()
.ok(expectedPresto)
.withHive()
.ok(expectedHive);
}
/** Cannot rewrite if ORDER BY contains a column not in GROUP BY (in this
 * case COUNT(*)). */
@Test void testSelectQueryWithRollupOrderByCount() {
final String query = "select \"product_class_id\", \"brand_name\",\n"
+ " count(*) as c\n"
+ "from \"product\"\n"
+ "group by rollup(\"product_class_id\", \"brand_name\")\n"
+ "order by 1, 2, 3";
final String expected = "SELECT \"product_class_id\", \"brand_name\","
+ " COUNT(*) AS \"C\"\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "GROUP BY ROLLUP(\"product_class_id\", \"brand_name\")\n"
+ "ORDER BY \"product_class_id\", \"brand_name\", \"C\"";
final String expectedMySql = "SELECT `product_class_id`, `brand_name`,"
+ " COUNT(*) AS `C`\n"
+ "FROM `foodmart`.`product`\n"
+ "GROUP BY `product_class_id`, `brand_name` WITH ROLLUP\n"
+ "ORDER BY `product_class_id` IS NULL, `product_class_id`,"
+ " `brand_name` IS NULL, `brand_name`,"
+ " `C` IS NULL, `C`";
final String expectedHive = "SELECT product_class_id, brand_name,"
+ " COUNT(*) C\n"
+ "FROM foodmart.product\n"
+ "GROUP BY product_class_id, brand_name WITH ROLLUP\n"
+ "ORDER BY product_class_id IS NULL, product_class_id,"
+ " brand_name IS NULL, brand_name,"
+ " C IS NULL, C";
sql(query)
.ok(expected)
.withMysql()
.ok(expectedMySql)
.withHive()
.ok(expectedHive);
}

/** As {@link #testSelectQueryWithSingletonCube()}, but with LIMIT. */
@Test void testSelectQueryWithCubeLimit() {
final String query = "select \"product_class_id\", count(*) as c\n"
+ "from \"product\"\n"
+ "group by cube(\"product_class_id\")\n"
+ "limit 5";
final String expected = "SELECT \"product_class_id\", COUNT(*) AS \"C\"\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "GROUP BY ROLLUP(\"product_class_id\")\n"
+ "FETCH NEXT 5 ROWS ONLY";
// If a MySQL 5 query has GROUP BY ... ROLLUP, you cannot add ORDER BY,
// but you can add LIMIT.
final String expectedMySql = "SELECT `product_class_id`, COUNT(*) AS `C`\n"
+ "FROM `foodmart`.`product`\n"
+ "GROUP BY `product_class_id` WITH ROLLUP\n"
+ "LIMIT 5";
final String expectedPresto = "SELECT \"product_class_id\", COUNT(*) AS \"C\"\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "GROUP BY ROLLUP(\"product_class_id\")\n"
+ "LIMIT 5";
final String expectedHive = "SELECT product_class_id, COUNT(*) C\n"
+ "FROM foodmart.product\n"
+ "GROUP BY product_class_id WITH ROLLUP\n"
+ "LIMIT 5";
sql(query)
.ok(expected)
.withMysql()
.ok(expectedMySql)
.withPresto()
.ok(expectedPresto)
.withHive()
.ok(expectedHive);
}

// --- Tests: basic aggregate functions with GROUP BY. ---
@Test void testSelectQueryWithMinAggregateFunction() {
String query = "select min(\"net_weight\") from \"product\" group by \"product_class_id\" ";
final String expected = "SELECT MIN(\"net_weight\")\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "GROUP BY \"product_class_id\"";
sql(query).ok(expected);
}

@Test void testSelectQueryWithMinAggregateFunction1() {
String query = "select \"product_class_id\", min(\"net_weight\") from"
+ " \"product\" group by \"product_class_id\"";
final String expected = "SELECT \"product_class_id\", MIN(\"net_weight\")\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "GROUP BY \"product_class_id\"";
sql(query).ok(expected);
}

@Test void testSelectQueryWithSumAggregateFunction() {
String query =
"select sum(\"net_weight\") from \"product\" group by \"product_class_id\" ";
final String expected = "SELECT SUM(\"net_weight\")\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "GROUP BY \"product_class_id\"";
sql(query).ok(expected);
}

@Test void testSelectQueryWithMultipleAggregateFunction() {
String query = "select sum(\"net_weight\"), min(\"low_fat\"), count(*)"
+ " from \"product\" group by \"product_class_id\" ";
final String expected = "SELECT SUM(\"net_weight\"), MIN(\"low_fat\"),"
+ " COUNT(*)\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "GROUP BY \"product_class_id\"";
sql(query).ok(expected);
}

@Test void testSelectQueryWithMultipleAggregateFunction1() {
String query = "select \"product_class_id\","
+ " sum(\"net_weight\"), min(\"low_fat\"), count(*)"
+ " from \"product\" group by \"product_class_id\" ";
final String expected = "SELECT \"product_class_id\","
+ " SUM(\"net_weight\"), MIN(\"low_fat\"), COUNT(*)\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "GROUP BY \"product_class_id\"";
sql(query).ok(expected);
}

@Test void testSelectQueryWithGroupByAndProjectList() {
String query = "select \"product_class_id\", \"product_id\", count(*) "
+ "from \"product\" group by \"product_class_id\", \"product_id\" ";
final String expected = "SELECT \"product_class_id\", \"product_id\","
+ " COUNT(*)\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "GROUP BY \"product_class_id\", \"product_id\"";
sql(query).ok(expected);
}
  // TODO(review): commented-out group-by-alias tests — re-enable once they
  // pass, or delete them; no reason for disabling is recorded here.
  /*@Test public void testGroupByAliasReplacementWithGroupByExpression() {
String query = "select \"product_class_id\" + \"product_id\" as product_id, "
+ "\"product_id\" + 2 as prod_id, count(1) as num_records"
+ " from \"product\""
+ " group by \"product_class_id\" + \"product_id\", \"product_id\" + 2";
final String expected = "SELECT product_class_id + product_id AS PRODUCT_ID,"
+ " product_id + 2 AS PROD_ID,"
+ " COUNT(*) AS NUM_RECORDS\n"
+ "FROM foodmart.product\n"
+ "GROUP BY product_class_id + product_id, PROD_ID";
sql(query).withBigQuery().ok(expected);
}
@Test public void testGroupByAliasReplacementWithGroupByExpression2() {
String query = "select "
+ "(case when \"product_id\" = 1 then \"product_id\" else 1234 end)"
+ " as product_id, count(1) as num_records from \"product\""
+ " group by (case when \"product_id\" = 1 then \"product_id\" else 1234 end)";
final String expected = "SELECT "
+ "CASE WHEN product_id = 1 THEN product_id ELSE 1234 END AS PRODUCT_ID,"
+ " COUNT(*) AS NUM_RECORDS\n"
+ "FROM foodmart.product\n"
+ "GROUP BY CASE WHEN product_id = 1 THEN product_id ELSE 1234 END";
sql(query).withBigQuery().ok(expected);
}*/
@Test void testCastDecimal1() {
final String query = "select -0.0000000123\n"
+ " from \"expense_fact\"";
final String expected = "SELECT -1.23E-8\n"
+ "FROM \"foodmart\".\"expense_fact\"";
sql(query).ok(expected);
}
/** Test case for
* <a href="https://issues.apache.org/jira/browse/CALCITE-2713">[CALCITE-2713]
* JDBC adapter may generate casts on PostgreSQL for VARCHAR type exceeding
* max length</a>. */
@Test void testCastLongVarchar1() {
final String query = "select cast(\"store_id\" as VARCHAR(10485761))\n"
+ " from \"expense_fact\"";
final String expectedPostgreSQL = "SELECT CAST(\"store_id\" AS VARCHAR(256))\n"
+ "FROM \"foodmart\".\"expense_fact\"";
sql(query)
.withPostgresqlModifiedTypeSystem()
.ok(expectedPostgreSQL);
final String expectedOracle = "SELECT CAST(\"store_id\" AS VARCHAR(512))\n"
+ "FROM \"foodmart\".\"expense_fact\"";
sql(query)
.withOracleModifiedTypeSystem()
.ok(expectedOracle);
}
/** Test case for
* <a href="https://issues.apache.org/jira/browse/CALCITE-2713">[CALCITE-2713]
* JDBC adapter may generate casts on PostgreSQL for VARCHAR type exceeding
* max length</a>. */
@Test void testCastLongVarchar2() {
final String query = "select cast(\"store_id\" as VARCHAR(175))\n"
+ " from \"expense_fact\"";
final String expectedPostgreSQL = "SELECT CAST(\"store_id\" AS VARCHAR(175))\n"
+ "FROM \"foodmart\".\"expense_fact\"";
sql(query)
.withPostgresqlModifiedTypeSystem()
.ok(expectedPostgreSQL);
final String expectedOracle = "SELECT CAST(\"store_id\" AS VARCHAR(175))\n"
+ "FROM \"foodmart\".\"expense_fact\"";
sql(query)
.withOracleModifiedTypeSystem()
.ok(expectedOracle);
}
  /** Test case for
   * <a href="https://issues.apache.org/jira/browse/CALCITE-1174">[CALCITE-1174]
   * When generating SQL, translate SUM0(x) to COALESCE(SUM(x), 0)</a>. */
  @Test void testSum0BecomesCoalesce() {
    // Aggregate using SUM0; dialects without a native SUM0 must emit
    // COALESCE(SUM(x), 0) instead.
    final Function<RelBuilder, RelNode> fn = b -> b.scan("EMP")
        .aggregate(b.groupKey(),
            b.aggregateCall(SqlStdOperatorTable.SUM0, b.field(3))
                .as("s"))
        .build();
    final String expectedMysql = "SELECT COALESCE(SUM(`MGR`), 0) AS `s`\n"
        + "FROM `scott`.`EMP`";
    final String expectedPostgresql = "SELECT COALESCE(SUM(\"MGR\"), 0) AS \"s\"\n"
        + "FROM \"scott\".\"EMP\"";
    relFn(fn)
        .withPostgresql()
        .ok(expectedPostgresql)
        .withMysql()
        .ok(expectedMysql);
  }
  /** As {@link #testSum0BecomesCoalesce()} but for windowed aggregates. */
  @Test void testWindowedSum0BecomesCoalesce() {
    final String query = "select\n"
        + " AVG(\"net_weight\") OVER (order by \"product_id\" rows 3 preceding)\n"
        + "from \"foodmart\".\"product\"";
    // Windowed AVG is expanded to windowed SUM / COUNT, with the SUM wrapped
    // in COALESCE(..., 0) guarded by a COUNT > 0 check.
    final String expectedPostgresql = "SELECT CASE WHEN (COUNT(\"net_weight\")"
        + " OVER (ORDER BY \"product_id\" ROWS BETWEEN 3 PRECEDING AND CURRENT ROW)) > 0 "
        + "THEN COALESCE(SUM(\"net_weight\")"
        + " OVER (ORDER BY \"product_id\" ROWS BETWEEN 3 PRECEDING AND CURRENT ROW), 0)"
        + " ELSE NULL END / (COUNT(\"net_weight\")"
        + " OVER (ORDER BY \"product_id\" ROWS BETWEEN 3 PRECEDING AND CURRENT ROW))\n"
        + "FROM \"foodmart\".\"product\"";
    sql(query)
        .withPostgresql()
        .ok(expectedPostgresql);
  }
  /** Test case for
   * <a href="https://issues.apache.org/jira/browse/CALCITE-2722">[CALCITE-2722]
   * SqlImplementor createLeftCall method throws StackOverflowError</a>. */
  @Test void testStack() {
    // Build a filter with ~10000 OR-ed equality conditions; the conversion
    // and the SqlShuttle traversal must finish without StackOverflowError.
    final Function<RelBuilder, RelNode> relFn = b -> b
        .scan("EMP")
        .filter(
            b.or(
                IntStream.range(1, 10000)
                    .mapToObj(i -> b.equals(b.field("EMPNO"), b.literal(i)))
                    .collect(Collectors.toList())))
        .build();
    final SqlDialect dialect = SqlDialect.DatabaseProduct.CALCITE.getDialect();
    final RelNode root = relFn.apply(relBuilder());
    final RelToSqlConverter converter = new RelToSqlConverter(dialect);
    final SqlNode sqlNode = converter.visitRoot(root).asStatement();
    // Only termination is checked; the generated SQL text is not inspected.
    final String sqlString = sqlNode.accept(new SqlShuttle())
        .toSqlString(dialect).getSql();
    assertThat(sqlString, notNullValue());
  }
@Test void testAntiJoin() {
final RelBuilder builder = relBuilder();
final RelNode root = builder
.scan("DEPT")
.scan("EMP")
.join(
JoinRelType.ANTI, builder.equals(
builder.field(2, 1, "DEPTNO"),
builder.field(2, 0, "DEPTNO")))
.project(builder.field("DEPTNO"))
.build();
final String expectedSql = "SELECT \"DEPTNO\"\n"
+ "FROM \"scott\".\"DEPT\"\n"
+ "WHERE NOT EXISTS (SELECT 1\n"
+ "FROM \"scott\".\"EMP\"\n"
+ "WHERE \"DEPT\".\"DEPTNO\" = \"EMP\".\"DEPTNO\")";
assertThat(toSql(root), isLinux(expectedSql));
}
@Test void testSemiJoin() {
final RelBuilder builder = relBuilder();
final RelNode root = builder
.scan("DEPT")
.scan("EMP")
.join(
JoinRelType.SEMI, builder.equals(
builder.field(2, 1, "DEPTNO"),
builder.field(2, 0, "DEPTNO")))
.project(builder.field("DEPTNO"))
.build();
final String expectedSql = "SELECT \"DEPTNO\"\n"
+ "FROM \"scott\".\"DEPT\"\n"
+ "WHERE EXISTS (SELECT 1\n"
+ "FROM \"scott\".\"EMP\"\n"
+ "WHERE \"DEPT\".\"DEPTNO\" = \"EMP\".\"DEPTNO\")";
assertThat(toSql(root), isLinux(expectedSql));
}
  @Test void testSemiJoinFilter() {
    // A filter on EMP before the semi-join must end up inside the EXISTS
    // sub-query.
    final RelBuilder builder = relBuilder();
    final RelNode root = builder
        .scan("DEPT")
        .scan("EMP")
        .filter(
            builder.call(SqlStdOperatorTable.GREATER_THAN,
                builder.field(builder.peek().getRowType().getField("EMPNO", false, false).getIndex()),
                builder.literal((short) 10)))
        .join(
            JoinRelType.SEMI, builder.equals(
                builder.field(2, 1, "DEPTNO"),
                builder.field(2, 0, "DEPTNO")))
        .project(builder.field("DEPTNO"))
        .build();
    final String expectedSql = "SELECT \"DEPTNO\"\n"
        + "FROM \"scott\".\"DEPT\"\n"
        + "WHERE EXISTS (SELECT 1\n"
        + "FROM (SELECT *\n"
        + "FROM \"scott\".\"EMP\"\n"
        + "WHERE \"EMPNO\" > 10) AS \"t\"\n"
        + "WHERE \"DEPT\".\"DEPTNO\" = \"t\".\"DEPTNO\")";
    assertThat(toSql(root), isLinux(expectedSql));
  }
  @Test void testSemiJoinProject() {
    // A projection on EMP before the semi-join must end up inside the EXISTS
    // sub-query.
    final RelBuilder builder = relBuilder();
    final RelNode root = builder
        .scan("DEPT")
        .scan("EMP")
        .project(
            builder.field(builder.peek().getRowType().getField("EMPNO", false, false).getIndex()),
            builder.field(builder.peek().getRowType().getField("DEPTNO", false, false).getIndex()))
        .join(
            JoinRelType.SEMI, builder.equals(
                builder.field(2, 1, "DEPTNO"),
                builder.field(2, 0, "DEPTNO")))
        .project(builder.field("DEPTNO"))
        .build();
    final String expectedSql = "SELECT \"DEPTNO\"\n"
        + "FROM \"scott\".\"DEPT\"\n"
        + "WHERE EXISTS (SELECT 1\n"
        + "FROM (SELECT \"EMPNO\", \"DEPTNO\"\n"
        + "FROM \"scott\".\"EMP\") AS \"t\"\n"
        + "WHERE \"DEPT\".\"DEPTNO\" = \"t\".\"DEPTNO\")";
    assertThat(toSql(root), isLinux(expectedSql));
  }
  @Test void testSemiNestedJoin() {
    // The right side of the semi-join is itself an inner self-join of EMP;
    // that join must be rendered inside the EXISTS sub-query.
    final RelBuilder builder = relBuilder();
    final RelNode base = builder
        .scan("EMP")
        .scan("EMP")
        .join(
            JoinRelType.INNER, builder.equals(
                builder.field(2, 0, "EMPNO"),
                builder.field(2, 1, "EMPNO")))
        .build();
    final RelNode root = builder
        .scan("DEPT")
        .push(base)
        .join(
            JoinRelType.SEMI, builder.equals(
                builder.field(2, 1, "DEPTNO"),
                builder.field(2, 0, "DEPTNO")))
        .project(builder.field("DEPTNO"))
        .build();
    final String expectedSql = "SELECT \"DEPTNO\"\n"
        + "FROM \"scott\".\"DEPT\"\n"
        + "WHERE EXISTS (SELECT 1\n"
        + "FROM \"scott\".\"EMP\"\n"
        + "INNER JOIN \"scott\".\"EMP\" AS \"EMP0\" ON \"EMP\".\"EMPNO\" = \"EMP0\".\"EMPNO\"\n"
        + "WHERE \"DEPT\".\"DEPTNO\" = \"EMP\".\"DEPTNO\")";
    assertThat(toSql(root), isLinux(expectedSql));
  }
  /** Test case for
   * <a href="https://issues.apache.org/jira/browse/CALCITE-2792">[CALCITE-2792]
   * Stackoverflow while evaluating filter with large number of OR conditions</a>. */
  // NOTE(review): disabled; no reason recorded here — presumably the expected
  // IN-list simplification is not yet produced. Confirm before re-enabling.
  @Disabled
  @Test void testBalancedBinaryCall() {
    final Function<RelBuilder, RelNode> relFn = b -> b
        .scan("EMP")
        .filter(
            b.and(
                b.or(IntStream.range(0, 4)
                    .mapToObj(i -> b.equals(b.field("EMPNO"), b.literal(i)))
                    .collect(Collectors.toList())),
                b.or(IntStream.range(5, 8)
                    .mapToObj(i -> b.equals(b.field("DEPTNO"), b.literal(i)))
                    .collect(Collectors.toList()))))
        .build();
    final String expected = "SELECT *\n"
        + "FROM \"scott\".\"EMP\"\n"
        + "WHERE \"EMPNO\" IN (0, 1, 2, 3) AND \"DEPTNO\" IN (5, 6, 7)";
    relFn(relFn).ok(expected);
  }
  /** Test case for
   * <a href="https://issues.apache.org/jira/browse/CALCITE-1946">[CALCITE-1946]
   * JDBC adapter should generate sub-SELECT if dialect does not support nested
   * aggregate functions</a>. */
  @Test void testNestedAggregates() {
    // PostgreSQL, MySQL, Vertica do not support nested aggregate functions, so
    // for these, the JDBC adapter generates a SELECT in the FROM clause.
    // Oracle can do it in a single SELECT.
    final String query = "select\n"
        + " SUM(\"net_weight1\") as \"net_weight_converted\"\n"
        + " from ("
        + " select\n"
        + " SUM(\"net_weight\") as \"net_weight1\"\n"
        + " from \"foodmart\".\"product\"\n"
        + " group by \"product_id\")";
    // Oracle: nested aggregate SUM(SUM(...)) in a single SELECT.
    final String expectedOracle = "SELECT SUM(SUM(\"net_weight\")) \"net_weight_converted\"\n"
        + "FROM \"foodmart\".\"product\"\n"
        + "GROUP BY \"product_id\"";
    // The remaining dialects get the inner aggregate as a sub-query.
    final String expectedMySQL = "SELECT SUM(`net_weight1`) AS `net_weight_converted`\n"
        + "FROM (SELECT SUM(`net_weight`) AS `net_weight1`\n"
        + "FROM `foodmart`.`product`\n"
        + "GROUP BY `product_id`) AS `t1`";
    final String expectedPostgresql = "SELECT SUM(\"net_weight1\") AS \"net_weight_converted\"\n"
        + "FROM (SELECT SUM(\"net_weight\") AS \"net_weight1\"\n"
        + "FROM \"foodmart\".\"product\"\n"
        + "GROUP BY \"product_id\") AS \"t1\"";
    final String expectedVertica = expectedPostgresql;
    final String expectedBigQuery = "SELECT SUM(net_weight1) AS net_weight_converted\n"
        + "FROM (SELECT SUM(net_weight) AS net_weight1\n"
        + "FROM foodmart.product\n"
        + "GROUP BY product_id) AS t1";
    final String expectedHive = "SELECT SUM(net_weight1) net_weight_converted\n"
        + "FROM (SELECT SUM(net_weight) net_weight1\n"
        + "FROM foodmart.product\n"
        + "GROUP BY product_id) t1";
    final String expectedSpark = expectedHive;
    sql(query)
        .withOracle()
        .ok(expectedOracle)
        .withMysql()
        .ok(expectedMySQL)
        .withVertica()
        .ok(expectedVertica)
        .withPostgresql()
        .ok(expectedPostgresql)
        .withBigQuery()
        .ok(expectedBigQuery)
        .withHive()
        .ok(expectedHive)
        .withSpark()
        .ok(expectedSpark);
  }
@Test public void testAnalyticalFunctionInAggregate() {
final String query = "select\n"
+ "MAX(\"rnk\") AS \"rnk1\""
+ " from ("
+ " select\n"
+ " rank() over (order by \"hire_date\") AS \"rnk\""
+ " from \"foodmart\".\"employee\"\n)";
final String expectedSql = "SELECT MAX(RANK() OVER (ORDER BY \"hire_date\")) AS \"rnk1\"\n"
+ "FROM \"foodmart\".\"employee\"";
final String expectedHive = "SELECT MAX(rnk) rnk1\n"
+ "FROM (SELECT RANK() OVER (ORDER BY hire_date NULLS LAST) rnk\n"
+ "FROM foodmart.employee) t";
final String expectedSpark = "SELECT MAX(rnk) rnk1\n"
+ "FROM (SELECT RANK() OVER (ORDER BY hire_date NULLS LAST) rnk\n"
+ "FROM foodmart.employee) t";
final String expectedBigQuery = "SELECT MAX(rnk) AS rnk1\n"
+ "FROM (SELECT RANK() OVER (ORDER BY hire_date IS NULL, hire_date) AS rnk\n"
+ "FROM foodmart.employee) AS t";
sql(query)
.ok(expectedSql)
.withHive2()
.ok(expectedHive)
.withSpark()
.ok(expectedSpark)
.withBigQuery()
.ok(expectedBigQuery);
}
@Test public void testAnalyticalFunctionInAggregate1() {
final String query = "select\n"
+ "MAX(\"rnk\") AS \"rnk1\""
+ " from ("
+ " select\n"
+ " case when rank() over (order by \"hire_date\") = 1"
+ " then 100"
+ " else 200"
+ " end as \"rnk\""
+ " from \"foodmart\".\"employee\"\n)";
final String expectedSql = "SELECT MAX(CASE WHEN (RANK() OVER (ORDER BY \"hire_date\")) = 1 "
+ "THEN 100 ELSE 200 END) AS \"rnk1\"\n"
+ "FROM \"foodmart\".\"employee\"";
final String expectedHive = "SELECT MAX(rnk) rnk1\n"
+ "FROM (SELECT CASE WHEN (RANK() OVER (ORDER BY hire_date NULLS LAST)) = 1"
+ " THEN 100 ELSE 200 END rnk\n"
+ "FROM foodmart.employee) t";
final String expectedSpark = "SELECT MAX(rnk) rnk1\n"
+ "FROM (SELECT CASE WHEN (RANK() OVER (ORDER BY hire_date NULLS LAST)) = 1 "
+ "THEN 100 ELSE 200 END rnk\n"
+ "FROM foodmart.employee) t";
final String expectedBigQuery = "SELECT MAX(rnk) AS rnk1\n"
+ "FROM (SELECT CASE WHEN (RANK() OVER (ORDER BY hire_date IS NULL, hire_date)) = 1 "
+ "THEN 100 ELSE 200 END AS rnk\n"
+ "FROM foodmart.employee) AS t";
sql(query)
.ok(expectedSql)
.withHive2()
.ok(expectedHive)
.withSpark()
.ok(expectedSpark)
.withBigQuery()
.ok(expectedBigQuery);
}
@Test public void testAnalyticalFunctionInGroupByWhereAnalyticalFunctionIsInputOfOtherFunction() {
final String query = "select\n"
+ "\"rnk\""
+ " from ("
+ " select\n"
+ " CASE WHEN \"salary\"=20 THEN MAX(\"salary\") OVER(PARTITION BY \"position_id\") END AS \"rnk\""
+ " from \"foodmart\".\"employee\"\n) group by \"rnk\"";
final String expectedSql = "SELECT CASE WHEN CAST(\"salary\" AS DECIMAL(14, 4)) = 20 THEN"
+ " MAX(\"salary\") OVER (PARTITION BY \"position_id\" RANGE BETWEEN UNBOUNDED "
+ "PRECEDING AND UNBOUNDED FOLLOWING) ELSE NULL END AS \"rnk\"\n"
+ "FROM \"foodmart\".\"employee\"\n"
+ "GROUP BY CASE WHEN CAST(\"salary\" AS DECIMAL(14, 4)) = 20 THEN MAX"
+ "(\"salary\") OVER (PARTITION BY \"position_id\" RANGE BETWEEN UNBOUNDED "
+ "PRECEDING AND UNBOUNDED FOLLOWING) ELSE NULL END";
final String expectedHive = "SELECT CASE WHEN CAST(salary AS DECIMAL(14, 4)) = 20 THEN MAX"
+ "(salary) OVER (PARTITION BY position_id RANGE BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED "
+ "FOLLOWING) ELSE NULL END rnk\n"
+ "FROM foodmart.employee\n"
+ "GROUP BY CASE WHEN CAST(salary AS DECIMAL(14, 4)) = 20 THEN MAX(salary) OVER "
+ "(PARTITION BY position_id RANGE BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) "
+ "ELSE NULL END";
final String expectedSpark = expectedHive;
final String expectedBigQuery = "SELECT rnk\n"
+ "FROM (SELECT CASE WHEN CAST(salary AS NUMERIC) = 20 THEN MAX(salary) OVER "
+ "(PARTITION BY position_id RANGE BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) "
+ "ELSE NULL END AS rnk\n"
+ "FROM foodmart.employee) AS t\n"
+ "GROUP BY rnk";
final String mssql = "SELECT CASE WHEN CAST([salary] AS DECIMAL(14, 4)) = 20 THEN MAX("
+ "[salary]) OVER (PARTITION BY [position_id] ORDER BY [salary] ROWS BETWEEN UNBOUNDED "
+ "PRECEDING AND UNBOUNDED FOLLOWING) ELSE NULL END AS [rnk]\n"
+ "FROM [foodmart].[employee]\n"
+ "GROUP BY CASE WHEN CAST([salary] AS DECIMAL(14, 4)) = 20 THEN MAX([salary]) OVER "
+ "(PARTITION BY [position_id] ORDER BY [salary] ROWS BETWEEN UNBOUNDED PRECEDING AND "
+ "UNBOUNDED FOLLOWING) ELSE NULL END";
sql(query)
.ok(expectedSql)
.withHive()
.ok(expectedHive)
.withSpark()
.ok(expectedSpark)
.withBigQuery()
.ok(expectedBigQuery)
.withMssql()
.ok(mssql);
}
@Test public void testAnalyticalFunctionInGroupByWhereAnalyticalFunctionIsInput() {
final String query = "select\n"
+ "\"rnk\""
+ " from ("
+ " select\n"
+ " case when row_number() over (PARTITION by \"hire_date\") = 1 THEN 100 else 200 END AS \"rnk\""
+ " from \"foodmart\".\"employee\"\n) group by \"rnk\"";
final String expectedSql = "SELECT CASE WHEN (ROW_NUMBER() OVER (PARTITION BY \"hire_date\"))"
+ " = 1 THEN 100 ELSE 200 END AS \"rnk\"\n"
+ "FROM \"foodmart\".\"employee\"\n"
+ "GROUP BY CASE WHEN"
+ " (ROW_NUMBER() OVER (PARTITION BY \"hire_date\")) = 1 THEN 100 ELSE 200 END";
final String expectedHive = "SELECT CASE WHEN (ROW_NUMBER() OVER (PARTITION BY hire_date)) = "
+ "1 THEN 100 ELSE 200 END rnk\n"
+ "FROM foodmart.employee\n"
+ "GROUP BY CASE WHEN (ROW_NUMBER() "
+ "OVER (PARTITION BY hire_date)) = 1 THEN 100 ELSE 200 END";
final String expectedSpark = expectedHive;
final String expectedBigQuery = "SELECT rnk\n"
+ "FROM (SELECT CASE WHEN (ROW_NUMBER() OVER "
+ "(PARTITION BY hire_date)) = 1 THEN 100 ELSE 200 END AS rnk\n"
+ "FROM foodmart.employee) AS t\n"
+ "GROUP BY rnk";
final String mssql = "SELECT CASE WHEN (ROW_NUMBER() OVER (PARTITION BY [hire_date])) = 1 "
+ "THEN 100 ELSE 200 END AS [rnk]\n"
+ "FROM [foodmart].[employee]\nGROUP BY CASE WHEN "
+ "(ROW_NUMBER() OVER (PARTITION BY [hire_date])) = 1 THEN 100 ELSE 200 END";
sql(query)
.ok(expectedSql)
.withHive()
.ok(expectedHive)
.withSpark()
.ok(expectedSpark)
.withBigQuery()
.ok(expectedBigQuery)
.withMssql()
.ok(mssql);
}
/** Test case for
* <a href="https://issues.apache.org/jira/browse/CALCITE-2628">[CALCITE-2628]
* JDBC adapter throws NullPointerException while generating GROUP BY query
* for MySQL</a>.
*
* <p>MySQL does not support nested aggregates, so {@link RelToSqlConverter}
* performs some extra checks, looking for aggregates in the input
* sub-query, and these would fail with {@code NullPointerException}
* and {@code ClassCastException} in some cases. */
@Test void testNestedAggregatesMySqlTable() {
final Function<RelBuilder, RelNode> relFn = b -> b
.scan("EMP")
.aggregate(b.groupKey(),
b.count(false, "c", b.field(3)))
.build();
final String expectedSql = "SELECT COUNT(`MGR`) AS `c`\n"
+ "FROM `scott`.`EMP`";
relFn(relFn).withMysql().ok(expectedSql);
}
/** As {@link #testNestedAggregatesMySqlTable()}, but input is a sub-query,
* not a table. */
@Test void testNestedAggregatesMySqlStar() {
final Function<RelBuilder, RelNode> relFn = b -> b
.scan("EMP")
.filter(b.equals(b.field("DEPTNO"), b.literal(10)))
.aggregate(b.groupKey(),
b.count(false, "c", b.field(3)))
.build();
final String expectedSql = "SELECT COUNT(`MGR`) AS `c`\n"
+ "FROM `scott`.`EMP`\n"
+ "WHERE `DEPTNO` = 10";
relFn(relFn).withMysql().ok(expectedSql);
}
@Test public void testTableFunctionScanWithUnnest() {
final RelBuilder builder = relBuilder();
String[] array = {"abc", "bcd", "fdc"};
RelNode root = builder.functionScan(SqlStdOperatorTable.UNNEST, 0,
builder.makeArrayLiteral(Arrays.asList(array))).project(builder.field(0)).build();
final SqlDialect dialect = DatabaseProduct.BIG_QUERY.getDialect();
final String expectedSql = "SELECT *\nFROM UNNEST(ARRAY['abc', 'bcd', 'fdc'])\nAS EXPR$0";
assertThat(toSql(root, dialect), isLinux(expectedSql));
}
/** Test case for
* <a href="https://issues.apache.org/jira/browse/CALCITE-3207">[CALCITE-3207]
* Fail to convert Join RelNode with like condition to sql statement </a>.
*/
@Test void testJoinWithLikeConditionRel2Sql() {
final Function<RelBuilder, RelNode> relFn = b -> b
.scan("EMP")
.scan("DEPT")
.join(JoinRelType.LEFT,
b.and(
b.call(SqlStdOperatorTable.EQUALS,
b.field(2, 0, "DEPTNO"),
b.field(2, 1, "DEPTNO")),
b.call(SqlStdOperatorTable.LIKE,
b.field(2, 1, "DNAME"),
b.literal("ACCOUNTING"))))
.build();
final String expectedSql = "SELECT *\n"
+ "FROM \"scott\".\"EMP\"\n"
+ "LEFT JOIN \"scott\".\"DEPT\" "
+ "ON \"EMP\".\"DEPTNO\" = \"DEPT\".\"DEPTNO\" "
+ "AND \"DEPT\".\"DNAME\" LIKE 'ACCOUNTING'";
relFn(relFn).ok(expectedSql);
}
@Test void testSelectQueryWithGroupByAndProjectList1() {
String query = "select count(*) from \"product\"\n"
+ "group by \"product_class_id\", \"product_id\"";
final String expected = "SELECT COUNT(*)\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "GROUP BY \"product_class_id\", \"product_id\"";
sql(query).ok(expected);
}
@Test void testSelectQueryWithGroupByHaving() {
String query = "select count(*) from \"product\" group by \"product_class_id\","
+ " \"product_id\" having \"product_id\" > 10";
final String expected = "SELECT COUNT(*)\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "GROUP BY \"product_class_id\", \"product_id\"\n"
+ "HAVING \"product_id\" > 10";
sql(query).ok(expected);
}
/** Test case for
* <a href="https://issues.apache.org/jira/browse/CALCITE-1665">[CALCITE-1665]
* Aggregates and having cannot be combined</a>. */
@Test void testSelectQueryWithGroupByHaving2() {
String query = " select \"product\".\"product_id\",\n"
+ " min(\"sales_fact_1997\".\"store_id\")\n"
+ " from \"product\"\n"
+ " inner join \"sales_fact_1997\"\n"
+ " on \"product\".\"product_id\" = \"sales_fact_1997\".\"product_id\"\n"
+ " group by \"product\".\"product_id\"\n"
+ " having count(*) > 1";
String expected = "SELECT \"product\".\"product_id\", "
+ "MIN(\"sales_fact_1997\".\"store_id\")\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "INNER JOIN \"foodmart\".\"sales_fact_1997\" "
+ "ON \"product\".\"product_id\" = \"sales_fact_1997\".\"product_id\"\n"
+ "GROUP BY \"product\".\"product_id\"\n"
+ "HAVING COUNT(*) > 1";
sql(query).ok(expected);
}
/** Test case for
* <a href="https://issues.apache.org/jira/browse/CALCITE-1665">[CALCITE-1665]
* Aggregates and having cannot be combined</a>. */
@Test void testSelectQueryWithGroupByHaving3() {
String query = " select * from (select \"product\".\"product_id\",\n"
+ " min(\"sales_fact_1997\".\"store_id\")\n"
+ " from \"product\"\n"
+ " inner join \"sales_fact_1997\"\n"
+ " on \"product\".\"product_id\" = \"sales_fact_1997\".\"product_id\"\n"
+ " group by \"product\".\"product_id\"\n"
+ " having count(*) > 1) where \"product_id\" > 100";
String expected = "SELECT *\n"
+ "FROM (SELECT \"product\".\"product_id\","
+ " MIN(\"sales_fact_1997\".\"store_id\")\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "INNER JOIN \"foodmart\".\"sales_fact_1997\" ON \"product\".\"product_id\" = "
+ "\"sales_fact_1997\".\"product_id\"\n"
+ "GROUP BY \"product\".\"product_id\"\n"
+ "HAVING COUNT(*) > 1) AS \"t2\"\n"
+ "WHERE \"t2\".\"product_id\" > 100";
sql(query).ok(expected);
}
  /** Test case for
   * <a href="https://issues.apache.org/jira/browse/CALCITE-3811">[CALCITE-3811]
   * JDBC adapter generates SQL with invalid field names if Filter's row type
   * is different from its input</a>. */
  @Test void testHavingAlias() {
    final RelBuilder builder = relBuilder();
    builder.scan("EMP")
        .project(builder.alias(builder.field("DEPTNO"), "D"))
        .aggregate(builder.groupKey(builder.field("D")),
            builder.countStar("emps.count"))
        .filter(
            builder.call(SqlStdOperatorTable.LESS_THAN,
                builder.field("emps.count"), builder.literal(2)));
    final LogicalFilter filter = (LogicalFilter) builder.build();
    assertThat(filter.getRowType().getFieldNames().toString(),
        is("[D, emps.count]"));
    // Create a LogicalAggregate similar to the input of filter, but with different
    // field names.
    final LogicalAggregate newAggregate =
        (LogicalAggregate) builder.scan("EMP")
            .project(builder.alias(builder.field("DEPTNO"), "D2"))
            .aggregate(builder.groupKey(builder.field("D2")),
                builder.countStar("emps.count"))
            .build();
    assertThat(newAggregate.getRowType().getFieldNames().toString(),
        is("[D2, emps.count]"));
    // Change filter's input. Its row type does not change.
    // NOTE: the in-place replaceInput is the point of the test — the filter's
    // declared row type ("D") now disagrees with its input ("D2").
    filter.replaceInput(0, newAggregate);
    assertThat(filter.getRowType().getFieldNames().toString(),
        is("[D, emps.count]"));
    final RelNode root =
        builder.push(filter)
            .project(builder.alias(builder.field("D"), "emps.deptno"))
            .build();
    // MySQL and BigQuery wrap the aggregate in a sub-query; PostgreSQL emits
    // a single query with HAVING.
    final String expectedMysql = "SELECT `D2` AS `emps.deptno`\n"
        + "FROM (SELECT `DEPTNO` AS `D2`, COUNT(*) AS `emps.count`\n"
        + "FROM `scott`.`EMP`\n"
        + "GROUP BY `D2`\n"
        + "HAVING `emps.count` < 2) AS `t1`";
    final String expectedPostgresql = "SELECT \"DEPTNO\" AS \"emps.deptno\"\n"
        + "FROM \"scott\".\"EMP\"\n"
        + "GROUP BY \"DEPTNO\"\n"
        + "HAVING COUNT(*) < 2";
    final String expectedBigQuery = "SELECT D2 AS `emps.deptno`\n"
        + "FROM (SELECT DEPTNO AS D2, COUNT(*) AS `emps.count`\n"
        + "FROM scott.EMP\n"
        + "GROUP BY D2\n"
        + "HAVING `emps.count` < 2) AS t1";
    relFn(b -> root)
        .withMysql().ok(expectedMysql)
        .withPostgresql().ok(expectedPostgresql)
        .withBigQuery().ok(expectedBigQuery);
  }
  /** Test case for
   * <a href="https://issues.apache.org/jira/browse/CALCITE-3896">[CALCITE-3896]
   * JDBC adapter, when generating SQL, changes target of ambiguous HAVING
   * clause with a Project on Filter on Aggregate</a>.
   *
   * <p>The alias is ambiguous in dialects such as MySQL and BigQuery that
   * have {@link SqlConformance#isHavingAlias()} = true. When the HAVING clause
   * tries to reference a column, it sees the alias instead. */
  @Test void testHavingAliasSameAsColumnIgnoringCase() {
    // Upper-case alias: differs from the column name only in case.
    checkHavingAliasSameAsColumn(true);
  }
  @Test void testHavingAliasSameAsColumn() {
    // Lower-case alias: exactly the same spelling as the column name.
    checkHavingAliasSameAsColumn(false);
  }
  /** Common logic for {@link #testHavingAliasSameAsColumn()} and
   * {@link #testHavingAliasSameAsColumnIgnoringCase()}.
   *
   * @param upperAlias whether the SUM alias is upper-case (differing from the
   * {@code gross_weight} column only in case) */
  private void checkHavingAliasSameAsColumn(boolean upperAlias) {
    final String alias = upperAlias ? "GROSS_WEIGHT" : "gross_weight";
    final String query = "select \"product_id\" + 1,\n"
        + " sum(\"gross_weight\") as \"" + alias + "\"\n"
        + "from \"product\"\n"
        + "group by \"product_id\"\n"
        + "having sum(\"product\".\"gross_weight\") < 200";
    // PostgreSQL has isHavingAlias=false, case-sensitive=true
    final String expectedPostgresql = "SELECT \"product_id\" + 1,"
        + " SUM(\"gross_weight\") AS \"" + alias + "\"\n"
        + "FROM \"foodmart\".\"product\"\n"
        + "GROUP BY \"product_id\"\n"
        + "HAVING SUM(\"gross_weight\") < 200";
    // MySQL has isHavingAlias=true, case-sensitive=true
    final String expectedMysql = "SELECT `product_id` + 1, `" + alias + "`\n"
        + "FROM (SELECT `product_id`, SUM(`gross_weight`) AS `" + alias + "`\n"
        + "FROM `foodmart`.`product`\n"
        + "GROUP BY `product_id`\n"
        + "HAVING `" + alias + "` < 200) AS `t1`";
    // BigQuery has isHavingAlias=true, case-sensitive=false
    final String expectedBigQuery = upperAlias
        ? "SELECT product_id + 1, GROSS_WEIGHT\n"
            + "FROM (SELECT product_id, SUM(gross_weight) AS GROSS_WEIGHT\n"
            + "FROM foodmart.product\n"
            + "GROUP BY product_id\n"
            + "HAVING GROSS_WEIGHT < 200) AS t1"
        // Before [CALCITE-3896] was fixed, we got
        // "HAVING SUM(gross_weight) < 200) AS t1"
        // which on BigQuery gives you an error about aggregating aggregates
        : "SELECT product_id + 1, gross_weight\n"
            + "FROM (SELECT product_id, SUM(gross_weight) AS gross_weight\n"
            + "FROM foodmart.product\n"
            + "GROUP BY product_id\n"
            + "HAVING gross_weight < 200) AS t1";
    sql(query)
        .withPostgresql().ok(expectedPostgresql)
        .withMysql().ok(expectedMysql)
        .withBigQuery().ok(expectedBigQuery);
  }
  @Test void testHaving4() {
    // Nested grouped queries: the inner WHERE-on-aggregate-alias becomes a
    // HAVING on the aggregate itself; the outer HAVING references the alias.
    final String query = "select \"product_id\"\n"
        + "from (\n"
        + "  select \"product_id\", avg(\"gross_weight\") as agw\n"
        + "  from \"product\"\n"
        + "  where \"net_weight\" < 100\n"
        + "  group by \"product_id\")\n"
        + "where agw > 50\n"
        + "group by \"product_id\"\n"
        + "having avg(agw) > 60\n";
    final String expected = "SELECT \"product_id\"\n"
        + "FROM (SELECT \"product_id\", AVG(\"gross_weight\") AS \"AGW\"\n"
        + "FROM \"foodmart\".\"product\"\n"
        + "WHERE \"net_weight\" < 100\n"
        + "GROUP BY \"product_id\"\n"
        + "HAVING AVG(\"gross_weight\") > 50) AS \"t2\"\n"
        + "GROUP BY \"product_id\"\n"
        + "HAVING AVG(\"AGW\") > 60";
    sql(query).ok(expected);
  }
@Test void testSelectQueryWithOrderByClause() {
String query = "select \"product_id\" from \"product\"\n"
+ "order by \"net_weight\"";
final String expected = "SELECT \"product_id\", \"net_weight\"\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "ORDER BY \"net_weight\"";
sql(query).ok(expected);
}
@Test void testSelectQueryWithOrderByClause1() {
String query =
"select \"product_id\", \"net_weight\" from \"product\" order by \"net_weight\"";
final String expected = "SELECT \"product_id\", \"net_weight\"\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "ORDER BY \"net_weight\"";
sql(query).ok(expected);
}
@Test void testSelectQueryWithTwoOrderByClause() {
String query = "select \"product_id\" from \"product\"\n"
+ "order by \"net_weight\", \"gross_weight\"";
final String expected = "SELECT \"product_id\", \"net_weight\","
+ " \"gross_weight\"\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "ORDER BY \"net_weight\", \"gross_weight\"";
sql(query).ok(expected);
}
@Test void testSelectQueryWithAscDescOrderByClause() {
String query = "select \"product_id\" from \"product\" "
+ "order by \"net_weight\" asc, \"gross_weight\" desc, \"low_fat\"";
final String expected = "SELECT"
+ " \"product_id\", \"net_weight\", \"gross_weight\", \"low_fat\"\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "ORDER BY \"net_weight\", \"gross_weight\" DESC, \"low_fat\"";
sql(query).ok(expected);
}
/** Test case for
* <a href="https://issues.apache.org/jira/browse/CALCITE-3440">[CALCITE-3440]
* RelToSqlConverter does not properly alias ambiguous ORDER BY</a>. */
@Test void testOrderByColumnWithSameNameAsAlias() {
String query = "select \"product_id\" as \"p\",\n"
+ " \"net_weight\" as \"product_id\"\n"
+ "from \"product\"\n"
+ "order by 1";
final String expected = "SELECT \"product_id\" AS \"p\","
+ " \"net_weight\" AS \"product_id\"\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "ORDER BY \"p\"";
sql(query).ok(expected);
}
@Test void testOrderByColumnWithSameNameAsAlias2() {
// We use ordinal "2" because the column name "product_id" is obscured
// by alias "product_id".
String query = "select \"net_weight\" as \"product_id\",\n"
+ " \"product_id\" as \"product_id\"\n"
+ "from \"product\"\n"
+ "order by \"product\".\"product_id\"";
final String expected = "SELECT \"net_weight\" AS \"product_id\","
+ " \"product_id\" AS \"product_id0\"\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "ORDER BY \"product_id0\"";
final String expectedMysql = "SELECT `net_weight` AS `product_id`,"
+ " `product_id` AS `product_id0`\n"
+ "FROM `foodmart`.`product`\n"
+ "ORDER BY `product_id0` IS NULL, `product_id0`";
sql(query).ok(expected)
.withMysql().ok(expectedMysql);
}
@Test void testHiveSelectCharset() {
String query = "select \"hire_date\", cast(\"hire_date\" as varchar(10)) "
+ "from \"foodmart\".\"reserve_employee\"";
final String expected = "SELECT hire_date, CAST(hire_date AS VARCHAR(10))\n"
+ "FROM foodmart.reserve_employee";
sql(query).withHive().ok(expected);
}
/** Test case for
* <a href="https://issues.apache.org/jira/browse/CALCITE-3282">[CALCITE-3282]
* HiveSqlDialect unparse Interger type as Int in order
* to be compatible with Hive1.x</a>. */
@Test void testHiveCastAsInt() {
String query = "select cast( cast(\"employee_id\" as varchar) as int) "
+ "from \"foodmart\".\"reserve_employee\" ";
final String expected = "SELECT CAST(CAST(employee_id AS VARCHAR) AS INT)\n"
+ "FROM foodmart.reserve_employee";
sql(query).withHive().ok(expected);
}
@Test void testBigQueryCast() {
String query = "select cast(cast(\"employee_id\" as varchar) as bigint), "
+ "cast(cast(\"employee_id\" as varchar) as smallint), "
+ "cast(cast(\"employee_id\" as varchar) as tinyint), "
+ "cast(cast(\"employee_id\" as varchar) as integer), "
+ "cast(cast(\"employee_id\" as varchar) as float), "
+ "cast(cast(\"employee_id\" as varchar) as char), "
+ "cast(cast(\"employee_id\" as varchar) as binary), "
+ "cast(cast(\"employee_id\" as varchar) as varbinary), "
+ "cast(cast(\"employee_id\" as varchar) as timestamp), "
+ "cast(cast(\"employee_id\" as varchar) as double), "
+ "cast(cast(\"employee_id\" as varchar) as decimal), "
+ "cast(cast(\"employee_id\" as varchar) as date), "
+ "cast(cast(\"employee_id\" as varchar) as time), "
+ "cast(cast(\"employee_id\" as varchar) as boolean) "
+ "from \"foodmart\".\"reserve_employee\" ";
final String expected = "SELECT CAST(CAST(employee_id AS STRING) AS INT64), "
+ "CAST(CAST(employee_id AS STRING) AS INT64), "
+ "CAST(CAST(employee_id AS STRING) AS INT64), "
+ "CAST(CAST(employee_id AS STRING) AS INT64), "
+ "CAST(CAST(employee_id AS STRING) AS FLOAT64), "
+ "CAST(CAST(employee_id AS STRING) AS STRING), "
+ "CAST(CAST(employee_id AS STRING) AS BYTES), "
+ "CAST(CAST(employee_id AS STRING) AS BYTES), "
+ "CAST(CAST(employee_id AS STRING) AS DATETIME), "
+ "CAST(CAST(employee_id AS STRING) AS FLOAT64), "
+ "CAST(CAST(employee_id AS STRING) AS NUMERIC), "
+ "CAST(CAST(employee_id AS STRING) AS DATE), "
+ "CAST(CAST(employee_id AS STRING) AS TIME), "
+ "CAST(CAST(employee_id AS STRING) AS BOOL)\n"
+ "FROM foodmart.reserve_employee";
sql(query).withBigQuery().ok(expected);
}
/** Test case for
* <a href="https://issues.apache.org/jira/browse/CALCITE-3220">[CALCITE-3220]
* HiveSqlDialect should transform the SQL-standard TRIM function to TRIM,
* LTRIM or RTRIM</a>,
* <a href="https://issues.apache.org/jira/browse/CALCITE-3663">[CALCITE-3663]
* Support for TRIM function in BigQuery dialect</a>, and
* <a href="https://issues.apache.org/jira/browse/CALCITE-3771">[CALCITE-3771]
* Support of TRIM function for SPARK dialect and improvement in HIVE
* Dialect</a>. */
@Test void testHiveSparkAndBqTrim() {
final String query = "SELECT TRIM(' str ')\n"
+ "from \"foodmart\".\"reserve_employee\"";
final String expected = "SELECT TRIM(' str ')\n"
+ "FROM foodmart.reserve_employee";
final String expectedSpark = "SELECT TRIM(BOTH ' ' FROM ' str ')\nFROM foodmart"
+ ".reserve_employee";
sql(query)
.withHive()
.ok(expected)
.withSpark()
.ok(expectedSpark)
.withBigQuery()
.ok(expected);
}
@Test void testHiveSparkAndBqTrimWithBoth() {
final String query = "SELECT TRIM(both ' ' from ' str ')\n"
+ "from \"foodmart\".\"reserve_employee\"";
final String expected = "SELECT TRIM(' str ')\n"
+ "FROM foodmart.reserve_employee";
final String expectedSpark = "SELECT TRIM(BOTH ' ' FROM ' str ')\n"
+ "FROM foodmart.reserve_employee";
sql(query)
.withHive()
.ok(expected)
.withSpark()
.ok(expectedSpark)
.withBigQuery()
.ok(expected);
}
@Test void testHiveSparkAndBqTrimWithLeading() {
final String query = "SELECT TRIM(LEADING ' ' from ' str ')\n"
+ "from \"foodmart\".\"reserve_employee\"";
final String expected = "SELECT LTRIM(' str ')\n"
+ "FROM foodmart.reserve_employee";
final String expectedSpark = "SELECT TRIM(LEADING ' ' FROM ' str ')\nFROM foodmart"
+ ".reserve_employee";
sql(query)
.withHive()
.ok(expected)
.withSpark()
.ok(expectedSpark)
.withBigQuery()
.ok(expected);
}
@Test void testHiveSparkAndBqTrimWithTailing() {
final String query = "SELECT TRIM(TRAILING ' ' from ' str ')\n"
+ "from \"foodmart\".\"reserve_employee\"";
final String expected = "SELECT RTRIM(' str ')\n"
+ "FROM foodmart.reserve_employee";
final String expectedSpark = "SELECT TRIM(TRAILING ' ' FROM ' str ')\nFROM foodmart"
+ ".reserve_employee";
sql(query)
.withHive()
.ok(expected)
.withSpark()
.ok(expectedSpark)
.withBigQuery()
.ok(expected);
}
/** Test case for
* <a href="https://issues.apache.org/jira/browse/CALCITE-3663">[CALCITE-3663]
* Support for TRIM function in BigQuery dialect</a>. */
@Test void testBqTrimWithLeadingChar() {
final String query = "SELECT TRIM(LEADING 'a' from 'abcd')\n"
+ "from \"foodmart\".\"reserve_employee\"";
final String expected = "SELECT LTRIM('abcd', 'a')\n"
+ "FROM foodmart.reserve_employee";
final String expectedHS = "SELECT REGEXP_REPLACE('abcd', '^(a)*', '')\n"
+ "FROM foodmart.reserve_employee";
sql(query)
.withBigQuery()
.ok(expected);
}
/** Test case for
* <a href="https://issues.apache.org/jira/browse/CALCITE-3771">[CALCITE-3771]
* Support of TRIM function for SPARK dialect and improvement in HIVE Dialect</a>. */
@Test void testHiveAndSparkTrimWithLeadingChar() {
final String query = "SELECT TRIM(LEADING 'a' from 'abcd')\n"
+ "from \"foodmart\".\"reserve_employee\"";
final String expected = "SELECT REGEXP_REPLACE('abcd', '^(a)*', '')\n"
+ "FROM foodmart.reserve_employee";
final String expectedSpark = "SELECT TRIM(LEADING 'a' FROM 'abcd')\nFROM foodmart"
+ ".reserve_employee";
sql(query)
.withHive()
.ok(expected)
.withSpark()
.ok(expectedSpark);
}
@Test void testBqTrimWithBothChar() {
final String query = "SELECT TRIM(both 'a' from 'abcda')\n"
+ "from \"foodmart\".\"reserve_employee\"";
final String expected = "SELECT TRIM('abcda', 'a')\n"
+ "FROM foodmart.reserve_employee";
sql(query)
.withBigQuery()
.ok(expected);
}
@Test void testHiveAndSparkTrimWithBothChar() {
final String query = "SELECT TRIM(both 'a' from 'abcda')\n"
+ "from \"foodmart\".\"reserve_employee\"";
final String expected = "SELECT REGEXP_REPLACE('abcda', '^(a)*|(a)*$', '')\n"
+ "FROM foodmart.reserve_employee";
final String expectedSpark = "SELECT TRIM(BOTH 'a' FROM 'abcda')\n"
+ "FROM foodmart.reserve_employee";
sql(query)
.withHive()
.ok(expected)
.withSpark()
.ok(expectedSpark);
}
@Test void testHiveBqTrimWithTailingChar() {
final String query = "SELECT TRIM(TRAILING 'a' from 'abcd')\n"
+ "from \"foodmart\".\"reserve_employee\"";
final String expected = "SELECT RTRIM('abcd', 'a')\n"
+ "FROM foodmart.reserve_employee";
sql(query)
.withBigQuery()
.ok(expected);
}
@Test public void testTrim() {
final String query = "SELECT TRIM(\"full_name\")\n"
+ "from \"foodmart\".\"reserve_employee\"";
final String expected = "SELECT TRIM(full_name)\n"
+ "FROM foodmart.reserve_employee";
final String expectedSnowFlake = "SELECT TRIM(\"full_name\")\n"
+ "FROM \"foodmart\".\"reserve_employee\"";
final String expectedSpark = "SELECT TRIM(BOTH ' ' FROM full_name)\nFROM foodmart"
+ ".reserve_employee";
sql(query)
.withHive()
.ok(expected)
.withSpark()
.ok(expectedSpark)
.withBigQuery()
.ok(expected)
.withSnowflake()
.ok(expectedSnowFlake);
}
@Test public void testTrimWithBoth() {
final String query = "SELECT TRIM(both ' ' from \"full_name\")\n"
+ "from \"foodmart\".\"reserve_employee\"";
final String expected = "SELECT TRIM(full_name)\n"
+ "FROM foodmart.reserve_employee";
final String expectedSpark = "SELECT TRIM(BOTH ' ' FROM full_name)\n"
+ "FROM foodmart.reserve_employee";
final String expectedSnowFlake = "SELECT TRIM(\"full_name\")\n"
+ "FROM \"foodmart\".\"reserve_employee\"";
final String expectedMsSql = "SELECT TRIM(' ' FROM [full_name])\n"
+ "FROM [foodmart].[reserve_employee]";
sql(query)
.withHive()
.ok(expected)
.withSpark()
.ok(expectedSpark)
.withBigQuery()
.ok(expected)
.withSnowflake()
.ok(expectedSnowFlake)
.withMssql()
.ok(expectedMsSql);
}
@Test public void testTrimWithLeadingSpace() {
final String query = "SELECT TRIM(LEADING ' ' from ' str ')\n"
+ "from \"foodmart\".\"reserve_employee\"";
final String expected = "SELECT LTRIM(' str ')\n"
+ "FROM foodmart.reserve_employee";
final String expectedSpark = "SELECT TRIM(LEADING ' ' FROM ' str ')\nFROM foodmart"
+ ".reserve_employee";
final String expectedSnowFlake = "SELECT LTRIM(' str ')\n"
+ "FROM \"foodmart\".\"reserve_employee\"";
final String expectedMsSql = "SELECT LTRIM(' str ')\n"
+ "FROM [foodmart].[reserve_employee]";
sql(query)
.withHive()
.ok(expected)
.withSpark()
.ok(expectedSpark)
.withBigQuery()
.ok(expected)
.withSnowflake()
.ok(expectedSnowFlake)
.withMssql()
.ok(expectedMsSql);
}
@Test public void testTrimWithTailingSpace() {
final String query = "SELECT TRIM(TRAILING ' ' from ' str ')\n"
+ "from \"foodmart\".\"reserve_employee\"";
final String expected = "SELECT RTRIM(' str ')\n"
+ "FROM foodmart.reserve_employee";
final String expectedSpark = "SELECT TRIM(TRAILING ' ' FROM ' str ')"
+ "\nFROM foodmart.reserve_employee";
final String expectedSnowFlake = "SELECT RTRIM(' str ')\n"
+ "FROM \"foodmart\".\"reserve_employee\"";
final String expectedMsSql = "SELECT RTRIM(' str ')\n"
+ "FROM [foodmart].[reserve_employee]";
sql(query)
.withHive()
.ok(expected)
.withSpark()
.ok(expectedSpark)
.withBigQuery()
.ok(expected)
.withSnowflake()
.ok(expectedSnowFlake)
.withMssql()
.ok(expectedMsSql);
}
@Test public void testTrimWithLeadingCharacter() {
final String query = "SELECT TRIM(LEADING 'A' from \"first_name\")\n"
+ "from \"foodmart\".\"reserve_employee\"";
final String expected = "SELECT LTRIM(first_name, 'A')\n"
+ "FROM foodmart.reserve_employee";
final String expectedSpark = "SELECT TRIM(LEADING 'A' FROM first_name)\nFROM foodmart"
+ ".reserve_employee";
final String expectedHS = "SELECT REGEXP_REPLACE(first_name, '^(A)*', '')\n"
+ "FROM foodmart.reserve_employee";
final String expectedSnowFlake = "SELECT LTRIM(\"first_name\", 'A')\n"
+ "FROM \"foodmart\".\"reserve_employee\"";
sql(query)
.withHive()
.ok(expectedHS)
.withSpark()
.ok(expectedSpark)
.withBigQuery()
.ok(expected)
.withSnowflake()
.ok(expectedSnowFlake);
}
@Test public void testTrimWithTrailingCharacter() {
final String query = "SELECT TRIM(TRAILING 'A' from 'AABCAADCAA')\n"
+ "from \"foodmart\".\"reserve_employee\"";
final String expected = "SELECT RTRIM('AABCAADCAA', 'A')\n"
+ "FROM foodmart.reserve_employee";
final String expectedSpark = "SELECT TRIM(TRAILING 'A' FROM 'AABCAADCAA')\nFROM foodmart"
+ ".reserve_employee";
final String expectedHS = "SELECT REGEXP_REPLACE('AABCAADCAA', '(A)*$', '')\n"
+ "FROM foodmart.reserve_employee";
final String expectedSnowFlake = "SELECT RTRIM('AABCAADCAA', 'A')\n"
+ "FROM \"foodmart\".\"reserve_employee\"";
sql(query)
.withHive()
.ok(expectedHS)
.withSpark()
.ok(expectedSpark)
.withBigQuery()
.ok(expected)
.withSnowflake()
.ok(expectedSnowFlake);
}
@Test public void testTrimWithBothCharacter() {
final String query = "SELECT TRIM(BOTH 'A' from 'AABCAADCAA')\n"
+ "from \"foodmart\".\"reserve_employee\"";
final String expected = "SELECT TRIM('AABCAADCAA', 'A')\n"
+ "FROM foodmart.reserve_employee";
final String expectedSpark = "SELECT TRIM(BOTH 'A' FROM 'AABCAADCAA')\nFROM foodmart"
+ ".reserve_employee";
final String expectedHS = "SELECT REGEXP_REPLACE('AABCAADCAA', '^(A)*|(A)*$', '')\n"
+ "FROM foodmart.reserve_employee";
final String expectedSnowFlake = "SELECT TRIM('AABCAADCAA', 'A')\n"
+ "FROM \"foodmart\".\"reserve_employee\"";
sql(query)
.withHive()
.ok(expectedHS)
.withSpark()
.ok(expectedSpark)
.withBigQuery()
.ok(expected)
.withSnowflake()
.ok(expectedSnowFlake);
}
@Test public void testTrimWithLeadingSpecialCharacter() {
final String query = "SELECT TRIM(LEADING 'A$@*' from 'A$@*AABCA$@*AADCAA$@*')\n"
+ "from \"foodmart\".\"reserve_employee\"";
final String expected = "SELECT LTRIM('A$@*AABCA$@*AADCAA$@*', 'A$@*')\n"
+ "FROM foodmart.reserve_employee";
final String expectedHS =
"SELECT REGEXP_REPLACE('A$@*AABCA$@*AADCAA$@*', '^(A\\$\\@\\*)*', '')\n"
+ "FROM foodmart.reserve_employee";
final String expectedSpark = "SELECT TRIM(LEADING 'A$@*' FROM 'A$@*AABCA$@*AADCAA$@*')\nFROM"
+ " foodmart.reserve_employee";
final String expectedSnowFlake = "SELECT LTRIM('A$@*AABCA$@*AADCAA$@*', 'A$@*')\n"
+ "FROM \"foodmart\".\"reserve_employee\"";
sql(query)
.withHive()
.ok(expectedHS)
.withSpark()
.ok(expectedSpark)
.withBigQuery()
.ok(expected)
.withSnowflake()
.ok(expectedSnowFlake);
}
@Test public void testTrimWithTrailingSpecialCharacter() {
final String query = "SELECT TRIM(TRAILING '$A@*' from '$A@*AABC$@*AADCAA$A@*')\n"
+ "from \"foodmart\".\"reserve_employee\"";
final String expected = "SELECT RTRIM('$A@*AABC$@*AADCAA$A@*', '$A@*')\n"
+ "FROM foodmart.reserve_employee";
final String expectedHS =
"SELECT REGEXP_REPLACE('$A@*AABC$@*AADCAA$A@*', '(\\$A\\@\\*)*$', '')\n"
+ "FROM foodmart.reserve_employee";
final String expectedSpark = "SELECT TRIM(TRAILING '$A@*' FROM '$A@*AABC$@*AADCAA$A@*')\n"
+ "FROM foodmart.reserve_employee";
final String expectedSnowFlake = "SELECT RTRIM('$A@*AABC$@*AADCAA$A@*', '$A@*')\n"
+ "FROM \"foodmart\".\"reserve_employee\"";
sql(query)
.withHive()
.ok(expectedHS)
.withSpark()
.ok(expectedSpark)
.withBigQuery()
.ok(expected)
.withSnowflake()
.ok(expectedSnowFlake);
}
@Test public void testTrimWithBothSpecialCharacter() {
final String query = "SELECT TRIM(BOTH '$@*A' from '$@*AABC$@*AADCAA$@*A')\n"
+ "from \"foodmart\".\"reserve_employee\"";
final String expected = "SELECT TRIM('$@*AABC$@*AADCAA$@*A', '$@*A')\n"
+ "FROM foodmart.reserve_employee";
final String expectedHS =
"SELECT REGEXP_REPLACE('$@*AABC$@*AADCAA$@*A',"
+ " '^(\\$\\@\\*A)*|(\\$\\@\\*A)*$', '')\n"
+ "FROM foodmart.reserve_employee";
final String expectedSpark = "SELECT TRIM(BOTH '$@*A' FROM '$@*AABC$@*AADCAA$@*A')\nFROM "
+ "foodmart.reserve_employee";
final String expectedSnowFlake = "SELECT TRIM('$@*AABC$@*AADCAA$@*A', '$@*A')\n"
+ "FROM \"foodmart\".\"reserve_employee\"";
sql(query)
.withHive()
.ok(expectedHS)
.withSpark()
.ok(expectedSpark)
.withBigQuery()
.ok(expected)
.withSnowflake()
.ok(expectedSnowFlake);
}
@Test public void testTrimWithFunction() {
final String query = "SELECT TRIM(substring(\"full_name\" from 2 for 3))\n"
+ "from \"foodmart\".\"reserve_employee\"";
final String expected = "SELECT TRIM(SUBSTR(full_name, 2, 3))\n"
+ "FROM foodmart.reserve_employee";
final String expectedHS =
"SELECT TRIM(SUBSTRING(full_name, 2, 3))\n"
+ "FROM foodmart.reserve_employee";
final String expectedSpark = "SELECT TRIM(BOTH ' ' FROM SUBSTRING(full_name, 2, 3))\n"
+ "FROM foodmart.reserve_employee";
final String expectedSnowFlake = "SELECT TRIM(SUBSTR(\"full_name\", 2, 3))\n"
+ "FROM \"foodmart\".\"reserve_employee\"";
sql(query)
.withHive()
.ok(expectedHS)
.withSpark()
.ok(expectedSpark)
.withBigQuery()
.ok(expected)
.withSnowflake()
.ok(expectedSnowFlake);
}
@Test void testHiveAndSparkTrimWithTailingChar() {
final String query = "SELECT TRIM(TRAILING 'a' from 'abcd')\n"
+ "from \"foodmart\".\"reserve_employee\"";
final String expected = "SELECT REGEXP_REPLACE('abcd', '(a)*$', '')\n"
+ "FROM foodmart.reserve_employee";
final String expectedSpark = "SELECT TRIM(TRAILING 'a' FROM 'abcd')\n"
+ "FROM foodmart.reserve_employee";
sql(query)
.withHive()
.ok(expected)
.withSpark()
.ok(expectedSpark);
}
@Test void testBqTrimWithBothSpecialCharacter() {
final String query = "SELECT TRIM(BOTH '$@*A' from '$@*AABC$@*AADCAA$@*A')\n"
+ "from \"foodmart\".\"reserve_employee\"";
final String expected = "SELECT TRIM('$@*AABC$@*AADCAA$@*A', '$@*A')\n"
+ "FROM foodmart.reserve_employee";
sql(query)
.withBigQuery()
.ok(expected);
}
@Test void testHiveAndSparkTrimWithBothSpecialCharacter() {
final String query = "SELECT TRIM(BOTH '$@*A' from '$@*AABC$@*AADCAA$@*A')\n"
+ "from \"foodmart\".\"reserve_employee\"";
final String expected = "SELECT REGEXP_REPLACE('$@*AABC$@*AADCAA$@*A',"
+ " '^(\\$\\@\\*A)*|(\\$\\@\\*A)*$', '')\n"
+ "FROM foodmart.reserve_employee";
final String expectedSpark = "SELECT TRIM(BOTH '$@*A' FROM '$@*AABC$@*AADCAA$@*A')\n"
+ "FROM foodmart.reserve_employee";
sql(query)
.withHive()
.ok(expected)
.withSpark()
.ok(expectedSpark);
}
/** Test case for
* <a href="https://issues.apache.org/jira/browse/CALCITE-2715">[CALCITE-2715]
* MS SQL Server does not support character set as part of data type</a>. */
@Test void testMssqlCharacterSet() {
String query = "select \"hire_date\", cast(\"hire_date\" as varchar(10))\n"
+ "from \"foodmart\".\"reserve_employee\"";
final String expected = "SELECT [hire_date], CAST([hire_date] AS VARCHAR(10))\n"
+ "FROM [foodmart].[reserve_employee]";
sql(query).withMssql().ok(expected);
}
/**
* Tests that IN can be un-parsed.
*
   * <p>This cannot be tested using "sql", because Calcite's SQL parser
* replaces INs with ORs or sub-queries.
*/
@Test void testUnparseIn1() {
final Function<RelBuilder, RelNode> relFn = b ->
b.scan("EMP")
.filter(b.in(b.field("DEPTNO"), b.literal(21)))
.build();
final String expectedSql = "SELECT *\n"
+ "FROM \"scott\".\"EMP\"\n"
+ "WHERE \"DEPTNO\" = 21";
relFn(relFn).ok(expectedSql);
}
@Test void testUnparseIn2() {
final Function<RelBuilder, RelNode> relFn = b -> b
.scan("EMP")
.filter(b.in(b.field("DEPTNO"), b.literal(20), b.literal(21)))
.build();
final String expectedSql = "SELECT *\n"
+ "FROM \"scott\".\"EMP\"\n"
+ "WHERE \"DEPTNO\" IN (20, 21)";
relFn(relFn).ok(expectedSql);
}
@Test void testUnparseInStruct1() {
final Function<RelBuilder, RelNode> relFn = b ->
b.scan("EMP")
.filter(
b.in(
b.call(SqlStdOperatorTable.ROW,
b.field("DEPTNO"), b.field("JOB")),
b.call(SqlStdOperatorTable.ROW, b.literal(1),
b.literal("PRESIDENT"))))
.build();
final String expectedSql = "SELECT *\n"
+ "FROM \"scott\".\"EMP\"\n"
+ "WHERE ROW(\"DEPTNO\", \"JOB\") = ROW(1, 'PRESIDENT')";
relFn(relFn).ok(expectedSql);
}
@Test void testUnparseInStruct2() {
final Function<RelBuilder, RelNode> relFn = b ->
b.scan("EMP")
.filter(
b.in(
b.call(SqlStdOperatorTable.ROW,
b.field("DEPTNO"), b.field("JOB")),
b.call(SqlStdOperatorTable.ROW, b.literal(1),
b.literal("PRESIDENT")),
b.call(SqlStdOperatorTable.ROW, b.literal(2),
b.literal("PRESIDENT"))))
.build();
final String expectedSql = "SELECT *\n"
+ "FROM \"scott\".\"EMP\"\n"
+ "WHERE ROW(\"DEPTNO\", \"JOB\") IN (ROW(1, 'PRESIDENT'), ROW(2, 'PRESIDENT'))";
relFn(relFn).ok(expectedSql);
}
@Test public void testScalarQueryWithBigQuery() {
final RelBuilder builder = relBuilder();
final RelNode scalarQueryRel = builder.
scan("DEPT")
.filter(builder.equals(builder.field("DEPTNO"), builder.literal(40)))
.project(builder.field(0))
.build();
final RelNode root = builder
.scan("EMP")
.aggregate(builder.groupKey("EMPNO"),
builder.aggregateCall(SqlStdOperatorTable.SINGLE_VALUE,
RexSubQuery.scalar(scalarQueryRel)).as("SC_DEPTNO"),
builder.count(builder.literal(1)).as("pid"))
.build();
final String expectedBigQuery = "SELECT EMPNO, (((SELECT DEPTNO\n"
+ "FROM scott.DEPT\n"
+ "WHERE DEPTNO = 40))) AS SC_DEPTNO, COUNT(1) AS pid\n"
+ "FROM scott.EMP\n"
+ "GROUP BY EMPNO";
final String expectedSnowflake = "SELECT \"EMPNO\", (((SELECT \"DEPTNO\"\n"
+ "FROM \"scott\".\"DEPT\"\n"
+ "WHERE \"DEPTNO\" = 40))) AS \"SC_DEPTNO\", COUNT(1) AS \"pid\"\n"
+ "FROM \"scott\".\"EMP\"\n"
+ "GROUP BY \"EMPNO\"";
assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()),
isLinux(expectedBigQuery));
assertThat(toSql(root, DatabaseProduct.SNOWFLAKE.getDialect()),
isLinux(expectedSnowflake));
}
@Test void testSelectQueryWithLimitClause() {
String query = "select \"product_id\" from \"product\" limit 100 offset 10";
final String expected = "SELECT product_id\n"
+ "FROM foodmart.product\n"
+ "LIMIT 100\nOFFSET 10";
sql(query).withHive().ok(expected);
}
@Test void testPositionFunctionForHive() {
final String query = "select position('A' IN 'ABC') from \"product\"";
final String expected = "SELECT INSTR('ABC', 'A')\n"
+ "FROM foodmart.product";
sql(query).withHive().ok(expected);
}
@Test void testPositionFunctionForBigQuery() {
final String query = "select position('A' IN 'ABC') from \"product\"";
final String expected = "SELECT STRPOS('ABC', 'A')\n"
+ "FROM foodmart.product";
sql(query).withBigQuery().ok(expected);
}
@Test void testPositionFunctionWithSlashForBigQuery() {
final String query = "select position('\\,' IN 'ABC') from \"product\"";
final String expected = "SELECT STRPOS('ABC', '\\\\,')\n"
+ "FROM foodmart.product";
sql(query).withBigQuery().ok(expected);
}
/** Tests that we escape single-quotes in character literals using back-slash
* in BigQuery. The norm is to escape single-quotes with single-quotes. */
@Test void testCharLiteralForBigQuery() {
final String query = "select 'that''s all folks!' from \"product\"";
final String expectedPostgresql = "SELECT 'that''s all folks!'\n"
+ "FROM \"foodmart\".\"product\"";
final String expectedBigQuery = "SELECT 'that\\'s all folks!'\n"
+ "FROM foodmart.product";
sql(query)
.withPostgresql().ok(expectedPostgresql)
.withBigQuery().ok(expectedBigQuery);
}
@Test void testIdentifier() {
// Note that IGNORE is reserved in BigQuery but not in standard SQL
final String query = "select *\n"
+ "from (\n"
+ " select 1 as \"one\", 2 as \"tWo\", 3 as \"THREE\",\n"
+ " 4 as \"fo$ur\", 5 as \"ignore\"\n"
+ " from \"foodmart\".\"days\") as \"my$table\"\n"
+ "where \"one\" < \"tWo\" and \"THREE\" < \"fo$ur\"";
final String expectedBigQuery = "SELECT *\n"
+ "FROM (SELECT 1 AS one, 2 AS tWo, 3 AS THREE,"
+ " 4 AS `fo$ur`, 5 AS `ignore`\n"
+ "FROM foodmart.days) AS t\n"
+ "WHERE one < tWo AND THREE < `fo$ur`";
final String expectedMysql = "SELECT *\n"
+ "FROM (SELECT 1 AS `one`, 2 AS `tWo`, 3 AS `THREE`,"
+ " 4 AS `fo$ur`, 5 AS `ignore`\n"
+ "FROM `foodmart`.`days`) AS `t`\n"
+ "WHERE `one` < `tWo` AND `THREE` < `fo$ur`";
final String expectedPostgresql = "SELECT *\n"
+ "FROM (SELECT 1 AS \"one\", 2 AS \"tWo\", 3 AS \"THREE\","
+ " 4 AS \"fo$ur\", 5 AS \"ignore\"\n"
+ "FROM \"foodmart\".\"days\") AS \"t\"\n"
+ "WHERE \"one\" < \"tWo\" AND \"THREE\" < \"fo$ur\"";
final String expectedOracle = expectedPostgresql.replace(" AS ", " ");
sql(query)
.withBigQuery().ok(expectedBigQuery)
.withMysql().ok(expectedMysql)
.withOracle().ok(expectedOracle)
.withPostgresql().ok(expectedPostgresql);
}
@Test void testModFunctionForHive() {
final String query = "select mod(11,3) from \"product\"";
final String expected = "SELECT 11 % 3\n"
+ "FROM foodmart.product";
sql(query).withHive().ok(expected);
}
@Test void testUnionOperatorForBigQuery() {
final String query = "select mod(11,3) from \"product\"\n"
+ "UNION select 1 from \"product\"";
final String expected = "SELECT MOD(11, 3)\n"
+ "FROM foodmart.product\n"
+ "UNION DISTINCT\n"
+ "SELECT 1\n"
+ "FROM foodmart.product";
sql(query).withBigQuery().ok(expected);
}
@Test void testUnionAllOperatorForBigQuery() {
final String query = "select mod(11,3) from \"product\"\n"
+ "UNION ALL select 1 from \"product\"";
final String expected = "SELECT MOD(11, 3)\n"
+ "FROM foodmart.product\n"
+ "UNION ALL\n"
+ "SELECT 1\n"
+ "FROM foodmart.product";
sql(query).withBigQuery().ok(expected);
}
@Test void testIntersectOperatorForBigQuery() {
final String query = "select mod(11,3) from \"product\"\n"
+ "INTERSECT select 1 from \"product\"";
final String expected = "SELECT MOD(11, 3)\n"
+ "FROM foodmart.product\n"
+ "INTERSECT DISTINCT\n"
+ "SELECT 1\n"
+ "FROM foodmart.product";
sql(query).withBigQuery().ok(expected);
}
@Test public void testIntersectOrderBy() {
final String query = "select * from (select \"product_id\" from \"product\"\n"
+ "INTERSECT select \"product_id\" from \"product\") t order by t.\"product_id\"";
final String expectedBigQuery = "SELECT *\n"
+ "FROM (SELECT product_id\n"
+ "FROM foodmart.product\n"
+ "INTERSECT DISTINCT\n"
+ "SELECT product_id\n"
+ "FROM foodmart.product) AS t1\n"
+ "ORDER BY product_id IS NULL, product_id";
sql(query).withBigQuery().ok(expectedBigQuery);
}
@Test public void testIntersectWithWhere() {
final String query = "select * from (select \"product_id\" from \"product\"\n"
+ "INTERSECT select \"product_id\" from \"product\") t where t.\"product_id\"<=14";
final String expectedBigQuery = "SELECT *\n"
+ "FROM (SELECT product_id\n"
+ "FROM foodmart.product\n"
+ "INTERSECT DISTINCT\n"
+ "SELECT product_id\n"
+ "FROM foodmart.product) AS t1\n"
+ "WHERE product_id <= 14";
sql(query).withBigQuery().ok(expectedBigQuery);
}
@Test public void testIntersectWithGroupBy() {
final String query = "select * from (select \"product_id\" from \"product\"\n"
+ "INTERSECT select \"product_id\" from \"product\") t group by \"product_id\"";
final String expectedBigQuery = "SELECT product_id\n"
+ "FROM foodmart.product\n"
+ "INTERSECT DISTINCT\n"
+ "SELECT product_id\n"
+ "FROM foodmart.product";
sql(query).withBigQuery().ok(expectedBigQuery);
}
@Test public void testExceptOperatorForBigQuery() {
final String query = "select mod(11,3) from \"product\"\n"
+ "EXCEPT select 1 from \"product\"";
final String expected = "SELECT MOD(11, 3)\n"
+ "FROM foodmart.product\n"
+ "EXCEPT DISTINCT\n"
+ "SELECT 1\n"
+ "FROM foodmart.product";
sql(query).withBigQuery().ok(expected);
}
@Test public void testSelectQueryWithOrderByDescAndNullsFirstShouldBeEmulated() {
final String query = "select \"product_id\" from \"product\"\n"
+ "order by \"product_id\" desc nulls first";
// Hive and MSSQL do not support NULLS FIRST, so need to emulate
final String expected = "SELECT product_id\n"
+ "FROM foodmart.product\n"
+ "ORDER BY product_id IS NULL DESC, product_id DESC";
final String expectedSpark = "SELECT product_id\n"
+ "FROM foodmart.product\n"
+ "ORDER BY product_id DESC NULLS FIRST";
final String expectedMssql = "SELECT [product_id]\n"
+ "FROM [foodmart].[product]\n"
+ "ORDER BY CASE WHEN [product_id] IS NULL THEN 0 ELSE 1 END, [product_id] DESC";
sql(query)
.withSpark()
.ok(expectedSpark)
.withHive()
.ok(expected)
.withBigQuery()
.ok(expected)
.withMssql()
.ok(expectedMssql);
}
@Test void testSelectOrderByDescNullsFirst() {
final String query = "select \"product_id\" from \"product\"\n"
+ "order by \"product_id\" desc nulls first";
// Hive and MSSQL do not support NULLS FIRST, so need to emulate
final String expected = "SELECT product_id\n"
+ "FROM foodmart.product\n"
+ "ORDER BY product_id IS NULL DESC, product_id DESC";
final String mssqlExpected = "SELECT [product_id]\n"
+ "FROM [foodmart].[product]\n"
+ "ORDER BY CASE WHEN [product_id] IS NULL THEN 0 ELSE 1 END, [product_id] DESC";
sql(query)
.dialect(HiveSqlDialect.DEFAULT).ok(expected)
.dialect(MssqlSqlDialect.DEFAULT).ok(mssqlExpected);
}
@Test void testSelectOrderByAscNullsLast() {
final String query = "select \"product_id\" from \"product\"\n"
+ "order by \"product_id\" nulls last";
// Hive and MSSQL do not support NULLS LAST, so need to emulate
final String expected = "SELECT product_id\n"
+ "FROM foodmart.product\n"
+ "ORDER BY product_id IS NULL, product_id";
final String mssqlExpected = "SELECT [product_id]\n"
+ "FROM [foodmart].[product]\n"
+ "ORDER BY CASE WHEN [product_id] IS NULL THEN 1 ELSE 0 END, [product_id]";
sql(query)
.dialect(HiveSqlDialect.DEFAULT).ok(expected)
.dialect(MssqlSqlDialect.DEFAULT).ok(mssqlExpected);
}
@Test public void testSelectQueryWithOrderByAscAndNullsLastShouldBeEmulated() {
final String query = "select \"product_id\" from \"product\"\n"
+ "order by \"product_id\" nulls last";
// Hive and MSSQL do not support NULLS LAST, so need to emulate
final String expected = "SELECT product_id\n"
+ "FROM foodmart.product\n"
+ "ORDER BY product_id IS NULL, product_id";
final String expectedSpark = "SELECT product_id\nFROM foodmart.product\n"
+ "ORDER BY product_id NULLS LAST";
final String expectedMssql = "SELECT [product_id]\n"
+ "FROM [foodmart].[product]\n"
+ "ORDER BY CASE WHEN [product_id] IS NULL THEN 1 ELSE 0 END, [product_id]";
sql(query)
.withSpark()
.ok(expectedSpark)
.withHive()
.ok(expected)
.withBigQuery()
.ok(expected)
.withMssql()
.ok(expectedMssql);
}
@Test public void testSelectQueryWithOrderByAscNullsFirstShouldNotAddNullEmulation() {
final String query = "select \"product_id\" from \"product\"\n"
+ "order by \"product_id\" nulls first";
// Hive and MSSQL do not support NULLS FIRST, but nulls sort low, so no
// need to emulate
final String expected = "SELECT product_id\n"
+ "FROM foodmart.product\n"
+ "ORDER BY product_id";
final String expectedMssql = "SELECT [product_id]\n"
+ "FROM [foodmart].[product]\n"
+ "ORDER BY [product_id]";
sql(query)
.withSpark()
.ok(expected)
.withHive()
.ok(expected)
.withBigQuery()
.ok(expected)
.withMssql()
.ok(expectedMssql);
}
@Test void testSelectOrderByAscNullsFirst() {
final String query = "select \"product_id\" from \"product\"\n"
+ "order by \"product_id\" nulls first";
// Hive and MSSQL do not support NULLS FIRST, but nulls sort low, so no
// need to emulate
final String expected = "SELECT product_id\n"
+ "FROM foodmart.product\n"
+ "ORDER BY product_id";
final String mssqlExpected = "SELECT [product_id]\n"
+ "FROM [foodmart].[product]\n"
+ "ORDER BY [product_id]";
sql(query)
.dialect(HiveSqlDialect.DEFAULT).ok(expected)
.dialect(MssqlSqlDialect.DEFAULT).ok(mssqlExpected);
}
@Test public void testSelectQueryWithOrderByDescNullsLastShouldNotAddNullEmulation() {
final String query = "select \"product_id\" from \"product\"\n"
+ "order by \"product_id\" desc nulls last";
// Hive and MSSQL do not support NULLS LAST, but nulls sort low, so no
// need to emulate
final String expected = "SELECT product_id\n"
+ "FROM foodmart.product\n"
+ "ORDER BY product_id DESC";
final String expectedMssql = "SELECT [product_id]\n"
+ "FROM [foodmart].[product]\n"
+ "ORDER BY [product_id] DESC";
sql(query)
.withSpark()
.ok(expected)
.withHive()
.ok(expected)
.withBigQuery()
.ok(expected)
.withMssql()
.ok(expectedMssql);
}
@Test void testSelectOrderByDescNullsLast() {
final String query = "select \"product_id\" from \"product\"\n"
+ "order by \"product_id\" desc nulls last";
// Hive and MSSQL do not support NULLS LAST, but nulls sort low, so no
// need to emulate
final String expected = "SELECT product_id\n"
+ "FROM foodmart.product\n"
+ "ORDER BY product_id DESC";
final String mssqlExpected = "SELECT [product_id]\n"
+ "FROM [foodmart].[product]\n"
+ "ORDER BY [product_id] DESC";
sql(query)
.dialect(HiveSqlDialect.DEFAULT).ok(expected)
.dialect(MssqlSqlDialect.DEFAULT).ok(mssqlExpected);
}
@Test void testHiveSelectQueryWithOverDescAndNullsFirstShouldBeEmulated() {
final String query = "SELECT row_number() over "
+ "(order by \"hire_date\" desc nulls first) FROM \"employee\"";
final String expected = "SELECT ROW_NUMBER() "
+ "OVER (ORDER BY hire_date IS NULL DESC, hire_date DESC)\n"
+ "FROM foodmart.employee";
sql(query).dialect(HiveSqlDialect.DEFAULT).ok(expected);
}
@Test void testHiveSelectQueryWithOverAscAndNullsLastShouldBeEmulated() {
final String query = "SELECT row_number() over "
+ "(order by \"hire_date\" nulls last) FROM \"employee\"";
final String expected = "SELECT ROW_NUMBER() OVER (ORDER BY hire_date IS NULL, hire_date)\n"
+ "FROM foodmart.employee";
sql(query).dialect(HiveSqlDialect.DEFAULT).ok(expected);
}
@Test void testHiveSelectQueryWithOverAscNullsFirstShouldNotAddNullEmulation() {
final String query = "SELECT row_number() over "
+ "(order by \"hire_date\" nulls first) FROM \"employee\"";
final String expected = "SELECT ROW_NUMBER() OVER (ORDER BY hire_date)\n"
+ "FROM foodmart.employee";
sql(query).dialect(HiveSqlDialect.DEFAULT).ok(expected);
}
@Test void testCharLengthFunctionEmulationForHiveAndBigqueryAndSpark() {
final String query = "select char_length('xyz') from \"product\"";
final String expected = "SELECT LENGTH('xyz')\n"
+ "FROM foodmart.product";
final String expectedSnowFlake = "SELECT LENGTH('xyz')\n"
+ "FROM \"foodmart\".\"product\"";
sql(query)
.withHive()
.ok(expected)
.withBigQuery()
.ok(expected)
.withSpark()
.ok(expected)
.withSnowflake()
.ok(expectedSnowFlake);
}
@Test public void testCharacterLengthFunctionEmulationForHiveAndBigqueryAndSpark() {
final String query = "select character_length('xyz') from \"product\"";
final String expected = "SELECT LENGTH('xyz')\n"
+ "FROM foodmart.product";
final String expectedSnowFlake = "SELECT LENGTH('xyz')\n"
+ "FROM \"foodmart\".\"product\"";
sql(query)
.withHive()
.ok(expected)
.withBigQuery()
.ok(expected)
.withSpark()
.ok(expected)
.withSnowflake()
.ok(expectedSnowFlake);
}
// Hive: comma-style SUBSTRING(str, start, len) is kept as-is.
@Test void testHiveSubstringWithLength() {
String query = "SELECT SUBSTRING('ABC', 2, 3)"
+ "from \"foodmart\".\"reserve_employee\"";
final String expected = "SELECT SUBSTRING('ABC', 2, 3)\n"
+ "FROM foodmart.reserve_employee";
sql(query).withHive().ok(expected);
}
// Hive: ANSI SUBSTRING(str FROM start) is rewritten to comma syntax.
@Test void testHiveSubstringWithANSI() {
String query = "SELECT SUBSTRING('ABC' FROM 2)"
+ "from \"foodmart\".\"reserve_employee\"";
final String expected = "SELECT SUBSTRING('ABC', 2)\n"
+ "FROM foodmart.reserve_employee";
sql(query).withHive().ok(expected);
}
// Hive: ANSI SUBSTRING(str FROM start FOR len) is rewritten to comma syntax.
@Test void testHiveSubstringWithANSIAndLength() {
String query = "SELECT SUBSTRING('ABC' FROM 2 FOR 3)"
+ "from \"foodmart\".\"reserve_employee\"";
final String expected = "SELECT SUBSTRING('ABC', 2, 3)\n"
+ "FROM foodmart.reserve_employee";
sql(query).withHive().ok(expected);
}
// Hive: DESC NULLS LAST matches the dialect's default, so no emulation key.
@Test void testHiveSelectQueryWithOverDescNullsLastShouldNotAddNullEmulation() {
final String query = "SELECT row_number() over "
+ "(order by \"hire_date\" desc nulls last) FROM \"employee\"";
final String expected = "SELECT ROW_NUMBER() OVER (ORDER BY hire_date DESC)\n"
+ "FROM foodmart.employee";
sql(query).dialect(HiveSqlDialect.DEFAULT).ok(expected);
}
@Test void testMysqlCastToBigint() {
// MySQL does not allow cast to BIGINT; instead cast to SIGNED.
final String query = "select cast(\"product_id\" as bigint) from \"product\"";
final String expected = "SELECT CAST(`product_id` AS SIGNED)\n"
+ "FROM `foodmart`.`product`";
sql(query).withMysql().ok(expected);
}
@Test void testMysqlCastToInteger() {
// MySQL does not allow cast to INTEGER; instead cast to SIGNED.
final String query = "select \"employee_id\",\n"
+ " cast(\"salary_paid\" * 10000 as integer)\n"
+ "from \"salary\"";
final String expected = "SELECT `employee_id`,"
+ " CAST(`salary_paid` * 10000 AS SIGNED)\n"
+ "FROM `foodmart`.`salary`";
sql(query).withMysql().ok(expected);
}
// Hive >= 2.1 supports NULLS FIRST/LAST natively, so no "IS NULL" emulation
// is generated for either the 2.1 or 2.2 dialect context.
@Test void testHiveSelectQueryWithOrderByDescAndHighNullsWithVersionGreaterThanOrEq21() {
final HiveSqlDialect hive2_1Dialect =
new HiveSqlDialect(HiveSqlDialect.DEFAULT_CONTEXT
.withDatabaseMajorVersion(2)
.withDatabaseMinorVersion(1)
.withNullCollation(NullCollation.LOW));
final HiveSqlDialect hive2_2_Dialect =
new HiveSqlDialect(HiveSqlDialect.DEFAULT_CONTEXT
.withDatabaseMajorVersion(2)
.withDatabaseMinorVersion(2)
.withNullCollation(NullCollation.LOW));
final String query = "select \"product_id\" from \"product\"\n"
+ "order by \"product_id\" desc nulls first";
final String expected = "SELECT product_id\n"
+ "FROM foodmart.product\n"
+ "ORDER BY product_id DESC NULLS FIRST";
sql(query).dialect(hive2_1Dialect).ok(expected);
sql(query).dialect(hive2_2_Dialect).ok(expected);
}
// Same as the ORDER BY variant above but inside an OVER clause:
// Hive >= 2.1 emits NULLS FIRST natively.
@Test void testHiveSelectQueryWithOverDescAndHighNullsWithVersionGreaterThanOrEq21() {
final HiveSqlDialect hive2_1Dialect =
new HiveSqlDialect(SqlDialect.EMPTY_CONTEXT
.withDatabaseMajorVersion(2)
.withDatabaseMinorVersion(1)
.withNullCollation(NullCollation.LOW));
final HiveSqlDialect hive2_2_Dialect =
new HiveSqlDialect(SqlDialect.EMPTY_CONTEXT
.withDatabaseMajorVersion(2)
.withDatabaseMinorVersion(2)
.withNullCollation(NullCollation.LOW));
final String query = "SELECT row_number() over "
+ "(order by \"hire_date\" desc nulls first) FROM \"employee\"";
final String expected = "SELECT ROW_NUMBER() OVER (ORDER BY hire_date DESC NULLS FIRST)\n"
+ "FROM foodmart.employee";
sql(query).dialect(hive2_1Dialect).ok(expected);
sql(query).dialect(hive2_2_Dialect).ok(expected);
}
// Hive 2.0 predates native NULLS FIRST/LAST, so the "IS NULL DESC" emulation
// key is still required.
@Test void testHiveSelectQueryWithOrderByDescAndHighNullsWithVersion20() {
final HiveSqlDialect hive2_1_0_Dialect =
new HiveSqlDialect(HiveSqlDialect.DEFAULT_CONTEXT
.withDatabaseMajorVersion(2)
.withDatabaseMinorVersion(0)
.withNullCollation(NullCollation.LOW));
final String query = "select \"product_id\" from \"product\"\n"
+ "order by \"product_id\" desc nulls first";
final String expected = "SELECT product_id\n"
+ "FROM foodmart.product\n"
+ "ORDER BY product_id IS NULL DESC, product_id DESC";
sql(query).dialect(hive2_1_0_Dialect).ok(expected);
}
// Hive 2.0 inside OVER: NULLS FIRST still requires the "IS NULL DESC" key.
@Test void testHiveSelectQueryWithOverDescAndHighNullsWithVersion20() {
final HiveSqlDialect hive2_1_0_Dialect =
new HiveSqlDialect(SqlDialect.EMPTY_CONTEXT
.withDatabaseMajorVersion(2)
.withDatabaseMinorVersion(0)
.withNullCollation(NullCollation.LOW));
final String query = "SELECT row_number() over "
+ "(order by \"hire_date\" desc nulls first) FROM \"employee\"";
final String expected = "SELECT ROW_NUMBER() OVER "
+ "(ORDER BY hire_date IS NULL DESC, hire_date DESC)\n"
+ "FROM foodmart.employee";
sql(query).dialect(hive2_1_0_Dialect).ok(expected);
}
// JethroData: DESC NULLS FIRST is emulated by prefixing an ASC sort on the
// same column rather than an "IS NULL" expression.
@Test void testJethroDataSelectQueryWithOrderByDescAndNullsFirstShouldBeEmulated() {
final String query = "select \"product_id\" from \"product\"\n"
+ "order by \"product_id\" desc nulls first";
final String expected = "SELECT \"product_id\"\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "ORDER BY \"product_id\", \"product_id\" DESC";
sql(query).dialect(jethroDataSqlDialect()).ok(expected);
}
// JethroData inside OVER: same duplicate-column emulation as for ORDER BY.
@Test void testJethroDataSelectQueryWithOverDescAndNullsFirstShouldBeEmulated() {
final String query = "SELECT row_number() over "
+ "(order by \"hire_date\" desc nulls first) FROM \"employee\"";
final String expected = "SELECT ROW_NUMBER() OVER "
+ "(ORDER BY \"hire_date\", \"hire_date\" DESC)\n"
+ "FROM \"foodmart\".\"employee\"";
sql(query).dialect(jethroDataSqlDialect()).ok(expected);
}
// MySQL (default null collation): DESC NULLS FIRST needs "IS NULL DESC".
@Test void testMySqlSelectQueryWithOrderByDescAndNullsFirstShouldBeEmulated() {
final String query = "select \"product_id\" from \"product\"\n"
+ "order by \"product_id\" desc nulls first";
final String expected = "SELECT `product_id`\n"
+ "FROM `foodmart`.`product`\n"
+ "ORDER BY `product_id` IS NULL DESC, `product_id` DESC";
sql(query).dialect(MysqlSqlDialect.DEFAULT).ok(expected);
}
// MySQL inside OVER: DESC NULLS FIRST needs "IS NULL DESC" emulation.
@Test void testMySqlSelectQueryWithOverDescAndNullsFirstShouldBeEmulated() {
final String query = "SELECT row_number() over "
+ "(order by \"hire_date\" desc nulls first) FROM \"employee\"";
final String expected = "SELECT ROW_NUMBER() OVER "
+ "(ORDER BY `hire_date` IS NULL DESC, `hire_date` DESC)\n"
+ "FROM `foodmart`.`employee`";
sql(query).dialect(MysqlSqlDialect.DEFAULT).ok(expected);
}
// MySQL: ASC NULLS LAST needs an "IS NULL" emulation key.
@Test void testMySqlSelectQueryWithOrderByAscAndNullsLastShouldBeEmulated() {
final String query = "select \"product_id\" from \"product\"\n"
+ "order by \"product_id\" nulls last";
final String expected = "SELECT `product_id`\n"
+ "FROM `foodmart`.`product`\n"
+ "ORDER BY `product_id` IS NULL, `product_id`";
sql(query).dialect(MysqlSqlDialect.DEFAULT).ok(expected);
}
// MySQL inside OVER: ASC NULLS LAST needs an "IS NULL" emulation key.
@Test void testMySqlSelectQueryWithOverAscAndNullsLastShouldBeEmulated() {
final String query = "SELECT row_number() over "
+ "(order by \"hire_date\" nulls last) FROM \"employee\"";
final String expected = "SELECT ROW_NUMBER() OVER "
+ "(ORDER BY `hire_date` IS NULL, `hire_date`)\n"
+ "FROM `foodmart`.`employee`";
sql(query).dialect(MysqlSqlDialect.DEFAULT).ok(expected);
}
// MySQL: ASC NULLS FIRST matches the default collation — no emulation.
@Test void testMySqlSelectQueryWithOrderByAscNullsFirstShouldNotAddNullEmulation() {
final String query = "select \"product_id\" from \"product\"\n"
+ "order by \"product_id\" nulls first";
final String expected = "SELECT `product_id`\n"
+ "FROM `foodmart`.`product`\n"
+ "ORDER BY `product_id`";
sql(query).dialect(MysqlSqlDialect.DEFAULT).ok(expected);
}
// MySQL inside OVER: ASC NULLS FIRST matches the default — no emulation.
@Test void testMySqlSelectQueryWithOverAscNullsFirstShouldNotAddNullEmulation() {
final String query = "SELECT row_number() "
+ "over (order by \"hire_date\" nulls first) FROM \"employee\"";
final String expected = "SELECT ROW_NUMBER() OVER (ORDER BY `hire_date`)\n"
+ "FROM `foodmart`.`employee`";
sql(query).dialect(MysqlSqlDialect.DEFAULT).ok(expected);
}
// MySQL: DESC NULLS LAST matches the default collation — no emulation.
@Test void testMySqlSelectQueryWithOrderByDescNullsLastShouldNotAddNullEmulation() {
final String query = "select \"product_id\" from \"product\"\n"
+ "order by \"product_id\" desc nulls last";
final String expected = "SELECT `product_id`\n"
+ "FROM `foodmart`.`product`\n"
+ "ORDER BY `product_id` DESC";
sql(query).dialect(MysqlSqlDialect.DEFAULT).ok(expected);
}
// MySQL inside OVER: DESC NULLS LAST matches the default — no emulation.
@Test void testMySqlSelectQueryWithOverDescNullsLastShouldNotAddNullEmulation() {
final String query = "SELECT row_number() "
+ "over (order by \"hire_date\" desc nulls last) FROM \"employee\"";
final String expected = "SELECT ROW_NUMBER() OVER (ORDER BY `hire_date` DESC)\n"
+ "FROM `foodmart`.`employee`";
sql(query).dialect(MysqlSqlDialect.DEFAULT).ok(expected);
}
// MySQL: CAST to VARCHAR(n) becomes CHAR(n) when n is within CHAR's limit.
@Test void testMySqlCastToVarcharWithLessThanMaxPrecision() {
final String query = "select cast(\"product_id\" as varchar(50)), \"product_id\" "
+ "from \"product\" ";
final String expected = "SELECT CAST(`product_id` AS CHAR(50)), `product_id`\n"
+ "FROM `foodmart`.`product`";
sql(query).withMysql().ok(expected);
}
// MySQL: CAST to TIMESTAMP becomes DATETIME; the interval precision "(5)"
// is dropped in the emitted INTERVAL literal.
@Test void testMySqlCastToTimestamp() {
final String query = "select * from \"employee\" where \"hire_date\" - "
+ "INTERVAL '19800' SECOND(5) > cast(\"hire_date\" as TIMESTAMP) ";
final String expected = "SELECT *\nFROM `foodmart`.`employee`"
+ "\nWHERE (`hire_date` - INTERVAL '19800' SECOND) > CAST(`hire_date` AS DATETIME)";
sql(query).withMysql().ok(expected);
}
// MySQL: CAST to VARCHAR(n) with n over the CHAR limit is clamped to CHAR(255).
@Test void testMySqlCastToVarcharWithGreaterThanMaxPrecision() {
final String query = "select cast(\"product_id\" as varchar(500)), \"product_id\" "
+ "from \"product\" ";
final String expected = "SELECT CAST(`product_id` AS CHAR(255)), `product_id`\n"
+ "FROM `foodmart`.`product`";
sql(query).withMysql().ok(expected);
}
// MySQL with HIGH null collation: ASC NULLS LAST matches — no emulation.
@Test void testMySqlWithHighNullsSelectWithOrderByAscNullsLastAndNoEmulation() {
final String query = "select \"product_id\" from \"product\"\n"
+ "order by \"product_id\" nulls last";
final String expected = "SELECT `product_id`\n"
+ "FROM `foodmart`.`product`\n"
+ "ORDER BY `product_id`";
sql(query).dialect(mySqlDialect(NullCollation.HIGH)).ok(expected);
}
// MySQL HIGH nulls, OVER: ASC NULLS LAST matches — no emulation.
@Test void testMySqlWithHighNullsSelectWithOverAscNullsLastAndNoEmulation() {
final String query = "SELECT row_number() "
+ "over (order by \"hire_date\" nulls last) FROM \"employee\"";
final String expected = "SELECT ROW_NUMBER() OVER (ORDER BY `hire_date`)\n"
+ "FROM `foodmart`.`employee`";
sql(query).dialect(mySqlDialect(NullCollation.HIGH)).ok(expected);
}
// MySQL HIGH nulls: ASC NULLS FIRST needs "IS NULL DESC" emulation.
@Test void testMySqlWithHighNullsSelectWithOrderByAscNullsFirstAndNullEmulation() {
final String query = "select \"product_id\" from \"product\"\n"
+ "order by \"product_id\" nulls first";
final String expected = "SELECT `product_id`\n"
+ "FROM `foodmart`.`product`\n"
+ "ORDER BY `product_id` IS NULL DESC, `product_id`";
sql(query).dialect(mySqlDialect(NullCollation.HIGH)).ok(expected);
}
// MySQL HIGH nulls, OVER: ASC NULLS FIRST needs "IS NULL DESC" emulation.
@Test void testMySqlWithHighNullsSelectWithOverAscNullsFirstAndNullEmulation() {
final String query = "SELECT row_number() "
+ "over (order by \"hire_date\" nulls first) FROM \"employee\"";
final String expected = "SELECT ROW_NUMBER() "
+ "OVER (ORDER BY `hire_date` IS NULL DESC, `hire_date`)\n"
+ "FROM `foodmart`.`employee`";
sql(query).dialect(mySqlDialect(NullCollation.HIGH)).ok(expected);
}
// MySQL HIGH nulls: DESC NULLS FIRST matches — no emulation.
@Test void testMySqlWithHighNullsSelectWithOrderByDescNullsFirstAndNoEmulation() {
final String query = "select \"product_id\" from \"product\"\n"
+ "order by \"product_id\" desc nulls first";
final String expected = "SELECT `product_id`\n"
+ "FROM `foodmart`.`product`\n"
+ "ORDER BY `product_id` DESC";
sql(query).dialect(mySqlDialect(NullCollation.HIGH)).ok(expected);
}
// MySQL HIGH nulls, OVER: DESC NULLS FIRST matches — no emulation.
@Test void testMySqlWithHighNullsSelectWithOverDescNullsFirstAndNoEmulation() {
final String query = "SELECT row_number() "
+ "over (order by \"hire_date\" desc nulls first) FROM \"employee\"";
final String expected = "SELECT ROW_NUMBER() OVER (ORDER BY `hire_date` DESC)\n"
+ "FROM `foodmart`.`employee`";
sql(query).dialect(mySqlDialect(NullCollation.HIGH)).ok(expected);
}
// MySQL HIGH nulls: DESC NULLS LAST needs "IS NULL" emulation.
@Test void testMySqlWithHighNullsSelectWithOrderByDescNullsLastAndNullEmulation() {
final String query = "select \"product_id\" from \"product\"\n"
+ "order by \"product_id\" desc nulls last";
final String expected = "SELECT `product_id`\n"
+ "FROM `foodmart`.`product`\n"
+ "ORDER BY `product_id` IS NULL, `product_id` DESC";
sql(query).dialect(mySqlDialect(NullCollation.HIGH)).ok(expected);
}
// MySQL HIGH nulls, OVER: DESC NULLS LAST needs "IS NULL" emulation.
@Test void testMySqlWithHighNullsSelectWithOverDescNullsLastAndNullEmulation() {
final String query = "SELECT row_number() "
+ "over (order by \"hire_date\" desc nulls last) FROM \"employee\"";
final String expected = "SELECT ROW_NUMBER() "
+ "OVER (ORDER BY `hire_date` IS NULL, `hire_date` DESC)\n"
+ "FROM `foodmart`.`employee`";
sql(query).dialect(mySqlDialect(NullCollation.HIGH)).ok(expected);
}
// MySQL FIRST nulls: NULLS FIRST always matches — no emulation for DESC.
@Test void testMySqlWithFirstNullsSelectWithOrderByDescAndNullsFirstShouldNotBeEmulated() {
final String query = "select \"product_id\" from \"product\"\n"
+ "order by \"product_id\" desc nulls first";
final String expected = "SELECT `product_id`\n"
+ "FROM `foodmart`.`product`\n"
+ "ORDER BY `product_id` DESC";
sql(query).dialect(mySqlDialect(NullCollation.FIRST)).ok(expected);
}
// MySQL FIRST nulls, OVER: DESC NULLS FIRST matches — no emulation.
@Test void testMySqlWithFirstNullsSelectWithOverDescAndNullsFirstShouldNotBeEmulated() {
final String query = "SELECT row_number() "
+ "over (order by \"hire_date\" desc nulls first) FROM \"employee\"";
final String expected = "SELECT ROW_NUMBER() OVER (ORDER BY `hire_date` DESC)\n"
+ "FROM `foodmart`.`employee`";
sql(query).dialect(mySqlDialect(NullCollation.FIRST)).ok(expected);
}
// MySQL FIRST nulls: ASC NULLS FIRST matches — no emulation.
@Test void testMySqlWithFirstNullsSelectWithOrderByAscAndNullsFirstShouldNotBeEmulated() {
final String query = "select \"product_id\" from \"product\"\n"
+ "order by \"product_id\" nulls first";
final String expected = "SELECT `product_id`\n"
+ "FROM `foodmart`.`product`\n"
+ "ORDER BY `product_id`";
sql(query).dialect(mySqlDialect(NullCollation.FIRST)).ok(expected);
}
// MySQL FIRST nulls, OVER: ASC NULLS FIRST matches — no emulation.
@Test void testMySqlWithFirstNullsSelectWithOverAscAndNullsFirstShouldNotBeEmulated() {
final String query = "SELECT row_number() "
+ "over (order by \"hire_date\" nulls first) FROM \"employee\"";
final String expected = "SELECT ROW_NUMBER() OVER (ORDER BY `hire_date`)\n"
+ "FROM `foodmart`.`employee`";
sql(query).dialect(mySqlDialect(NullCollation.FIRST)).ok(expected);
}
// MySQL FIRST nulls: NULLS LAST requires "IS NULL" emulation (DESC case).
@Test void testMySqlWithFirstNullsSelectWithOrderByDescAndNullsLastShouldBeEmulated() {
final String query = "select \"product_id\" from \"product\"\n"
+ "order by \"product_id\" desc nulls last";
final String expected = "SELECT `product_id`\n"
+ "FROM `foodmart`.`product`\n"
+ "ORDER BY `product_id` IS NULL, `product_id` DESC";
sql(query).dialect(mySqlDialect(NullCollation.FIRST)).ok(expected);
}
// MySQL FIRST nulls, OVER: DESC NULLS LAST requires "IS NULL" emulation.
@Test void testMySqlWithFirstNullsSelectWithOverDescAndNullsLastShouldBeEmulated() {
final String query = "SELECT row_number() "
+ "over (order by \"hire_date\" desc nulls last) FROM \"employee\"";
final String expected = "SELECT ROW_NUMBER() "
+ "OVER (ORDER BY `hire_date` IS NULL, `hire_date` DESC)\n"
+ "FROM `foodmart`.`employee`";
sql(query).dialect(mySqlDialect(NullCollation.FIRST)).ok(expected);
}
// MySQL FIRST nulls: ASC NULLS LAST requires "IS NULL" emulation.
@Test void testMySqlWithFirstNullsSelectWithOrderByAscAndNullsLastShouldBeEmulated() {
final String query = "select \"product_id\" from \"product\"\n"
+ "order by \"product_id\" nulls last";
final String expected = "SELECT `product_id`\n"
+ "FROM `foodmart`.`product`\n"
+ "ORDER BY `product_id` IS NULL, `product_id`";
sql(query).dialect(mySqlDialect(NullCollation.FIRST)).ok(expected);
}
// MySQL FIRST nulls, OVER: ASC NULLS LAST requires "IS NULL" emulation.
@Test void testMySqlWithFirstNullsSelectWithOverAscAndNullsLastShouldBeEmulated() {
final String query = "SELECT row_number() "
+ "over (order by \"hire_date\" nulls last) FROM \"employee\"";
final String expected = "SELECT ROW_NUMBER() "
+ "OVER (ORDER BY `hire_date` IS NULL, `hire_date`)\n"
+ "FROM `foodmart`.`employee`";
sql(query).dialect(mySqlDialect(NullCollation.FIRST)).ok(expected);
}
// MySQL LAST nulls: NULLS FIRST requires "IS NULL DESC" emulation (DESC case).
@Test void testMySqlWithLastNullsSelectWithOrderByDescAndNullsFirstShouldBeEmulated() {
final String query = "select \"product_id\" from \"product\"\n"
+ "order by \"product_id\" desc nulls first";
final String expected = "SELECT `product_id`\n"
+ "FROM `foodmart`.`product`\n"
+ "ORDER BY `product_id` IS NULL DESC, `product_id` DESC";
sql(query).dialect(mySqlDialect(NullCollation.LAST)).ok(expected);
}
// MySQL LAST nulls, OVER: DESC NULLS FIRST requires "IS NULL DESC" emulation.
@Test void testMySqlWithLastNullsSelectWithOverDescAndNullsFirstShouldBeEmulated() {
final String query = "SELECT row_number() "
+ "over (order by \"hire_date\" desc nulls first) FROM \"employee\"";
final String expected = "SELECT ROW_NUMBER() "
+ "OVER (ORDER BY `hire_date` IS NULL DESC, `hire_date` DESC)\n"
+ "FROM `foodmart`.`employee`";
sql(query).dialect(mySqlDialect(NullCollation.LAST)).ok(expected);
}
// MySQL LAST nulls: ASC NULLS FIRST requires "IS NULL DESC" emulation.
@Test void testMySqlWithLastNullsSelectWithOrderByAscAndNullsFirstShouldBeEmulated() {
final String query = "select \"product_id\" from \"product\"\n"
+ "order by \"product_id\" nulls first";
final String expected = "SELECT `product_id`\n"
+ "FROM `foodmart`.`product`\n"
+ "ORDER BY `product_id` IS NULL DESC, `product_id`";
sql(query).dialect(mySqlDialect(NullCollation.LAST)).ok(expected);
}
// MySQL LAST nulls, OVER: ASC NULLS FIRST requires "IS NULL DESC" emulation.
@Test void testMySqlWithLastNullsSelectWithOverAscAndNullsFirstShouldBeEmulated() {
final String query = "SELECT row_number() "
+ "over (order by \"hire_date\" nulls first) FROM \"employee\"";
final String expected = "SELECT ROW_NUMBER() "
+ "OVER (ORDER BY `hire_date` IS NULL DESC, `hire_date`)\n"
+ "FROM `foodmart`.`employee`";
sql(query).dialect(mySqlDialect(NullCollation.LAST)).ok(expected);
}
// MySQL LAST nulls: DESC NULLS LAST matches — no emulation.
@Test void testMySqlWithLastNullsSelectWithOrderByDescAndNullsLastShouldNotBeEmulated() {
final String query = "select \"product_id\" from \"product\"\n"
+ "order by \"product_id\" desc nulls last";
final String expected = "SELECT `product_id`\n"
+ "FROM `foodmart`.`product`\n"
+ "ORDER BY `product_id` DESC";
sql(query).dialect(mySqlDialect(NullCollation.LAST)).ok(expected);
}
// MySQL LAST nulls, OVER: DESC NULLS LAST matches — no emulation.
@Test void testMySqlWithLastNullsSelectWithOverDescAndNullsLastShouldNotBeEmulated() {
final String query = "SELECT row_number() "
+ "over (order by \"hire_date\" desc nulls last) FROM \"employee\"";
final String expected = "SELECT ROW_NUMBER() OVER (ORDER BY `hire_date` DESC)\n"
+ "FROM `foodmart`.`employee`";
sql(query).dialect(mySqlDialect(NullCollation.LAST)).ok(expected);
}
// MySQL LAST nulls: ASC NULLS LAST matches — no emulation.
@Test void testMySqlWithLastNullsSelectWithOrderByAscAndNullsLastShouldNotBeEmulated() {
final String query = "select \"product_id\" from \"product\"\n"
+ "order by \"product_id\" nulls last";
final String expected = "SELECT `product_id`\n"
+ "FROM `foodmart`.`product`\n"
+ "ORDER BY `product_id`";
sql(query).dialect(mySqlDialect(NullCollation.LAST)).ok(expected);
}
// MySQL LAST nulls, OVER: ASC NULLS LAST matches — no emulation.
@Test void testMySqlWithLastNullsSelectWithOverAscAndNullsLastShouldNotBeEmulated() {
final String query = "SELECT row_number() over "
+ "(order by \"hire_date\" nulls last) FROM \"employee\"";
final String expected = "SELECT ROW_NUMBER() OVER (ORDER BY `hire_date`)\n"
+ "FROM `foodmart`.`employee`";
sql(query).dialect(mySqlDialect(NullCollation.LAST)).ok(expected);
}
// Precision-less VARCHAR cast: ClickHouse uses its `String` type, MySQL CHAR.
@Test void testCastToVarchar() {
String query = "select cast(\"product_id\" as varchar) from \"product\"";
final String expectedClickHouse = "SELECT CAST(`product_id` AS `String`)\n"
+ "FROM `foodmart`.`product`";
final String expectedMysql = "SELECT CAST(`product_id` AS CHAR)\n"
+ "FROM `foodmart`.`product`";
sql(query)
.withClickHouse()
.ok(expectedClickHouse)
.withMysql()
.ok(expectedMysql);
}
// LIMIT/OFFSET without ORDER BY: ANSI OFFSET/FETCH by default,
// "LIMIT offset, count" for ClickHouse, OFFSET + LIMIT for Presto.
// Fix: the original ran `sql(query).ok(expected)` twice, re-asserting the
// default dialect redundantly; the dialect checks are now one fixture chain,
// matching the style used by the char_length tests in this file.
@Test void testSelectQueryWithLimitClauseWithoutOrder() {
String query = "select \"product_id\" from \"product\" limit 100 offset 10";
final String expected = "SELECT \"product_id\"\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "OFFSET 10 ROWS\n"
+ "FETCH NEXT 100 ROWS ONLY";
final String expectedClickHouse = "SELECT `product_id`\n"
+ "FROM `foodmart`.`product`\n"
+ "LIMIT 10, 100";
final String expectedPresto = "SELECT \"product_id\"\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "OFFSET 10\n"
+ "LIMIT 100";
sql(query)
.ok(expected)
.withClickHouse()
.ok(expectedClickHouse)
.withPresto()
.ok(expectedPresto);
}
// ORDER BY + LIMIT/OFFSET: ANSI dialect keeps the sort column in the
// projection; BigQuery uses LIMIT/OFFSET and emulates null ordering.
@Test void testSelectQueryWithLimitOffsetClause() {
String query = "select \"product_id\" from \"product\"\n"
+ "order by \"net_weight\" asc limit 100 offset 10";
final String expected = "SELECT \"product_id\", \"net_weight\"\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "ORDER BY \"net_weight\"\n"
+ "OFFSET 10 ROWS\n"
+ "FETCH NEXT 100 ROWS ONLY";
// BigQuery uses LIMIT/OFFSET, and nulls sort low by default
final String expectedBigQuery = "SELECT product_id, net_weight\n"
+ "FROM foodmart.product\n"
+ "ORDER BY net_weight IS NULL, net_weight\n"
+ "LIMIT 100\n"
+ "OFFSET 10";
sql(query).ok(expected)
.withBigQuery().ok(expectedBigQuery);
}
// Dynamic parameters ("?") must survive the round trip unchanged.
@Test void testSelectQueryWithParameters() {
String query = "select * from \"product\" "
+ "where \"product_id\" = ? "
+ "AND ? >= \"shelf_width\"";
final String expected = "SELECT *\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "WHERE \"product_id\" = ? "
+ "AND ? >= \"shelf_width\"";
sql(query).ok(expected);
}
// ANSI OFFSET ... FETCH NEXT round-trips unchanged in the default dialect.
@Test void testSelectQueryWithFetchOffsetClause() {
String query = "select \"product_id\" from \"product\"\n"
+ "order by \"product_id\" offset 10 rows fetch next 100 rows only";
final String expected = "SELECT \"product_id\"\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "ORDER BY \"product_id\"\n"
+ "OFFSET 10 ROWS\n"
+ "FETCH NEXT 100 ROWS ONLY";
sql(query).ok(expected);
}
// FETCH NEXT n ROWS ONLY: MSSQL 2008 (version 10) must use TOP, later
// versions use FETCH; both emulate null ordering with a CASE key.
// Sybase uses TOP and no null emulation.
@Test void testSelectQueryWithFetchClause() {
String query = "select \"product_id\"\n"
+ "from \"product\"\n"
+ "order by \"product_id\" fetch next 100 rows only";
final String expected = "SELECT \"product_id\"\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "ORDER BY \"product_id\"\n"
+ "FETCH NEXT 100 ROWS ONLY";
final String expectedMssql10 = "SELECT TOP (100) [product_id]\n"
+ "FROM [foodmart].[product]\n"
+ "ORDER BY CASE WHEN [product_id] IS NULL THEN 1 ELSE 0 END, [product_id]";
final String expectedMssql = "SELECT [product_id]\n"
+ "FROM [foodmart].[product]\n"
+ "ORDER BY CASE WHEN [product_id] IS NULL THEN 1 ELSE 0 END, [product_id]\n"
+ "FETCH NEXT 100 ROWS ONLY";
final String expectedSybase = "SELECT TOP (100) product_id\n"
+ "FROM foodmart.product\n"
+ "ORDER BY product_id";
sql(query).ok(expected)
.withMssql(10).ok(expectedMssql10)
.withMssql(11).ok(expectedMssql)
.withMssql(14).ok(expectedMssql)
.withSybase().ok(expectedSybase);
}
// WHERE + GROUP BY + ORDER BY all unparse in conventional clause order.
@Test void testSelectQueryComplex() {
String query =
"select count(*), \"units_per_case\" from \"product\" where \"cases_per_pallet\" > 100 "
+ "group by \"product_id\", \"units_per_case\" order by \"units_per_case\" desc";
final String expected = "SELECT COUNT(*), \"units_per_case\"\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "WHERE \"cases_per_pallet\" > 100\n"
+ "GROUP BY \"product_id\", \"units_per_case\"\n"
+ "ORDER BY \"units_per_case\" DESC";
sql(query).ok(expected);
}
// Aggregates with a compound WHERE and multi-column GROUP BY.
@Test void testSelectQueryWithGroup() {
String query = "select"
+ " count(*), sum(\"employee_id\") from \"reserve_employee\" "
+ "where \"hire_date\" > '2015-01-01' "
+ "and (\"position_title\" = 'SDE' or \"position_title\" = 'SDM') "
+ "group by \"store_id\", \"position_title\"";
final String expected = "SELECT COUNT(*), SUM(\"employee_id\")\n"
+ "FROM \"foodmart\".\"reserve_employee\"\n"
+ "WHERE \"hire_date\" > '2015-01-01' "
+ "AND (\"position_title\" = 'SDE' OR \"position_title\" = 'SDM')\n"
+ "GROUP BY \"store_id\", \"position_title\"";
sql(query).ok(expected);
}
// Multi-way INNER JOIN: input aliases (s, c, p, pc) are replaced by the
// underlying table names in the generated SQL.
@Test void testSimpleJoin() {
String query = "select *\n"
+ "from \"sales_fact_1997\" as s\n"
+ "join \"customer\" as c on s.\"customer_id\" = c.\"customer_id\"\n"
+ "join \"product\" as p on s.\"product_id\" = p.\"product_id\"\n"
+ "join \"product_class\" as pc\n"
+ " on p.\"product_class_id\" = pc.\"product_class_id\"\n"
+ "where c.\"city\" = 'San Francisco'\n"
+ "and pc.\"product_department\" = 'Snacks'\n";
final String expected = "SELECT *\n"
+ "FROM \"foodmart\".\"sales_fact_1997\"\n"
+ "INNER JOIN \"foodmart\".\"customer\" "
+ "ON \"sales_fact_1997\".\"customer_id\" = \"customer\""
+ ".\"customer_id\"\n"
+ "INNER JOIN \"foodmart\".\"product\" "
+ "ON \"sales_fact_1997\".\"product_id\" = \"product\".\"product_id\"\n"
+ "INNER JOIN \"foodmart\".\"product_class\" "
+ "ON \"product\".\"product_class_id\" = \"product_class\""
+ ".\"product_class_id\"\n"
+ "WHERE \"customer\".\"city\" = 'San Francisco' AND "
+ "\"product_class\".\"product_department\" = 'Snacks'";
sql(query).ok(expected);
}
// JOIN ... USING: the star is expanded into the full de-duplicated column
// list (USING columns appear once), and USING is rewritten to ON.
@Test void testSimpleJoinUsing() {
String query = "select *\n"
+ "from \"sales_fact_1997\" as s\n"
+ " join \"customer\" as c using (\"customer_id\")\n"
+ " join \"product\" as p using (\"product_id\")\n"
+ " join \"product_class\" as pc using (\"product_class_id\")\n"
+ "where c.\"city\" = 'San Francisco'\n"
+ "and pc.\"product_department\" = 'Snacks'\n";
final String expected = "SELECT"
+ " \"product\".\"product_class_id\","
+ " \"sales_fact_1997\".\"product_id\","
+ " \"sales_fact_1997\".\"customer_id\","
+ " \"sales_fact_1997\".\"time_id\","
+ " \"sales_fact_1997\".\"promotion_id\","
+ " \"sales_fact_1997\".\"store_id\","
+ " \"sales_fact_1997\".\"store_sales\","
+ " \"sales_fact_1997\".\"store_cost\","
+ " \"sales_fact_1997\".\"unit_sales\","
+ " \"customer\".\"account_num\","
+ " \"customer\".\"lname\","
+ " \"customer\".\"fname\","
+ " \"customer\".\"mi\","
+ " \"customer\".\"address1\","
+ " \"customer\".\"address2\","
+ " \"customer\".\"address3\","
+ " \"customer\".\"address4\","
+ " \"customer\".\"city\","
+ " \"customer\".\"state_province\","
+ " \"customer\".\"postal_code\","
+ " \"customer\".\"country\","
+ " \"customer\".\"customer_region_id\","
+ " \"customer\".\"phone1\","
+ " \"customer\".\"phone2\","
+ " \"customer\".\"birthdate\","
+ " \"customer\".\"marital_status\","
+ " \"customer\".\"yearly_income\","
+ " \"customer\".\"gender\","
+ " \"customer\".\"total_children\","
+ " \"customer\".\"num_children_at_home\","
+ " \"customer\".\"education\","
+ " \"customer\".\"date_accnt_opened\","
+ " \"customer\".\"member_card\","
+ " \"customer\".\"occupation\","
+ " \"customer\".\"houseowner\","
+ " \"customer\".\"num_cars_owned\","
+ " \"customer\".\"fullname\","
+ " \"product\".\"brand_name\","
+ " \"product\".\"product_name\","
+ " \"product\".\"SKU\","
+ " \"product\".\"SRP\","
+ " \"product\".\"gross_weight\","
+ " \"product\".\"net_weight\","
+ " \"product\".\"recyclable_package\","
+ " \"product\".\"low_fat\","
+ " \"product\".\"units_per_case\","
+ " \"product\".\"cases_per_pallet\","
+ " \"product\".\"shelf_width\","
+ " \"product\".\"shelf_height\","
+ " \"product\".\"shelf_depth\","
+ " \"product_class\".\"product_subcategory\","
+ " \"product_class\".\"product_category\","
+ " \"product_class\".\"product_department\","
+ " \"product_class\".\"product_family\"\n"
+ "FROM \"foodmart\".\"sales_fact_1997\"\n"
+ "INNER JOIN \"foodmart\".\"customer\" "
+ "ON \"sales_fact_1997\".\"customer_id\" = \"customer\""
+ ".\"customer_id\"\n"
+ "INNER JOIN \"foodmart\".\"product\" "
+ "ON \"sales_fact_1997\".\"product_id\" = \"product\".\"product_id\"\n"
+ "INNER JOIN \"foodmart\".\"product_class\" "
+ "ON \"product\".\"product_class_id\" = \"product_class\""
+ ".\"product_class_id\"\n"
+ "WHERE \"customer\".\"city\" = 'San Francisco' AND "
+ "\"product_class\".\"product_department\" = 'Snacks'";
sql(query).ok(expected);
}
/** Test case for
* <a href="https://issues.apache.org/jira/browse/CALCITE-1636">[CALCITE-1636]
* JDBC adapter generates wrong SQL for self join with sub-query</a>.
*
* <p>DB2 must alias each sub-query occurrence distinctly (t, t0) so the
* self join does not produce ambiguous references. */
@Test void testSubQueryAlias() {
String query = "select t1.\"customer_id\", t2.\"customer_id\"\n"
+ "from (select \"customer_id\" from \"sales_fact_1997\") as t1\n"
+ "inner join (select \"customer_id\" from \"sales_fact_1997\") t2\n"
+ "on t1.\"customer_id\" = t2.\"customer_id\"";
final String expected = "SELECT *\n"
+ "FROM (SELECT sales_fact_1997.customer_id\n"
+ "FROM foodmart.sales_fact_1997 AS sales_fact_1997) AS t\n"
+ "INNER JOIN (SELECT sales_fact_19970.customer_id\n"
+ "FROM foodmart.sales_fact_1997 AS sales_fact_19970) AS t0 ON t.customer_id = t0"
+ ".customer_id";
sql(query).withDb2().ok(expected);
}
// Comma-separated FROM list (cartesian product) round-trips as comma syntax.
@Test void testCartesianProductWithCommaSyntax() {
String query = "select * from \"department\" , \"employee\"";
String expected = "SELECT *\n"
+ "FROM \"foodmart\".\"department\",\n"
+ "\"foodmart\".\"employee\"";
sql(query).ok(expected);
}
/** Test case for
* <a href="https://issues.apache.org/jira/browse/CALCITE-2652">[CALCITE-2652]
* SqlNode to SQL conversion fails if the join condition references a BOOLEAN
* column</a>.
*
* <p>Only asserts that conversion succeeds; no expected SQL is checked. */
@Test void testJoinOnBoolean() {
final String sql = "SELECT 1\n"
+ "from emps\n"
+ "join emp on (emp.deptno = emps.empno and manager)";
final String s = sql(sql).schema(CalciteAssert.SchemaSpec.POST).exec();
assertThat(s, notNullValue()); // sufficient that conversion did not throw
}
/** Test case for
* <a href="https://issues.apache.org/jira/browse/CALCITE-4249">[CALCITE-4249]
* JDBC adapter cannot translate NOT LIKE in join condition</a>.
*
* <p>Builds the join via {@link RelBuilder} (NOT applied to a LIKE call) and
* expects it to unparse as the composite {@code NOT LIKE} operator. */
@Test void testJoinOnNotLike() {
final Function<RelBuilder, RelNode> relFn = b ->
b.scan("EMP")
.scan("DEPT")
.join(JoinRelType.LEFT,
b.and(
b.equals(b.field(2, 0, "DEPTNO"),
b.field(2, 1, "DEPTNO")),
b.not(
b.call(SqlStdOperatorTable.LIKE,
b.field(2, 1, "DNAME"),
b.literal("ACCOUNTING")))))
.build();
final String expectedSql = "SELECT *\n"
+ "FROM \"scott\".\"EMP\"\n"
+ "LEFT JOIN \"scott\".\"DEPT\" "
+ "ON \"EMP\".\"DEPTNO\" = \"DEPT\".\"DEPTNO\" "
+ "AND \"DEPT\".\"DNAME\" NOT LIKE 'ACCOUNTING'";
relFn(relFn).ok(expectedSql);
}
// INNER JOIN ... ON TRUE degenerates to comma (cross-product) syntax.
@Test void testCartesianProductWithInnerJoinSyntax() {
String query = "select * from \"department\"\n"
+ "INNER JOIN \"employee\" ON TRUE";
String expected = "SELECT *\n"
+ "FROM \"foodmart\".\"department\",\n"
+ "\"foodmart\".\"employee\"";
sql(query).ok(expected);
}
// FULL JOIN ON TRUE must keep its explicit ON TRUE (cannot use comma syntax).
@Test void testFullJoinOnTrueCondition() {
String query = "select * from \"department\"\n"
+ "FULL JOIN \"employee\" ON TRUE";
String expected = "SELECT *\n"
+ "FROM \"foodmart\".\"department\"\n"
+ "FULL JOIN \"foodmart\".\"employee\" ON TRUE";
sql(query).ok(expected);
}
// Disabled: CASE over a value from a sub-query; the VALUES alias handling
// does not yet produce the expected output.
@Disabled
@Test void testCaseOnSubQuery() {
String query = "SELECT CASE WHEN v.g IN (0, 1) THEN 0 ELSE 1 END\n"
+ "FROM (SELECT * FROM \"foodmart\".\"customer\") AS c,\n"
+ " (SELECT 0 AS g) AS v\n"
+ "GROUP BY v.g";
final String expected = "SELECT"
+ " CASE WHEN \"t0\".\"G\" IN (0, 1) THEN 0 ELSE 1 END\n"
+ "FROM (SELECT *\nFROM \"foodmart\".\"customer\") AS \"t\",\n"
+ "(VALUES (0)) AS \"t0\" (\"G\")\n"
+ "GROUP BY \"t0\".\"G\"";
sql(query).ok(expected);
}
// IN (sub-query) is planned as an INNER JOIN against the grouped sub-query.
@Test void testSimpleIn() {
String query = "select * from \"department\" where \"department_id\" in (\n"
+ " select \"department_id\" from \"employee\"\n"
+ " where \"store_id\" < 150)";
final String expected = "SELECT "
+ "\"department\".\"department_id\", \"department\""
+ ".\"department_description\"\n"
+ "FROM \"foodmart\".\"department\"\nINNER JOIN "
+ "(SELECT \"department_id\"\nFROM \"foodmart\".\"employee\"\n"
+ "WHERE \"store_id\" < 150\nGROUP BY \"department_id\") AS \"t1\" "
+ "ON \"department\".\"department_id\" = \"t1\".\"department_id\"";
sql(query).ok(expected);
}
/** Test case for
* <a href="https://issues.apache.org/jira/browse/CALCITE-1332">[CALCITE-1332]
* DB2 should always use aliases for tables: x.y.z AS z</a>.
*
* <p>SELECT * over a two-table join; every table gets an explicit alias. */
@Test void testDb2DialectJoinStar() {
String query = "select * "
+ "from \"foodmart\".\"employee\" A "
+ "join \"foodmart\".\"department\" B\n"
+ "on A.\"department_id\" = B.\"department_id\"";
final String expected = "SELECT *\n"
+ "FROM foodmart.employee AS employee\n"
+ "INNER JOIN foodmart.department AS department "
+ "ON employee.department_id = department.department_id";
sql(query).withDb2().ok(expected);
}
// DB2 self join: the second occurrence gets a numbered alias (employee0).
@Test void testDb2DialectSelfJoinStar() {
String query = "select * "
+ "from \"foodmart\".\"employee\" A join \"foodmart\".\"employee\" B\n"
+ "on A.\"department_id\" = B.\"department_id\"";
final String expected = "SELECT *\n"
+ "FROM foodmart.employee AS employee\n"
+ "INNER JOIN foodmart.employee AS employee0 "
+ "ON employee.department_id = employee0.department_id";
sql(query).withDb2().ok(expected);
}
// DB2 join with explicit projection; user aliases A/B become table aliases.
@Test void testDb2DialectJoin() {
String query = "select A.\"employee_id\", B.\"department_id\" "
+ "from \"foodmart\".\"employee\" A join \"foodmart\".\"department\" B\n"
+ "on A.\"department_id\" = B.\"department_id\"";
final String expected = "SELECT"
+ " employee.employee_id, department.department_id\n"
+ "FROM foodmart.employee AS employee\n"
+ "INNER JOIN foodmart.department AS department "
+ "ON employee.department_id = department.department_id";
sql(query).withDb2().ok(expected);
}
@Test void testDb2DialectSelfJoin() {
String query = "select A.\"employee_id\", B.\"employee_id\" from "
+ "\"foodmart\".\"employee\" A join \"foodmart\".\"employee\" B\n"
+ "on A.\"department_id\" = B.\"department_id\"";
final String expected = "SELECT"
+ " employee.employee_id, employee0.employee_id AS employee_id0\n"
+ "FROM foodmart.employee AS employee\n"
+ "INNER JOIN foodmart.employee AS employee0 "
+ "ON employee.department_id = employee0.department_id";
sql(query).withDb2().ok(expected);
}
/** DB2: columns in the WHERE predicate are qualified with the table alias. */
@Test void testDb2DialectWhere() {
String query = "select A.\"employee_id\" from "
+ "\"foodmart\".\"employee\" A where A.\"department_id\" < 1000";
final String expected = "SELECT employee.employee_id\n"
+ "FROM foodmart.employee AS employee\n"
+ "WHERE employee.department_id < 1000";
sql(query).withDb2().ok(expected);
}
/** DB2: join plus a WHERE filter on the left input. */
@Test void testDb2DialectJoinWhere() {
String query = "select A.\"employee_id\", B.\"department_id\" "
+ "from \"foodmart\".\"employee\" A join \"foodmart\".\"department\" B\n"
+ "on A.\"department_id\" = B.\"department_id\" "
+ "where A.\"employee_id\" < 1000";
final String expected = "SELECT"
+ " employee.employee_id, department.department_id\n"
+ "FROM foodmart.employee AS employee\n"
+ "INNER JOIN foodmart.department AS department "
+ "ON employee.department_id = department.department_id\n"
+ "WHERE employee.employee_id < 1000";
sql(query).withDb2().ok(expected);
}
/** DB2: self-join plus a WHERE filter on the right (re-aliased) input. */
@Test void testDb2DialectSelfJoinWhere() {
String query = "select A.\"employee_id\", B.\"employee_id\" from "
+ "\"foodmart\".\"employee\" A join \"foodmart\".\"employee\" B\n"
+ "on A.\"department_id\" = B.\"department_id\" "
+ "where B.\"employee_id\" < 2000";
final String expected = "SELECT "
+ "employee.employee_id, employee0.employee_id AS employee_id0\n"
+ "FROM foodmart.employee AS employee\n"
+ "INNER JOIN foodmart.employee AS employee0 "
+ "ON employee.department_id = employee0.department_id\n"
+ "WHERE employee0.employee_id < 2000";
sql(query).withDb2().ok(expected);
}
/** DB2: the CAST argument is qualified with the table alias. */
@Test void testDb2DialectCast() {
String query = "select \"hire_date\", cast(\"hire_date\" as varchar(10)) "
+ "from \"foodmart\".\"reserve_employee\"";
final String expected = "SELECT reserve_employee.hire_date, "
+ "CAST(reserve_employee.hire_date AS VARCHAR(10))\n"
+ "FROM foodmart.reserve_employee AS reserve_employee";
sql(query).withDb2().ok(expected);
}
/** DB2: GROUP BY and HAVING columns are qualified with the table alias. */
@Test void testDb2DialectSelectQueryWithGroupByHaving() {
String query = "select count(*) from \"product\" "
+ "group by \"product_class_id\", \"product_id\" "
+ "having \"product_id\" > 10";
final String expected = "SELECT COUNT(*)\n"
+ "FROM foodmart.product AS product\n"
+ "GROUP BY product.product_class_id, product.product_id\n"
+ "HAVING product.product_id > 10";
sql(query).withDb2().ok(expected);
}
/** DB2: WHERE, GROUP BY and ORDER BY clauses all use the table alias. */
@Test void testDb2DialectSelectQueryComplex() {
String query = "select count(*), \"units_per_case\" "
+ "from \"product\" where \"cases_per_pallet\" > 100 "
+ "group by \"product_id\", \"units_per_case\" "
+ "order by \"units_per_case\" desc";
final String expected = "SELECT COUNT(*), product.units_per_case\n"
+ "FROM foodmart.product AS product\n"
+ "WHERE product.cases_per_pallet > 100\n"
+ "GROUP BY product.product_id, product.units_per_case\n"
+ "ORDER BY product.units_per_case DESC";
sql(query).withDb2().ok(expected);
}
/** Test case for
* <a href="https://issues.apache.org/jira/browse/CALCITE-4090">[CALCITE-4090]
* DB2 aliasing breaks with a complex SELECT above a sub-query</a>. */
@Test void testDb2SubQueryAlias() {
String query = "select count(foo), \"units_per_case\"\n"
+ "from (select \"units_per_case\", \"cases_per_pallet\",\n"
+ " \"product_id\", 1 as foo\n"
+ " from \"product\")\n"
+ "where \"cases_per_pallet\" > 100\n"
+ "group by \"product_id\", \"units_per_case\"\n"
+ "order by \"units_per_case\" desc";
// The derived table is aliased "t" and outer references use that alias.
final String expected = "SELECT COUNT(*), t.units_per_case\n"
+ "FROM (SELECT product.units_per_case, product.cases_per_pallet, "
+ "product.product_id, 1 AS FOO\n"
+ "FROM foodmart.product AS product) AS t\n"
+ "WHERE t.cases_per_pallet > 100\n"
+ "GROUP BY t.product_id, t.units_per_case\n"
+ "ORDER BY t.units_per_case DESC";
sql(query).withDb2().ok(expected);
}
/** DB2: a sub-query built from a UNION ALL is wrapped and aliased ("t3"),
* and the second branch's table scan is re-aliased ("product0"). */
@Test void testDb2SubQueryFromUnion() {
String query = "select count(foo), \"units_per_case\"\n"
+ "from (select \"units_per_case\", \"cases_per_pallet\",\n"
+ " \"product_id\", 1 as foo\n"
+ " from \"product\"\n"
+ " where \"cases_per_pallet\" > 100\n"
+ " union all\n"
+ " select \"units_per_case\", \"cases_per_pallet\",\n"
+ " \"product_id\", 1 as foo\n"
+ " from \"product\"\n"
+ " where \"cases_per_pallet\" < 100)\n"
+ "where \"cases_per_pallet\" > 100\n"
+ "group by \"product_id\", \"units_per_case\"\n"
+ "order by \"units_per_case\" desc";
final String expected = "SELECT COUNT(*), t3.units_per_case\n"
+ "FROM (SELECT product.units_per_case, product.cases_per_pallet, "
+ "product.product_id, 1 AS FOO\n"
+ "FROM foodmart.product AS product\n"
+ "WHERE product.cases_per_pallet > 100\n"
+ "UNION ALL\n"
+ "SELECT product0.units_per_case, product0.cases_per_pallet, "
+ "product0.product_id, 1 AS FOO\n"
+ "FROM foodmart.product AS product0\n"
+ "WHERE product0.cases_per_pallet < 100) AS t3\n"
+ "WHERE t3.cases_per_pallet > 100\n"
+ "GROUP BY t3.product_id, t3.units_per_case\n"
+ "ORDER BY t3.units_per_case DESC";
sql(query).withDb2().ok(expected);
}
/** DB2: aggregate query with a compound WHERE predicate and GROUP BY;
* every column reference is qualified with the table alias. */
@Test void testDb2DialectSelectQueryWithGroup() {
String query = "select count(*), sum(\"employee_id\") "
+ "from \"reserve_employee\" "
+ "where \"hire_date\" > '2015-01-01' "
+ "and (\"position_title\" = 'SDE' or \"position_title\" = 'SDM') "
+ "group by \"store_id\", \"position_title\"";
final String expected = "SELECT"
+ " COUNT(*), SUM(reserve_employee.employee_id)\n"
+ "FROM foodmart.reserve_employee AS reserve_employee\n"
+ "WHERE reserve_employee.hire_date > '2015-01-01' "
+ "AND (reserve_employee.position_title = 'SDE' OR "
+ "reserve_employee.position_title = 'SDM')\n"
+ "GROUP BY reserve_employee.store_id, reserve_employee.position_title";
sql(query).withDb2().ok(expected);
}
/** Test case for
* <a href="https://issues.apache.org/jira/browse/CALCITE-1372">[CALCITE-1372]
* JDBC adapter generates SQL with wrong field names</a>. */
@Test void testJoinPlan2() {
final String sql = "SELECT v1.deptno, v2.deptno\n"
+ "FROM dept v1 LEFT JOIN emp v2 ON v1.deptno = v2.deptno\n"
+ "WHERE v2.job LIKE 'PRESIDENT'";
final String expected = "SELECT \"DEPT\".\"DEPTNO\","
+ " \"EMP\".\"DEPTNO\" AS \"DEPTNO0\"\n"
+ "FROM \"SCOTT\".\"DEPT\"\n"
+ "LEFT JOIN \"SCOTT\".\"EMP\""
+ " ON \"DEPT\".\"DEPTNO\" = \"EMP\".\"DEPTNO\"\n"
+ "WHERE \"EMP\".\"JOB\" LIKE 'PRESIDENT'";
// DB2 does not have implicit aliases, so generates explicit "AS DEPT"
// and "AS EMP"
final String expectedDb2 = "SELECT DEPT.DEPTNO, EMP.DEPTNO AS DEPTNO0\n"
+ "FROM SCOTT.DEPT AS DEPT\n"
+ "LEFT JOIN SCOTT.EMP AS EMP ON DEPT.DEPTNO = EMP.DEPTNO\n"
+ "WHERE EMP.JOB LIKE 'PRESIDENT'";
// Uses the JDBC SCOTT schema rather than the default foodmart schema.
sql(sql)
.schema(CalciteAssert.SchemaSpec.JDBC_SCOTT)
.ok(expected)
.withDb2()
.ok(expectedDb2);
}
/** Test case for
* <a href="https://issues.apache.org/jira/browse/CALCITE-1422">[CALCITE-1422]
* In JDBC adapter, allow IS NULL and IS NOT NULL operators in generated SQL
* join condition</a>. */
@Test void testSimpleJoinConditionWithIsNullOperators() {
String query = "select *\n"
+ "from \"foodmart\".\"sales_fact_1997\" as \"t1\"\n"
+ "inner join \"foodmart\".\"customer\" as \"t2\"\n"
+ "on \"t1\".\"customer_id\" = \"t2\".\"customer_id\" or "
+ "(\"t1\".\"customer_id\" is null "
+ "and \"t2\".\"customer_id\" is null) or\n"
+ "\"t2\".\"occupation\" is null\n"
+ "inner join \"foodmart\".\"product\" as \"t3\"\n"
+ "on \"t1\".\"product_id\" = \"t3\".\"product_id\" or "
+ "(\"t1\".\"product_id\" is not null or "
+ "\"t3\".\"product_id\" is not null)";
// Some of the "IS NULL" and "IS NOT NULL" are reduced to TRUE or FALSE,
// but not all.
String expected = "SELECT *\nFROM \"foodmart\".\"sales_fact_1997\"\n"
+ "INNER JOIN \"foodmart\".\"customer\" "
+ "ON \"sales_fact_1997\".\"customer_id\" = \"customer\".\"customer_id\""
+ " OR FALSE AND FALSE"
+ " OR \"customer\".\"occupation\" IS NULL\n"
+ "INNER JOIN \"foodmart\".\"product\" "
+ "ON \"sales_fact_1997\".\"product_id\" = \"product\".\"product_id\""
+ " OR TRUE"
+ " OR TRUE";
// The hook prevents RelBuilder from removing "FALSE AND FALSE" and such
// (the hook is installed only for the duration of this conversion).
try (Hook.Closeable ignore =
Hook.REL_BUILDER_SIMPLIFY.addThread(Hook.propertyJ(false))) {
sql(query).ok(expected);
}
}
/** Test case for
* <a href="https://issues.apache.org/jira/browse/CALCITE-1586">[CALCITE-1586]
* JDBC adapter generates wrong SQL if UNION has more than two inputs</a>. */
@Test void testThreeQueryUnion() {
String query = "SELECT \"product_id\" FROM \"product\" "
+ " UNION ALL "
+ "SELECT \"product_id\" FROM \"sales_fact_1997\" "
+ " UNION ALL "
+ "SELECT \"product_class_id\" AS product_id FROM \"product_class\"";
String expected = "SELECT \"product_id\"\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "UNION ALL\n"
+ "SELECT \"product_id\"\n"
+ "FROM \"foodmart\".\"sales_fact_1997\"\n"
+ "UNION ALL\n"
+ "SELECT \"product_id\"\n"
+ "FROM \"foodmart\".\"sales_fact_1997\"\n"
+ "UNION ALL\n"
+ "SELECT \"product_class_id\" AS \"PRODUCT_ID\"\n"
+ "FROM \"foodmart\".\"product_class\"";
// UNION_MERGE collapses the nested binary unions into one 3-input union,
// which the unparser must emit as a flat UNION ALL chain.
final RuleSet rules = RuleSets.ofList(CoreRules.UNION_MERGE);
sql(query)
.optimize(rules, null)
.ok(expected);
}
/** Test case for
* <a href="https://issues.apache.org/jira/browse/CALCITE-1800">[CALCITE-1800]
* JDBC adapter fails to SELECT FROM a UNION query</a>. */
@Test void testUnionWrappedInASelect() {
final String query = "select sum(\n"
+ " case when \"product_id\"=0 then \"net_weight\" else 0 end)"
+ " as net_weight\n"
+ "from (\n"
+ " select \"product_id\", \"net_weight\"\n"
+ " from \"product\"\n"
+ " union all\n"
+ " select \"product_id\", 0 as \"net_weight\"\n"
+ " from \"sales_fact_1997\") t0";
// The UNION sub-query must be parenthesized and aliased ("t1").
final String expected = "SELECT SUM(CASE WHEN \"product_id\" = 0"
+ " THEN \"net_weight\" ELSE 0 END) AS \"NET_WEIGHT\"\n"
+ "FROM (SELECT \"product_id\", \"net_weight\"\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "UNION ALL\n"
+ "SELECT \"product_id\", 0 AS \"net_weight\"\n"
+ "FROM \"foodmart\".\"sales_fact_1997\") AS \"t1\"";
sql(query).ok(expected);
}
/** Round-trips literals through a VALUES query: {@link #checkLiteral}
* asserts the literal is unparsed unchanged, {@link #checkLiteral2}
* asserts it is unparsed in a normalized form. */
@Test void testLiteral() {
checkLiteral("DATE '1978-05-02'");
checkLiteral2("DATE '1978-5-2'", "DATE '1978-05-02'");
checkLiteral("TIME '12:34:56'");
checkLiteral("TIME '12:34:56.78'");
checkLiteral2("TIME '1:4:6.080'", "TIME '01:04:06.080'");
checkLiteral("TIMESTAMP '1978-05-02 12:34:56.78'");
checkLiteral2("TIMESTAMP '1978-5-2 2:4:6.80'",
"TIMESTAMP '1978-05-02 02:04:06.80'");
checkLiteral("'I can''t explain'");
checkLiteral("''");
checkLiteral("TRUE");
checkLiteral("123");
checkLiteral("123.45");
checkLiteral("-123.45");
checkLiteral("INTERVAL '1-2' YEAR TO MONTH");
checkLiteral("INTERVAL -'1-2' YEAR TO MONTH");
checkLiteral("INTERVAL '12-11' YEAR TO MONTH");
checkLiteral("INTERVAL '1' YEAR");
checkLiteral("INTERVAL '1' MONTH");
checkLiteral("INTERVAL '12' DAY");
checkLiteral("INTERVAL -'12' DAY");
// Day-time interval fields are zero-padded when normalized.
checkLiteral2("INTERVAL '1 2' DAY TO HOUR",
"INTERVAL '1 02' DAY TO HOUR");
checkLiteral2("INTERVAL '1 2:10' DAY TO MINUTE",
"INTERVAL '1 02:10' DAY TO MINUTE");
checkLiteral2("INTERVAL '1 2:00' DAY TO MINUTE",
"INTERVAL '1 02:00' DAY TO MINUTE");
checkLiteral2("INTERVAL '1 2:34:56' DAY TO SECOND",
"INTERVAL '1 02:34:56' DAY TO SECOND");
checkLiteral2("INTERVAL '1 2:34:56.789' DAY TO SECOND",
"INTERVAL '1 02:34:56.789' DAY TO SECOND");
checkLiteral2("INTERVAL '1 2:34:56.78' DAY TO SECOND",
"INTERVAL '1 02:34:56.78' DAY TO SECOND");
checkLiteral2("INTERVAL '1 2:34:56.078' DAY TO SECOND",
"INTERVAL '1 02:34:56.078' DAY TO SECOND");
checkLiteral2("INTERVAL -'1 2:34:56.078' DAY TO SECOND",
"INTERVAL -'1 02:34:56.078' DAY TO SECOND");
checkLiteral2("INTERVAL '1 2:3:5.070' DAY TO SECOND",
"INTERVAL '1 02:03:05.07' DAY TO SECOND");
checkLiteral("INTERVAL '1:23' HOUR TO MINUTE");
checkLiteral("INTERVAL '1:02' HOUR TO MINUTE");
checkLiteral("INTERVAL -'1:02' HOUR TO MINUTE");
checkLiteral("INTERVAL '1:23:45' HOUR TO SECOND");
checkLiteral("INTERVAL '1:03:05' HOUR TO SECOND");
checkLiteral("INTERVAL '1:23:45.678' HOUR TO SECOND");
checkLiteral("INTERVAL '1:03:05.06' HOUR TO SECOND");
checkLiteral("INTERVAL '12' MINUTE");
checkLiteral("INTERVAL '12:34' MINUTE TO SECOND");
checkLiteral("INTERVAL '12:34.567' MINUTE TO SECOND");
checkLiteral("INTERVAL '12' SECOND");
checkLiteral("INTERVAL '12.345' SECOND");
}
/** Checks that a literal is unparsed unchanged (HSQLDB dialect). */
private void checkLiteral(String expression) {
checkLiteral2(expression, expression);
}
/** Checks that {@code expression}, wrapped in a VALUES query, is unparsed
* as {@code expected} in the HSQLDB dialect. */
private void checkLiteral2(String expression, String expected) {
sql("VALUES " + expression)
.withHsqldb()
.ok("SELECT *\n"
+ "FROM (VALUES (" + expected + ")) AS t (EXPR$0)");
}
/** Test case for
* <a href="https://issues.apache.org/jira/browse/CALCITE-2625">[CALCITE-2625]
* Removing Window Boundaries from SqlWindow of Aggregate Function which do
* not allow Framing</a>. */
@Test void testRowNumberFunctionForPrintingOfFrameBoundary() {
String query = "SELECT row_number() over (order by \"hire_date\") FROM \"employee\"";
// ROW_NUMBER does not allow framing, so no frame clause is printed.
String expected = "SELECT ROW_NUMBER() OVER (ORDER BY \"hire_date\")\n"
+ "FROM \"foodmart\".\"employee\"";
sql(query).ok(expected);
}
/** Test case for
* <a href="https://issues.apache.org/jira/browse/CALCITE-3112">[CALCITE-3112]
* Support Window in RelToSqlConverter</a>. */
@Test void testConvertWindowToSql() {
String query0 = "SELECT row_number() over (order by \"hire_date\") FROM \"employee\"";
String expected0 = "SELECT ROW_NUMBER() OVER (ORDER BY \"hire_date\") AS \"$0\"\n"
+ "FROM \"foodmart\".\"employee\"";
String query1 = "SELECT rank() over (order by \"hire_date\") FROM \"employee\"";
String expected1 = "SELECT RANK() OVER (ORDER BY \"hire_date\") AS \"$0\"\n"
+ "FROM \"foodmart\".\"employee\"";
String query2 = "SELECT lead(\"employee_id\",1,'NA') over "
+ "(partition by \"hire_date\" order by \"employee_id\")\n"
+ "FROM \"employee\"";
String expected2 = "SELECT LEAD(\"employee_id\", 1, 'NA') OVER "
+ "(PARTITION BY \"hire_date\" "
+ "ORDER BY \"employee_id\") AS \"$0\"\n"
+ "FROM \"foodmart\".\"employee\"";
String query3 = "SELECT lag(\"employee_id\",1,'NA') over "
+ "(partition by \"hire_date\" order by \"employee_id\")\n"
+ "FROM \"employee\"";
String expected3 = "SELECT LAG(\"employee_id\", 1, 'NA') OVER "
+ "(PARTITION BY \"hire_date\" ORDER BY \"employee_id\") AS \"$0\"\n"
+ "FROM \"foodmart\".\"employee\"";
// query4: mixed windows; COUNT allows framing, so its default RANGE frame
// is printed explicitly while LAG gets no frame clause.
String query4 = "SELECT lag(\"employee_id\",1,'NA') "
+ "over (partition by \"hire_date\" order by \"employee_id\") as lag1, "
+ "lag(\"employee_id\",1,'NA') "
+ "over (partition by \"birth_date\" order by \"employee_id\") as lag2, "
+ "count(*) over (partition by \"hire_date\" order by \"employee_id\") as count1, "
+ "count(*) over (partition by \"birth_date\" order by \"employee_id\") as count2\n"
+ "FROM \"employee\"";
String expected4 = "SELECT LAG(\"employee_id\", 1, 'NA') OVER "
+ "(PARTITION BY \"hire_date\" ORDER BY \"employee_id\") AS \"$0\", "
+ "LAG(\"employee_id\", 1, 'NA') OVER "
+ "(PARTITION BY \"birth_date\" ORDER BY \"employee_id\") AS \"$1\", "
+ "COUNT(*) OVER (PARTITION BY \"hire_date\" ORDER BY \"employee_id\" "
+ "RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) AS \"$2\", "
+ "COUNT(*) OVER (PARTITION BY \"birth_date\" ORDER BY \"employee_id\" "
+ "RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) AS \"$3\"\n"
+ "FROM \"foodmart\".\"employee\"";
// query5: windowed aggregate over an aggregate (MAX(SUM(...))) with
// GROUP BY underneath.
String query5 = "SELECT lag(\"employee_id\",1,'NA') "
+ "over (partition by \"hire_date\" order by \"employee_id\") as lag1, "
+ "lag(\"employee_id\",1,'NA') "
+ "over (partition by \"birth_date\" order by \"employee_id\") as lag2, "
+ "max(sum(\"employee_id\")) over (partition by \"hire_date\" order by \"employee_id\") "
+ "as count1, "
+ "max(sum(\"employee_id\")) over (partition by \"birth_date\" order by \"employee_id\") "
+ "as count2\n"
+ "FROM \"employee\" group by \"employee_id\", \"hire_date\", \"birth_date\"";
String expected5 = "SELECT LAG(\"employee_id\", 1, 'NA') OVER "
+ "(PARTITION BY \"hire_date\" ORDER BY \"employee_id\") AS \"$0\", "
+ "LAG(\"employee_id\", 1, 'NA') OVER "
+ "(PARTITION BY \"birth_date\" ORDER BY \"employee_id\") AS \"$1\", "
+ "MAX(SUM(\"employee_id\")) OVER (PARTITION BY \"hire_date\" ORDER BY \"employee_id\" "
+ "RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) AS \"$2\", "
+ "MAX(SUM(\"employee_id\")) OVER (PARTITION BY \"birth_date\" ORDER BY \"employee_id\" "
+ "RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) AS \"$3\"\n"
+ "FROM \"foodmart\".\"employee\"\n"
+ "GROUP BY \"employee_id\", \"hire_date\", \"birth_date\"";
String query6 = "SELECT lag(\"employee_id\",1,'NA') over "
+ "(partition by \"hire_date\" order by \"employee_id\"), \"hire_date\"\n"
+ "FROM \"employee\"\n"
+ "group by \"hire_date\", \"employee_id\"";
String expected6 = "SELECT LAG(\"employee_id\", 1, 'NA') "
+ "OVER (PARTITION BY \"hire_date\" ORDER BY \"employee_id\"), \"hire_date\"\n"
+ "FROM \"foodmart\".\"employee\"\n"
+ "GROUP BY \"hire_date\", \"employee_id\"";
String query7 = "SELECT "
+ "count(distinct \"employee_id\") over (order by \"hire_date\") FROM \"employee\"";
String expected7 = "SELECT "
+ "COUNT(DISTINCT \"employee_id\") "
+ "OVER (ORDER BY \"hire_date\" RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) AS \"$0\""
+ "\nFROM \"foodmart\".\"employee\"";
// query8: SUM(DISTINCT ...) is rewritten as a CASE/COALESCE over
// COUNT(DISTINCT ...) and SUM(DISTINCT ...), per the expected output.
String query8 = "SELECT "
+ "sum(distinct \"position_id\") over (order by \"hire_date\") FROM \"employee\"";
String expected8 =
"SELECT CASE WHEN (COUNT(DISTINCT \"position_id\") OVER (ORDER BY \"hire_date\" "
+ "RANGE"
+ " BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW)) > 0 THEN COALESCE(SUM(DISTINCT "
+ "\"position_id\") OVER (ORDER BY \"hire_date\" RANGE BETWEEN UNBOUNDED "
+ "PRECEDING AND CURRENT ROW), 0) ELSE NULL END\n"
+ "FROM \"foodmart\".\"employee\"";
// Convert Project expressions to LogicalWindow before unparsing.
HepProgramBuilder builder = new HepProgramBuilder();
builder.addRuleClass(ProjectToWindowRule.class);
HepPlanner hepPlanner = new HepPlanner(builder.build());
RuleSet rules = RuleSets.ofList(CoreRules.PROJECT_TO_LOGICAL_PROJECT_AND_WINDOW);
sql(query0).optimize(rules, hepPlanner).ok(expected0);
sql(query1).optimize(rules, hepPlanner).ok(expected1);
sql(query2).optimize(rules, hepPlanner).ok(expected2);
sql(query3).optimize(rules, hepPlanner).ok(expected3);
sql(query4).optimize(rules, hepPlanner).ok(expected4);
sql(query5).optimize(rules, hepPlanner).ok(expected5);
sql(query6).optimize(rules, hepPlanner).ok(expected6);
sql(query7).optimize(rules, hepPlanner).ok(expected7);
sql(query8).optimize(rules, hepPlanner).ok(expected8);
}
/**
* Test case for
* <a href="https://issues.apache.org/jira/browse/CALCITE-3866">[CALCITE-3866]
* "numeric field overflow" when running the generated SQL in PostgreSQL </a>.
*/
@Test void testSumReturnType() {
String query =
"select sum(e1.\"store_sales\"), sum(e2.\"store_sales\") from \"sales_fact_dec_1998\" as "
+ "e1 , \"sales_fact_dec_1998\" as e2 where e1.\"product_id\" = e2.\"product_id\"";
// After pushing aggregates through the join, each side's SUM is scaled by
// the other side's COUNT(*) and the result is cast back to DECIMAL(19, 4).
String expect = "SELECT SUM(CAST(SUM(\"store_sales\") * \"t0\".\"$f1\" AS DECIMAL"
+ "(19, 4))), SUM(CAST(\"t\".\"$f2\" * SUM(\"store_sales\") AS DECIMAL(19, 4)))\n"
+ "FROM (SELECT \"product_id\", SUM(\"store_sales\"), COUNT(*) AS \"$f2\"\n"
+ "FROM \"foodmart\".\"sales_fact_dec_1998\"\n"
+ "GROUP BY \"product_id\") AS \"t\"\n"
+ "INNER JOIN "
+ "(SELECT \"product_id\", COUNT(*) AS \"$f1\", SUM(\"store_sales\")\n"
+ "FROM \"foodmart\".\"sales_fact_dec_1998\"\n"
+ "GROUP BY \"product_id\") AS \"t0\" ON \"t\".\"product_id\" = \"t0\".\"product_id\"";
HepProgramBuilder builder = new HepProgramBuilder();
builder.addRuleClass(FilterJoinRule.class);
builder.addRuleClass(AggregateProjectMergeRule.class);
builder.addRuleClass(AggregateJoinTransposeRule.class);
HepPlanner hepPlanner = new HepPlanner(builder.build());
RuleSet rules = RuleSets.ofList(
CoreRules.FILTER_INTO_JOIN,
CoreRules.JOIN_CONDITION_PUSH,
CoreRules.AGGREGATE_PROJECT_MERGE, CoreRules.AGGREGATE_JOIN_TRANSPOSE_EXTENDED);
sql(query).withPostgresql().optimize(rules, hepPlanner).ok(expect);
}
/** RANK does not allow framing, so no window frame clause is printed. */
@Test void testRankFunctionForPrintingOfFrameBoundary() {
String query = "SELECT rank() over (order by \"hire_date\") FROM \"employee\"";
String expected = "SELECT RANK() OVER (ORDER BY \"hire_date\")\n"
+ "FROM \"foodmart\".\"employee\"";
sql(query).ok(expected);
}
/** LEAD does not allow framing, so no window frame clause is printed. */
@Test void testLeadFunctionForPrintingOfFrameBoundary() {
String query = "SELECT lead(\"employee_id\",1,'NA') over "
+ "(partition by \"hire_date\" order by \"employee_id\") FROM \"employee\"";
String expected = "SELECT LEAD(\"employee_id\", 1, 'NA') OVER "
+ "(PARTITION BY \"hire_date\" ORDER BY \"employee_id\")\n"
+ "FROM \"foodmart\".\"employee\"";
sql(query).ok(expected);
}
/** LAG does not allow framing, so no window frame clause is printed. */
@Test void testLagFunctionForPrintingOfFrameBoundary() {
String query = "SELECT lag(\"employee_id\",1,'NA') over "
+ "(partition by \"hire_date\" order by \"employee_id\") FROM \"employee\"";
String expected = "SELECT LAG(\"employee_id\", 1, 'NA') OVER "
+ "(PARTITION BY \"hire_date\" ORDER BY \"employee_id\")\n"
+ "FROM \"foodmart\".\"employee\"";
sql(query).ok(expected);
}
/** Test case for
* <a href="https://issues.apache.org/jira/browse/CALCITE-3876">[CALCITE-3876]
* RelToSqlConverter should not combine Projects when top Project contains
* window function referencing window function from bottom Project</a>. */
@Test void testWindowOnWindowDoesNotCombineProjects() {
final String query = "SELECT ROW_NUMBER() OVER (ORDER BY rn)\n"
+ "FROM (SELECT *,\n"
+ " ROW_NUMBER() OVER (ORDER BY \"product_id\") as rn\n"
+ " FROM \"foodmart\".\"product\")";
// The inner projection stays a separate sub-query so that "RN" is
// computed before the outer ROW_NUMBER references it.
final String expected = "SELECT ROW_NUMBER() OVER (ORDER BY \"RN\")\n"
+ "FROM (SELECT \"product_class_id\", \"product_id\", \"brand_name\","
+ " \"product_name\", \"SKU\", \"SRP\", \"gross_weight\","
+ " \"net_weight\", \"recyclable_package\", \"low_fat\","
+ " \"units_per_case\", \"cases_per_pallet\", \"shelf_width\","
+ " \"shelf_height\", \"shelf_depth\","
+ " ROW_NUMBER() OVER (ORDER BY \"product_id\") AS \"RN\"\n"
+ "FROM \"foodmart\".\"product\") AS \"t\"";
sql(query)
.withPostgresql()
.ok(expected);
}
/** Test case for
* <a href="https://issues.apache.org/jira/browse/CALCITE-1798">[CALCITE-1798]
* Generate dialect-specific SQL for FLOOR operator</a>. */
@Test void testFloor() {
String query = "SELECT floor(\"hire_date\" TO MINUTE) FROM \"employee\"";
// HSQLDB renders FLOOR-to-minute as TRUNC(..., 'MI').
String expected = "SELECT TRUNC(hire_date, 'MI')\nFROM foodmart.employee";
sql(query)
.withHsqldb()
.ok(expected);
}
/** ClickHouse renders FLOOR-to-minute as toStartOfMinute(...). */
@Test void testFloorClickHouse() {
final String expected =
"SELECT toStartOfMinute(`hire_date`)\nFROM `foodmart`.`employee`";
sql("SELECT floor(\"hire_date\" TO MINUTE) FROM \"employee\"")
.withClickHouse()
.ok(expected);
}
/** PostgreSQL renders FLOOR-to-minute as DATE_TRUNC('MINUTE', ...). */
@Test void testFloorPostgres() {
final String expected =
"SELECT DATE_TRUNC('MINUTE', \"hire_date\")\nFROM \"foodmart\".\"employee\"";
sql("SELECT floor(\"hire_date\" TO MINUTE) FROM \"employee\"")
.withPostgresql()
.ok(expected);
}
/** Oracle renders FLOOR-to-minute as TRUNC(..., 'MINUTE'). */
@Test void testFloorOracle() {
final String expected =
"SELECT TRUNC(\"hire_date\", 'MINUTE')\nFROM \"foodmart\".\"employee\"";
sql("SELECT floor(\"hire_date\" TO MINUTE) FROM \"employee\"")
.withOracle()
.ok(expected);
}
/** Presto renders FLOOR-to-minute as DATE_TRUNC('MINUTE', ...). */
@Test void testFloorPresto() {
final String expected =
"SELECT DATE_TRUNC('MINUTE', \"hire_date\")\nFROM \"foodmart\".\"employee\"";
sql("SELECT floor(\"hire_date\" TO MINUTE) FROM \"employee\"")
.withPresto()
.ok(expected);
}
/** MSSQL renders FLOOR-to-week via DATEADD/DATEPART arithmetic plus a
* string CONVERT round-trip. */
@Test void testFloorMssqlWeek() {
String query = "SELECT floor(\"hire_date\" TO WEEK) FROM \"employee\"";
String expected = "SELECT CONVERT(DATETIME, CONVERT(VARCHAR(10), "
+ "DATEADD(day, - (6 + DATEPART(weekday, [hire_date] )) % 7, [hire_date] ), 126))\n"
+ "FROM [foodmart].[employee]";
sql(query).withMssql()
.ok(expected);
}
/** MSSQL renders FLOOR-to-month by truncating to 'YYYY-MM' and appending
* '-01'. */
@Test void testFloorMssqlMonth() {
String query = "SELECT floor(\"hire_date\" TO MONTH) FROM \"employee\"";
String expected = "SELECT CONVERT(DATETIME, CONVERT(VARCHAR(7), [hire_date] , 126)+'-01')\n"
+ "FROM [foodmart].[employee]";
sql(query)
.withMssql()
.ok(expected);
}
/** MySQL renders FLOOR-to-month as DATE_FORMAT(..., '%Y-%m-01'). */
@Test void testFloorMysqlMonth() {
String query = "SELECT floor(\"hire_date\" TO MONTH) FROM \"employee\"";
String expected = "SELECT DATE_FORMAT(`hire_date`, '%Y-%m-01')\n"
+ "FROM `foodmart`.`employee`";
sql(query)
.withMysql()
.ok(expected);
}
/** FLOOR-to-week across ClickHouse (toMonday), MSSQL (DATEADD/DATEPART)
* and MySQL (STR_TO_DATE over DATE_FORMAT) dialects. */
@Test void testFloorWeek() {
final String query = "SELECT floor(\"hire_date\" TO WEEK) FROM \"employee\"";
final String expectedClickHouse = "SELECT toMonday(`hire_date`)\n"
+ "FROM `foodmart`.`employee`";
final String expectedMssql = "SELECT CONVERT(DATETIME, CONVERT(VARCHAR(10), "
+ "DATEADD(day, - (6 + DATEPART(weekday, [hire_date] )) % 7, [hire_date] ), 126))\n"
+ "FROM [foodmart].[employee]";
final String expectedMysql = "SELECT STR_TO_DATE(DATE_FORMAT(`hire_date` , '%x%v-1'), "
+ "'%x%v-%w')\n"
+ "FROM `foodmart`.`employee`";
sql(query)
.withClickHouse()
.ok(expectedClickHouse)
.withMssql()
.ok(expectedMssql)
.withMysql()
.ok(expectedMysql);
}
/** DB2 unparses interval arithmetic without quotes around the interval
* value, e.g. {@code hire_date + 19800 SECOND}. */
@Test void testUnparseSqlIntervalQualifierDb2() {
String queryDatePlus = "select * from \"employee\" where \"hire_date\" + "
+ "INTERVAL '19800' SECOND(5) > TIMESTAMP '2005-10-17 00:00:00' ";
String expectedDatePlus = "SELECT *\n"
+ "FROM foodmart.employee AS employee\n"
+ "WHERE (employee.hire_date + 19800 SECOND)"
+ " > TIMESTAMP '2005-10-17 00:00:00'";
sql(queryDatePlus)
.withDb2()
.ok(expectedDatePlus);
String queryDateMinus = "select * from \"employee\" where \"hire_date\" - "
+ "INTERVAL '19800' SECOND(5) > TIMESTAMP '2005-10-17 00:00:00' ";
String expectedDateMinus = "SELECT *\n"
+ "FROM foodmart.employee AS employee\n"
+ "WHERE (employee.hire_date - 19800 SECOND)"
+ " > TIMESTAMP '2005-10-17 00:00:00'";
sql(queryDateMinus)
.withDb2()
.ok(expectedDateMinus);
}
/** MySQL unparses compound interval qualifiers with underscore names,
* e.g. YEAR TO MONTH becomes YEAR_MONTH and MINUTE TO SECOND becomes
* MINUTE_SECOND. */
@Test void testUnparseSqlIntervalQualifierMySql() {
final String sql0 = "select * from \"employee\" where \"hire_date\" - "
+ "INTERVAL '19800' SECOND(5) > TIMESTAMP '2005-10-17 00:00:00' ";
final String expect0 = "SELECT *\n"
+ "FROM `foodmart`.`employee`\n"
+ "WHERE (`hire_date` - INTERVAL '19800' SECOND)"
+ " > TIMESTAMP '2005-10-17 00:00:00'";
sql(sql0).withMysql().ok(expect0);
final String sql1 = "select * from \"employee\" where \"hire_date\" + "
+ "INTERVAL '10' HOUR > TIMESTAMP '2005-10-17 00:00:00' ";
final String expect1 = "SELECT *\n"
+ "FROM `foodmart`.`employee`\n"
+ "WHERE (`hire_date` + INTERVAL '10' HOUR)"
+ " > TIMESTAMP '2005-10-17 00:00:00'";
sql(sql1).withMysql().ok(expect1);
final String sql2 = "select * from \"employee\" where \"hire_date\" + "
+ "INTERVAL '1-2' year to month > TIMESTAMP '2005-10-17 00:00:00' ";
final String expect2 = "SELECT *\n"
+ "FROM `foodmart`.`employee`\n"
+ "WHERE (`hire_date` + INTERVAL '1-2' YEAR_MONTH)"
+ " > TIMESTAMP '2005-10-17 00:00:00'";
sql(sql2).withMysql().ok(expect2);
final String sql3 = "select * from \"employee\" "
+ "where \"hire_date\" + INTERVAL '39:12' MINUTE TO SECOND"
+ " > TIMESTAMP '2005-10-17 00:00:00' ";
final String expect3 = "SELECT *\n"
+ "FROM `foodmart`.`employee`\n"
+ "WHERE (`hire_date` + INTERVAL '39:12' MINUTE_SECOND)"
+ " > TIMESTAMP '2005-10-17 00:00:00'";
sql(sql3).withMysql().ok(expect3);
}
/** MSSQL rewrites interval arithmetic as DATEADD; subtraction and a
* negative interval value both flip the sign of the DATEADD amount. */
@Test void testUnparseSqlIntervalQualifierMsSql() {
String queryDatePlus = "select * from \"employee\" where \"hire_date\" +"
+ "INTERVAL '19800' SECOND(5) > TIMESTAMP '2005-10-17 00:00:00' ";
String expectedDatePlus = "SELECT *\n"
+ "FROM [foodmart].[employee]\n"
+ "WHERE DATEADD(SECOND, 19800, [hire_date]) > CAST('2005-10-17 00:00:00' AS TIMESTAMP(0))";
sql(queryDatePlus)
.withMssql()
.ok(expectedDatePlus);
String queryDateMinus = "select * from \"employee\" where \"hire_date\" -"
+ "INTERVAL '19800' SECOND(5) > TIMESTAMP '2005-10-17 00:00:00' ";
String expectedDateMinus = "SELECT *\n"
+ "FROM [foodmart].[employee]\n"
+ "WHERE DATEADD(SECOND, -19800, [hire_date]) > CAST('2005-10-17 00:00:00' AS TIMESTAMP(0))";
sql(queryDateMinus)
.withMssql()
.ok(expectedDateMinus);
// Subtracting a negative interval yields a positive DATEADD amount.
String queryDateMinusNegate = "select * from \"employee\" "
+ "where \"hire_date\" -INTERVAL '-19800' SECOND(5)"
+ " > TIMESTAMP '2005-10-17 00:00:00' ";
String expectedDateMinusNegate = "SELECT *\n"
+ "FROM [foodmart].[employee]\n"
+ "WHERE DATEADD(SECOND, 19800, [hire_date]) > CAST('2005-10-17 00:00:00' AS TIMESTAMP(0))";
sql(queryDateMinusNegate)
.withMssql()
.ok(expectedDateMinusNegate);
}
/** TIME literal unparsing: BigQuery keeps the literal as-is, MSSQL casts
* a string to TIME(0).
*
* <p>Visibility fixed from {@code public} to package-private to match the
* JUnit 5 {@code @Test void} convention used by every other test in this
* file. */
@Test void testUnparseTimeLiteral() {
String queryDatePlus = "select TIME '11:25:18' "
+ "from \"employee\"";
String expectedBQSql = "SELECT TIME '11:25:18'\n"
+ "FROM foodmart.employee";
String expectedSql = "SELECT CAST('11:25:18' AS TIME(0))\n"
+ "FROM [foodmart].[employee]";
sql(queryDatePlus)
.withBigQuery()
.ok(expectedBQSql)
.withMssql()
.ok(expectedSql);
}
/** BigQuery rewrites interval arithmetic as TIMESTAMP_SUB/TIMESTAMP_ADD;
* a DAY TO SECOND interval with a fractional second fails to convert. */
@Test void testUnparseSqlIntervalQualifierBigQuery() {
final String sql0 = "select * from \"employee\" where \"hire_date\" - "
+ "INTERVAL '19800' SECOND(5) > TIMESTAMP '2005-10-17 00:00:00' ";
final String expect0 = "SELECT *\n"
+ "FROM foodmart.employee\n"
+ "WHERE TIMESTAMP_SUB(hire_date, INTERVAL 19800 SECOND)"
+ " > CAST('2005-10-17 00:00:00' AS DATETIME)";
sql(sql0).withBigQuery().ok(expect0);
final String sql1 = "select * from \"employee\" where \"hire_date\" + "
+ "INTERVAL '10' HOUR > TIMESTAMP '2005-10-17 00:00:00' ";
final String expect1 = "SELECT *\n"
+ "FROM foodmart.employee\n"
+ "WHERE TIMESTAMP_ADD(hire_date, INTERVAL 10 HOUR)"
+ " > CAST('2005-10-17 00:00:00' AS DATETIME)";
sql(sql1).withBigQuery().ok(expect1);
// Fractional seconds in the interval are rejected during conversion.
final String sql2 = "select * from \"employee\" where \"hire_date\" + "
+ "INTERVAL '1 2:34:56.78' DAY TO SECOND > TIMESTAMP '2005-10-17 00:00:00' ";
sql(sql2).withBigQuery().throws_("For input string: \"56.78\"");
}
/** MySQL renders FLOOR-to-week as STR_TO_DATE over DATE_FORMAT.
*
* <p>Visibility fixed from {@code public} to package-private to match the
* JUnit 5 {@code @Test void} convention used by every other test in this
* file. */
@Test void testFloorMysqlWeek() {
String query = "SELECT floor(\"hire_date\" TO WEEK) FROM \"employee\"";
String expected = "SELECT STR_TO_DATE(DATE_FORMAT(`hire_date` , '%x%v-1'), '%x%v-%w')\n"
+ "FROM `foodmart`.`employee`";
sql(query)
.withMysql()
.ok(expected);
}
/** FLOOR-to-month across ClickHouse, MSSQL and MySQL dialects. */
@Test void testFloorMonth() {
final String query = "SELECT floor(\"hire_date\" TO MONTH) FROM \"employee\"";
final String expectedClickHouse = "SELECT toStartOfMonth(`hire_date`)\n"
+ "FROM `foodmart`.`employee`";
final String expectedMssql = "SELECT CONVERT(DATETIME, CONVERT(VARCHAR(7), [hire_date] , "
+ "126)+'-01')\n"
+ "FROM [foodmart].[employee]";
final String expectedMysql = "SELECT DATE_FORMAT(`hire_date`, '%Y-%m-01')\n"
+ "FROM `foodmart`.`employee`";
sql(query)
.withClickHouse()
.ok(expectedClickHouse)
.withMssql()
.ok(expectedMssql)
.withMysql()
.ok(expectedMysql);
}
/** MySQL FLOOR-to-hour zeroes the minute and second fields. */
@Test void testFloorMysqlHour() {
String query = "SELECT floor(\"hire_date\" TO HOUR) FROM \"employee\"";
String expected = "SELECT DATE_FORMAT(`hire_date`, '%Y-%m-%d %H:00:00')\n"
+ "FROM `foodmart`.`employee`";
sql(query)
.withMysql()
.ok(expected);
}
/** MySQL FLOOR-to-minute zeroes the second field. */
@Test void testFloorMysqlMinute() {
String query = "SELECT floor(\"hire_date\" TO MINUTE) FROM \"employee\"";
String expected = "SELECT DATE_FORMAT(`hire_date`, '%Y-%m-%d %H:%i:00')\n"
+ "FROM `foodmart`.`employee`";
sql(query)
.withMysql()
.ok(expected);
}
/** MySQL FLOOR-to-second keeps all fields down to the second. */
@Test void testFloorMysqlSecond() {
String query = "SELECT floor(\"hire_date\" TO SECOND) FROM \"employee\"";
String expected = "SELECT DATE_FORMAT(`hire_date`, '%Y-%m-%d %H:%i:%s')\n"
+ "FROM `foodmart`.`employee`";
sql(query)
.withMysql()
.ok(expected);
}
/** Test case for
* <a href="https://issues.apache.org/jira/browse/CALCITE-1826">[CALCITE-1826]
* JDBC dialect-specific FLOOR fails when in GROUP BY</a>. */
@Test void testFloorWithGroupBy() {
final String query = "SELECT floor(\"hire_date\" TO MINUTE)\n"
+ "FROM \"employee\"\n"
+ "GROUP BY floor(\"hire_date\" TO MINUTE)";
// The dialect-specific FLOOR expression must be repeated verbatim in the
// GROUP BY clause for each dialect.
final String expected = "SELECT TRUNC(hire_date, 'MI')\n"
+ "FROM foodmart.employee\n"
+ "GROUP BY TRUNC(hire_date, 'MI')";
final String expectedClickHouse = "SELECT toStartOfMinute(`hire_date`)\n"
+ "FROM `foodmart`.`employee`\n"
+ "GROUP BY toStartOfMinute(`hire_date`)";
final String expectedOracle = "SELECT TRUNC(\"hire_date\", 'MINUTE')\n"
+ "FROM \"foodmart\".\"employee\"\n"
+ "GROUP BY TRUNC(\"hire_date\", 'MINUTE')";
final String expectedPostgresql = "SELECT DATE_TRUNC('MINUTE', \"hire_date\")\n"
+ "FROM \"foodmart\".\"employee\"\n"
+ "GROUP BY DATE_TRUNC('MINUTE', \"hire_date\")";
final String expectedMysql = "SELECT"
+ " DATE_FORMAT(`hire_date`, '%Y-%m-%d %H:%i:00')\n"
+ "FROM `foodmart`.`employee`\n"
+ "GROUP BY DATE_FORMAT(`hire_date`, '%Y-%m-%d %H:%i:00')";
sql(query)
.withHsqldb()
.ok(expected)
.withClickHouse()
.ok(expectedClickHouse)
.withOracle()
.ok(expectedOracle)
.withPostgresql()
.ok(expectedPostgresql)
.withMysql()
.ok(expectedMysql);
}
/** Tests that {@code SUBSTRING(x FROM n)} (no FOR argument) is rendered in each
 * dialect's native form — {@code SUBSTR(x, n)} for Oracle, Presto, Snowflake and
 * BigQuery; {@code SUBSTRING(x FROM n)} for PostgreSQL, Redshift and MySQL;
 * comma form for ClickHouse, Hive and Spark — and that MSSQL rejects it because
 * its SUBSTRING requires both FROM and FOR arguments. */
@Test void testSubstring() {
final String query = "select substring(\"brand_name\" from 2) "
+ "from \"product\"\n";
final String expectedClickHouse = "SELECT substring(`brand_name`, 2)\n"
+ "FROM `foodmart`.`product`";
final String expectedOracle = "SELECT SUBSTR(\"brand_name\", 2)\n"
+ "FROM \"foodmart\".\"product\"";
final String expectedPostgresql = "SELECT SUBSTRING(\"brand_name\" FROM 2)\n"
+ "FROM \"foodmart\".\"product\"";
final String expectedPresto = "SELECT SUBSTR(\"brand_name\", 2)\n"
+ "FROM \"foodmart\".\"product\"";
final String expectedSnowflake = "SELECT SUBSTR(\"brand_name\", 2)\n"
+ "FROM \"foodmart\".\"product\"";
final String expectedRedshift = expectedPostgresql;
final String expectedMysql = "SELECT SUBSTRING(`brand_name` FROM 2)\n"
+ "FROM `foodmart`.`product`";
final String expectedHive = "SELECT SUBSTRING(brand_name, 2)\n"
+ "FROM foodmart.product";
final String expectedSpark = "SELECT SUBSTRING(brand_name, 2)\n"
+ "FROM foodmart.product";
// Fixed typo in local variable name: was "expectedBiqQuery".
final String expectedBigQuery = "SELECT SUBSTR(brand_name, 2)\n"
+ "FROM foodmart.product";
sql(query)
.withClickHouse()
.ok(expectedClickHouse)
.withOracle()
.ok(expectedOracle)
.withPostgresql()
.ok(expectedPostgresql)
.withPresto()
.ok(expectedPresto)
.withSnowflake()
.ok(expectedSnowflake)
.withRedshift()
.ok(expectedRedshift)
.withMysql()
.ok(expectedMysql)
.withMssql()
// mssql does not support this syntax and so should fail
.throws_("MSSQL SUBSTRING requires FROM and FOR arguments")
.withHive()
.ok(expectedHive)
.withSpark()
.ok(expectedSpark)
.withBigQuery()
.ok(expectedBigQuery);
}
/** Tests that {@code SUBSTRING(x FROM n FOR m)} is rendered in each dialect's
 * native form; unlike the no-FOR variant, MSSQL supports this three-argument
 * form as {@code SUBSTRING(x, n, m)}. */
@Test void testSubstringWithFor() {
final String query = "select substring(\"brand_name\" from 2 for 3) "
+ "from \"product\"\n";
final String expectedClickHouse = "SELECT substring(`brand_name`, 2, 3)\n"
+ "FROM `foodmart`.`product`";
final String expectedOracle = "SELECT SUBSTR(\"brand_name\", 2, 3)\n"
+ "FROM \"foodmart\".\"product\"";
final String expectedPostgresql = "SELECT SUBSTRING(\"brand_name\" FROM 2 FOR 3)\n"
+ "FROM \"foodmart\".\"product\"";
final String expectedPresto = "SELECT SUBSTR(\"brand_name\", 2, 3)\n"
+ "FROM \"foodmart\".\"product\"";
final String expectedSnowflake = "SELECT SUBSTR(\"brand_name\", 2, 3)\n"
+ "FROM \"foodmart\".\"product\"";
final String expectedRedshift = expectedPostgresql;
final String expectedMysql = "SELECT SUBSTRING(`brand_name` FROM 2 FOR 3)\n"
+ "FROM `foodmart`.`product`";
final String expectedMssql = "SELECT SUBSTRING([brand_name], 2, 3)\n"
+ "FROM [foodmart].[product]";
final String expectedHive = "SELECT SUBSTRING(brand_name, 2, 3)\n"
+ "FROM foodmart.product";
final String expectedSpark = "SELECT SUBSTRING(brand_name, 2, 3)\n"
+ "FROM foodmart.product";
sql(query)
.withClickHouse()
.ok(expectedClickHouse)
.withOracle()
.ok(expectedOracle)
.withPostgresql()
.ok(expectedPostgresql)
.withPresto()
.ok(expectedPresto)
.withSnowflake()
.ok(expectedSnowflake)
.withRedshift()
.ok(expectedRedshift)
.withMysql()
.ok(expectedMysql)
.withMssql()
.ok(expectedMssql)
.withSpark()
.ok(expectedSpark)
.withHive()
.ok(expectedHive);
}
/** Test case for
 * <a href="https://issues.apache.org/jira/browse/CALCITE-1849">[CALCITE-1849]
 * Support sub-queries (RexSubQuery) in RelToSqlConverter</a>.
 *
 * <p>With expansion disabled, a correlated EXISTS sub-query is converted back
 * to an EXISTS clause rather than a join. */
@Test void testExistsWithExpand() {
String query = "select \"product_name\" from \"product\" a "
+ "where exists (select count(*) "
+ "from \"sales_fact_1997\"b "
+ "where b.\"product_id\" = a.\"product_id\")";
String expected = "SELECT \"product_name\"\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "WHERE EXISTS (SELECT COUNT(*)\n"
+ "FROM \"foodmart\".\"sales_fact_1997\"\n"
+ "WHERE \"product_id\" = \"product\".\"product_id\")";
sql(query).withConfig(c -> c.withExpand(false)).ok(expected);
}
/** As {@link #testExistsWithExpand()} but for a correlated NOT EXISTS
 * sub-query, which must round-trip to a NOT EXISTS clause. */
@Test void testNotExistsWithExpand() {
String query = "select \"product_name\" from \"product\" a "
+ "where not exists (select count(*) "
+ "from \"sales_fact_1997\"b "
+ "where b.\"product_id\" = a.\"product_id\")";
String expected = "SELECT \"product_name\"\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "WHERE NOT EXISTS (SELECT COUNT(*)\n"
+ "FROM \"foodmart\".\"sales_fact_1997\"\n"
+ "WHERE \"product_id\" = \"product\".\"product_id\")";
sql(query).withConfig(c -> c.withExpand(false)).ok(expected);
}
/** Tests that, with expansion disabled, a correlated IN sub-query is
 * converted back to an IN clause rather than a join. */
@Test void testSubQueryInWithExpand() {
String query = "select \"product_name\" from \"product\" a "
+ "where \"product_id\" in (select \"product_id\" "
+ "from \"sales_fact_1997\"b "
+ "where b.\"product_id\" = a.\"product_id\")";
String expected = "SELECT \"product_name\"\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "WHERE \"product_id\" IN (SELECT \"product_id\"\n"
+ "FROM \"foodmart\".\"sales_fact_1997\"\n"
+ "WHERE \"product_id\" = \"product\".\"product_id\")";
sql(query).withConfig(c -> c.withExpand(false)).ok(expected);
}
/** Tests that an IN list of literals is emitted as a disjunction of
 * equality predicates when expansion is disabled. */
@Test void testSubQueryInWithExpand2() {
String query = "select \"product_name\" from \"product\" a "
+ "where \"product_id\" in (1, 2)";
String expected = "SELECT \"product_name\"\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "WHERE \"product_id\" = 1 OR \"product_id\" = 2";
sql(query).withConfig(c -> c.withExpand(false)).ok(expected);
}
/** Tests that, with expansion disabled, a correlated NOT IN sub-query is
 * converted back to a NOT IN clause. */
@Test void testSubQueryNotInWithExpand() {
String query = "select \"product_name\" from \"product\" a "
+ "where \"product_id\" not in (select \"product_id\" "
+ "from \"sales_fact_1997\"b "
+ "where b.\"product_id\" = a.\"product_id\")";
String expected = "SELECT \"product_name\"\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "WHERE \"product_id\" NOT IN (SELECT \"product_id\"\n"
+ "FROM \"foodmart\".\"sales_fact_1997\"\n"
+ "WHERE \"product_id\" = \"product\".\"product_id\")";
sql(query).withConfig(c -> c.withExpand(false)).ok(expected);
}
/** Tests that a LIKE predicate round-trips unchanged. */
@Test void testLike() {
String query = "select \"product_name\" from \"product\" a "
+ "where \"product_name\" like 'abc'";
String expected = "SELECT \"product_name\"\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "WHERE \"product_name\" LIKE 'abc'";
sql(query).ok(expected);
}
/** Tests that a NOT LIKE predicate round-trips unchanged. */
@Test void testNotLike() {
String query = "select \"product_name\" from \"product\" a "
+ "where \"product_name\" not like 'abc'";
String expected = "SELECT \"product_name\"\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "WHERE \"product_name\" NOT LIKE 'abc'";
sql(query).ok(expected);
}
/** Tests MATCH_RECOGNIZE conversion with PARTITION BY and ORDER BY; DEFINE
 * references to the current row are normalized to {@code PREV(..., 0)}. */
@Test void testMatchRecognizePatternExpression() {
String sql = "select *\n"
+ " from \"product\" match_recognize\n"
+ " (\n"
+ " partition by \"product_class_id\", \"brand_name\"\n"
+ " order by \"product_class_id\" asc, \"brand_name\" desc\n"
+ " pattern (strt down+ up+)\n"
+ " define\n"
+ " down as down.\"net_weight\" < PREV(down.\"net_weight\"),\n"
+ " up as up.\"net_weight\" > prev(up.\"net_weight\")\n"
+ " ) mr";
String expected = "SELECT *\n"
+ "FROM (SELECT *\n"
+ "FROM \"foodmart\".\"product\") MATCH_RECOGNIZE(\n"
+ "PARTITION BY \"product_class_id\", \"brand_name\"\n"
+ "ORDER BY \"product_class_id\", \"brand_name\" DESC\n"
+ "ONE ROW PER MATCH\n"
+ "AFTER MATCH SKIP TO NEXT ROW\n"
+ "PATTERN (\"STRT\" \"DOWN\" + \"UP\" +)\n"
+ "DEFINE "
+ "\"DOWN\" AS PREV(\"DOWN\".\"net_weight\", 0) < "
+ "PREV(\"DOWN\".\"net_weight\", 1), "
+ "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
+ "PREV(\"UP\".\"net_weight\", 1))";
sql(sql).ok(expected);
}
@Test void testMatchRecognizePatternExpression2() {
final String sql = "select *\n"
+ " from \"product\" match_recognize\n"
+ " (\n"
+ " pattern (strt down+ up+$)\n"
+ " define\n"
+ " down as down.\"net_weight\" < PREV(down.\"net_weight\"),\n"
+ " up as up.\"net_weight\" > prev(up.\"net_weight\")\n"
+ " ) mr";
final String expected = "SELECT *\n"
+ "FROM (SELECT *\n"
+ "FROM \"foodmart\".\"product\") MATCH_RECOGNIZE(\n"
+ "ONE ROW PER MATCH\n"
+ "AFTER MATCH SKIP TO NEXT ROW\n"
+ "PATTERN (\"STRT\" \"DOWN\" + \"UP\" + $)\n"
+ "DEFINE "
+ "\"DOWN\" AS PREV(\"DOWN\".\"net_weight\", 0) < "
+ "PREV(\"DOWN\".\"net_weight\", 1), "
+ "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
+ "PREV(\"UP\".\"net_weight\", 1))";
sql(sql).ok(expected);
}
/** Tests MATCH_RECOGNIZE with a start-of-partition anchor ({@code ^}) in the
 * pattern. */
@Test void testMatchRecognizePatternExpression3() {
final String sql = "select *\n"
+ " from \"product\" match_recognize\n"
+ " (\n"
+ " pattern (^strt down+ up+)\n"
+ " define\n"
+ " down as down.\"net_weight\" < PREV(down.\"net_weight\"),\n"
+ " up as up.\"net_weight\" > prev(up.\"net_weight\")\n"
+ " ) mr";
final String expected = "SELECT *\n"
+ "FROM (SELECT *\n"
+ "FROM \"foodmart\".\"product\") MATCH_RECOGNIZE(\n"
+ "ONE ROW PER MATCH\n"
+ "AFTER MATCH SKIP TO NEXT ROW\n"
+ "PATTERN (^ \"STRT\" \"DOWN\" + \"UP\" +)\n"
+ "DEFINE "
+ "\"DOWN\" AS PREV(\"DOWN\".\"net_weight\", 0) < "
+ "PREV(\"DOWN\".\"net_weight\", 1), "
+ "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
+ "PREV(\"UP\".\"net_weight\", 1))";
sql(sql).ok(expected);
}
/** Tests MATCH_RECOGNIZE with both start ({@code ^}) and end ({@code $})
 * anchors in the pattern. */
@Test void testMatchRecognizePatternExpression4() {
final String sql = "select *\n"
+ " from \"product\" match_recognize\n"
+ " (\n"
+ " pattern (^strt down+ up+$)\n"
+ " define\n"
+ " down as down.\"net_weight\" < PREV(down.\"net_weight\"),\n"
+ " up as up.\"net_weight\" > prev(up.\"net_weight\")\n"
+ " ) mr";
final String expected = "SELECT *\n"
+ "FROM (SELECT *\n"
+ "FROM \"foodmart\".\"product\") MATCH_RECOGNIZE(\n"
+ "ONE ROW PER MATCH\n"
+ "AFTER MATCH SKIP TO NEXT ROW\n"
+ "PATTERN (^ \"STRT\" \"DOWN\" + \"UP\" + $)\n"
+ "DEFINE "
+ "\"DOWN\" AS PREV(\"DOWN\".\"net_weight\", 0) < "
+ "PREV(\"DOWN\".\"net_weight\", 1), "
+ "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
+ "PREV(\"UP\".\"net_weight\", 1))";
sql(sql).ok(expected);
}
/** Tests MATCH_RECOGNIZE with the {@code *} (zero-or-more) and {@code ?}
 * (optional) pattern quantifiers. */
@Test void testMatchRecognizePatternExpression5() {
final String sql = "select *\n"
+ " from \"product\" match_recognize\n"
+ " (\n"
+ " pattern (strt down* up?)\n"
+ " define\n"
+ " down as down.\"net_weight\" < PREV(down.\"net_weight\"),\n"
+ " up as up.\"net_weight\" > prev(up.\"net_weight\")\n"
+ " ) mr";
final String expected = "SELECT *\n"
+ "FROM (SELECT *\n"
+ "FROM \"foodmart\".\"product\") MATCH_RECOGNIZE(\n"
+ "ONE ROW PER MATCH\n"
+ "AFTER MATCH SKIP TO NEXT ROW\n"
+ "PATTERN (\"STRT\" \"DOWN\" * \"UP\" ?)\n"
+ "DEFINE "
+ "\"DOWN\" AS PREV(\"DOWN\".\"net_weight\", 0) < "
+ "PREV(\"DOWN\".\"net_weight\", 1), "
+ "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
+ "PREV(\"UP\".\"net_weight\", 1))";
sql(sql).ok(expected);
}
/** Tests MATCH_RECOGNIZE with a pattern-exclusion group
 * ({@code {-...-}}). */
@Test void testMatchRecognizePatternExpression6() {
final String sql = "select *\n"
+ " from \"product\" match_recognize\n"
+ " (\n"
+ " pattern (strt {-down-} up?)\n"
+ " define\n"
+ " down as down.\"net_weight\" < PREV(down.\"net_weight\"),\n"
+ " up as up.\"net_weight\" > prev(up.\"net_weight\")\n"
+ " ) mr";
final String expected = "SELECT *\n"
+ "FROM (SELECT *\n"
+ "FROM \"foodmart\".\"product\") MATCH_RECOGNIZE(\n"
+ "ONE ROW PER MATCH\n"
+ "AFTER MATCH SKIP TO NEXT ROW\n"
+ "PATTERN (\"STRT\" {- \"DOWN\" -} \"UP\" ?)\n"
+ "DEFINE "
+ "\"DOWN\" AS PREV(\"DOWN\".\"net_weight\", 0) < "
+ "PREV(\"DOWN\".\"net_weight\", 1), "
+ "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
+ "PREV(\"UP\".\"net_weight\", 1))";
sql(sql).ok(expected);
}
/** Tests MATCH_RECOGNIZE with bounded-repetition quantifiers
 * {@code {2}} (exact) and {@code {3,}} (at least). */
@Test void testMatchRecognizePatternExpression7() {
final String sql = "select *\n"
+ " from \"product\" match_recognize\n"
+ " (\n"
+ " pattern (strt down{2} up{3,})\n"
+ " define\n"
+ " down as down.\"net_weight\" < PREV(down.\"net_weight\"),\n"
+ " up as up.\"net_weight\" > prev(up.\"net_weight\")\n"
+ " ) mr";
final String expected = "SELECT *\n"
+ "FROM (SELECT *\n"
+ "FROM \"foodmart\".\"product\") MATCH_RECOGNIZE(\n"
+ "ONE ROW PER MATCH\n"
+ "AFTER MATCH SKIP TO NEXT ROW\n"
+ "PATTERN (\"STRT\" \"DOWN\" { 2 } \"UP\" { 3, })\n"
+ "DEFINE "
+ "\"DOWN\" AS PREV(\"DOWN\".\"net_weight\", 0) < "
+ "PREV(\"DOWN\".\"net_weight\", 1), "
+ "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
+ "PREV(\"UP\".\"net_weight\", 1))";
sql(sql).ok(expected);
}
/** Tests MATCH_RECOGNIZE with bounded-repetition quantifiers
 * {@code {,2}} (at most) and {@code {3,5}} (range). */
@Test void testMatchRecognizePatternExpression8() {
final String sql = "select *\n"
+ " from \"product\" match_recognize\n"
+ " (\n"
+ " pattern (strt down{,2} up{3,5})\n"
+ " define\n"
+ " down as down.\"net_weight\" < PREV(down.\"net_weight\"),\n"
+ " up as up.\"net_weight\" > prev(up.\"net_weight\")\n"
+ " ) mr";
final String expected = "SELECT *\n"
+ "FROM (SELECT *\n"
+ "FROM \"foodmart\".\"product\") MATCH_RECOGNIZE(\n"
+ "ONE ROW PER MATCH\n"
+ "AFTER MATCH SKIP TO NEXT ROW\n"
+ "PATTERN (\"STRT\" \"DOWN\" { , 2 } \"UP\" { 3, 5 })\n"
+ "DEFINE "
+ "\"DOWN\" AS PREV(\"DOWN\".\"net_weight\", 0) < "
+ "PREV(\"DOWN\".\"net_weight\", 1), "
+ "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
+ "PREV(\"UP\".\"net_weight\", 1))";
sql(sql).ok(expected);
}
/** Tests MATCH_RECOGNIZE with exclusion groups that themselves contain
 * quantified variables ({@code {-down+-}}, {@code {-up*-}}). */
@Test void testMatchRecognizePatternExpression9() {
final String sql = "select *\n"
+ " from \"product\" match_recognize\n"
+ " (\n"
+ " pattern (strt {-down+-} {-up*-})\n"
+ " define\n"
+ " down as down.\"net_weight\" < PREV(down.\"net_weight\"),\n"
+ " up as up.\"net_weight\" > prev(up.\"net_weight\")\n"
+ " ) mr";
final String expected = "SELECT *\n"
+ "FROM (SELECT *\n"
+ "FROM \"foodmart\".\"product\") MATCH_RECOGNIZE(\n"
+ "ONE ROW PER MATCH\n"
+ "AFTER MATCH SKIP TO NEXT ROW\n"
+ "PATTERN (\"STRT\" {- \"DOWN\" + -} {- \"UP\" * -})\n"
+ "DEFINE "
+ "\"DOWN\" AS PREV(\"DOWN\".\"net_weight\", 0) < "
+ "PREV(\"DOWN\".\"net_weight\", 1), "
+ "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
+ "PREV(\"UP\".\"net_weight\", 1))";
sql(sql).ok(expected);
}
/** Tests MATCH_RECOGNIZE with pattern alternation ({@code |}) over several
 * variable orderings. */
@Test void testMatchRecognizePatternExpression10() {
final String sql = "select *\n"
+ " from \"product\" match_recognize\n"
+ " (\n"
+ " pattern (A B C | A C B | B A C | B C A | C A B | C B A)\n"
+ " define\n"
+ " A as A.\"net_weight\" < PREV(A.\"net_weight\"),\n"
+ " B as B.\"net_weight\" > PREV(B.\"net_weight\"),\n"
+ " C as C.\"net_weight\" < PREV(C.\"net_weight\")\n"
+ " ) mr";
final String expected = "SELECT *\n"
+ "FROM (SELECT *\n"
+ "FROM \"foodmart\".\"product\") MATCH_RECOGNIZE(\n"
+ "ONE ROW PER MATCH\n"
+ "AFTER MATCH SKIP TO NEXT ROW\n"
+ "PATTERN "
+ "(\"A\" \"B\" \"C\" | \"A\" \"C\" \"B\" | \"B\" \"A\" \"C\" "
+ "| \"B\" \"C\" \"A\" | \"C\" \"A\" \"B\" | \"C\" \"B\" \"A\")\n"
+ "DEFINE "
+ "\"A\" AS PREV(\"A\".\"net_weight\", 0) < PREV(\"A\".\"net_weight\", 1), "
+ "\"B\" AS PREV(\"B\".\"net_weight\", 0) > PREV(\"B\".\"net_weight\", 1), "
+ "\"C\" AS PREV(\"C\".\"net_weight\", 0) < PREV(\"C\".\"net_weight\", 1))";
sql(sql).ok(expected);
}
/** Tests MATCH_RECOGNIZE applied to a sub-query
 * ({@code (select * from "product")}) instead of a bare table. */
@Test void testMatchRecognizePatternExpression11() {
final String sql = "select *\n"
+ " from (select * from \"product\") match_recognize\n"
+ " (\n"
+ " pattern (strt down+ up+)\n"
+ " define\n"
+ " down as down.\"net_weight\" < PREV(down.\"net_weight\"),\n"
+ " up as up.\"net_weight\" > prev(up.\"net_weight\")\n"
+ " ) mr";
final String expected = "SELECT *\n"
+ "FROM (SELECT *\n"
+ "FROM \"foodmart\".\"product\") MATCH_RECOGNIZE(\n"
+ "ONE ROW PER MATCH\n"
+ "AFTER MATCH SKIP TO NEXT ROW\n"
+ "PATTERN (\"STRT\" \"DOWN\" + \"UP\" +)\n"
+ "DEFINE "
+ "\"DOWN\" AS PREV(\"DOWN\".\"net_weight\", 0) < "
+ "PREV(\"DOWN\".\"net_weight\", 1), "
+ "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
+ "PREV(\"UP\".\"net_weight\", 1))";
sql(sql).ok(expected);
}
/** Tests MATCH_RECOGNIZE followed by an outer ORDER BY on a column of the
 * match output. */
@Test void testMatchRecognizePatternExpression12() {
final String sql = "select *\n"
+ " from \"product\" match_recognize\n"
+ " (\n"
+ " pattern (strt down+ up+)\n"
+ " define\n"
+ " down as down.\"net_weight\" < PREV(down.\"net_weight\"),\n"
+ " up as up.\"net_weight\" > prev(up.\"net_weight\")\n"
+ " ) mr order by MR.\"net_weight\"";
final String expected = "SELECT *\n"
+ "FROM (SELECT *\n"
+ "FROM \"foodmart\".\"product\") MATCH_RECOGNIZE(\n"
+ "ONE ROW PER MATCH\n"
+ "AFTER MATCH SKIP TO NEXT ROW\n"
+ "PATTERN (\"STRT\" \"DOWN\" + \"UP\" +)\n"
+ "DEFINE "
+ "\"DOWN\" AS PREV(\"DOWN\".\"net_weight\", 0) < "
+ "PREV(\"DOWN\".\"net_weight\", 1), "
+ "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
+ "PREV(\"UP\".\"net_weight\", 1))\n"
+ "ORDER BY \"net_weight\"";
sql(sql).ok(expected);
}
/** Tests MATCH_RECOGNIZE over a multi-way join with WHERE filters, followed
 * by an outer ORDER BY. */
@Test void testMatchRecognizePatternExpression13() {
final String sql = "select *\n"
+ " from (\n"
+ "select *\n"
+ "from \"sales_fact_1997\" as s\n"
+ "join \"customer\" as c\n"
+ " on s.\"customer_id\" = c.\"customer_id\"\n"
+ "join \"product\" as p\n"
+ " on s.\"product_id\" = p.\"product_id\"\n"
+ "join \"product_class\" as pc\n"
+ " on p.\"product_class_id\" = pc.\"product_class_id\"\n"
+ "where c.\"city\" = 'San Francisco'\n"
+ "and pc.\"product_department\" = 'Snacks'"
+ ") match_recognize\n"
+ " (\n"
+ " pattern (strt down+ up+)\n"
+ " define\n"
+ " down as down.\"net_weight\" < PREV(down.\"net_weight\"),\n"
+ " up as up.\"net_weight\" > prev(up.\"net_weight\")\n"
+ " ) mr order by MR.\"net_weight\"";
final String expected = "SELECT *\n"
+ "FROM (SELECT *\n"
+ "FROM \"foodmart\".\"sales_fact_1997\"\n"
+ "INNER JOIN \"foodmart\".\"customer\" "
+ "ON \"sales_fact_1997\".\"customer_id\" = \"customer\".\"customer_id\"\n"
+ "INNER JOIN \"foodmart\".\"product\" "
+ "ON \"sales_fact_1997\".\"product_id\" = \"product\".\"product_id\"\n"
+ "INNER JOIN \"foodmart\".\"product_class\" "
+ "ON \"product\".\"product_class_id\" = \"product_class\".\"product_class_id\"\n"
+ "WHERE \"customer\".\"city\" = 'San Francisco' "
+ "AND \"product_class\".\"product_department\" = 'Snacks') "
+ "MATCH_RECOGNIZE(\n"
+ "ONE ROW PER MATCH\n"
+ "AFTER MATCH SKIP TO NEXT ROW\n"
+ "PATTERN (\"STRT\" \"DOWN\" + \"UP\" +)\n"
+ "DEFINE "
+ "\"DOWN\" AS PREV(\"DOWN\".\"net_weight\", 0) < "
+ "PREV(\"DOWN\".\"net_weight\", 1), "
+ "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
+ "PREV(\"UP\".\"net_weight\", 1))\n"
+ "ORDER BY \"net_weight\"";
sql(sql).ok(expected);
}
/** Tests MATCH_RECOGNIZE DEFINE with NEXT; it is emitted wrapping a
 * normalized {@code PREV(..., 0)} reference. */
@Test void testMatchRecognizeDefineClause() {
final String sql = "select *\n"
+ " from \"product\" match_recognize\n"
+ " (\n"
+ " pattern (strt down+ up+)\n"
+ " define\n"
+ " down as down.\"net_weight\" < PREV(down.\"net_weight\"),\n"
+ " up as up.\"net_weight\" > NEXT(up.\"net_weight\")\n"
+ " ) mr";
final String expected = "SELECT *\n"
+ "FROM (SELECT *\n"
+ "FROM \"foodmart\".\"product\") MATCH_RECOGNIZE(\n"
+ "ONE ROW PER MATCH\n"
+ "AFTER MATCH SKIP TO NEXT ROW\n"
+ "PATTERN (\"STRT\" \"DOWN\" + \"UP\" +)\n"
+ "DEFINE "
+ "\"DOWN\" AS PREV(\"DOWN\".\"net_weight\", 0) < "
+ "PREV(\"DOWN\".\"net_weight\", 1), "
+ "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
+ "NEXT(PREV(\"UP\".\"net_weight\", 0), 1))";
sql(sql).ok(expected);
}
/** Tests MATCH_RECOGNIZE DEFINE with the FIRST and LAST navigation
 * functions; both gain an explicit offset of 0. */
@Test void testMatchRecognizeDefineClause2() {
final String sql = "select *\n"
+ " from \"product\" match_recognize\n"
+ " (\n"
+ " pattern (strt down+ up+)\n"
+ " define\n"
+ " down as down.\"net_weight\" < FIRST(down.\"net_weight\"),\n"
+ " up as up.\"net_weight\" > LAST(up.\"net_weight\")\n"
+ " ) mr";
final String expected = "SELECT *\n"
+ "FROM (SELECT *\n"
+ "FROM \"foodmart\".\"product\") MATCH_RECOGNIZE(\n"
+ "ONE ROW PER MATCH\n"
+ "AFTER MATCH SKIP TO NEXT ROW\n"
+ "PATTERN (\"STRT\" \"DOWN\" + \"UP\" +)\n"
+ "DEFINE "
+ "\"DOWN\" AS PREV(\"DOWN\".\"net_weight\", 0) < "
+ "FIRST(\"DOWN\".\"net_weight\", 0), "
+ "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
+ "LAST(\"UP\".\"net_weight\", 0))";
sql(sql).ok(expected);
}
/** Tests MATCH_RECOGNIZE DEFINE where LAST is applied to an arithmetic
 * expression; LAST distributes over the two column references. */
@Test void testMatchRecognizeDefineClause3() {
final String sql = "select *\n"
+ " from \"product\" match_recognize\n"
+ " (\n"
+ " pattern (strt down+ up+)\n"
+ " define\n"
+ " down as down.\"net_weight\" < PREV(down.\"net_weight\",1),\n"
+ " up as up.\"net_weight\" > LAST(up.\"net_weight\" + up.\"gross_weight\")\n"
+ " ) mr";
final String expected = "SELECT *\n"
+ "FROM (SELECT *\n"
+ "FROM \"foodmart\".\"product\") MATCH_RECOGNIZE(\n"
+ "ONE ROW PER MATCH\n"
+ "AFTER MATCH SKIP TO NEXT ROW\n"
+ "PATTERN (\"STRT\" \"DOWN\" + \"UP\" +)\n"
+ "DEFINE "
+ "\"DOWN\" AS PREV(\"DOWN\".\"net_weight\", 0) < "
+ "PREV(\"DOWN\".\"net_weight\", 1), "
+ "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
+ "LAST(\"UP\".\"net_weight\", 0) + LAST(\"UP\".\"gross_weight\", 0))";
sql(sql).ok(expected);
}
/** Tests MATCH_RECOGNIZE DEFINE with nested navigation,
 * {@code PREV(LAST(...), 3)}; the nesting is preserved in the output. */
@Test void testMatchRecognizeDefineClause4() {
final String sql = "select *\n"
+ " from \"product\" match_recognize\n"
+ " (\n"
+ " pattern (strt down+ up+)\n"
+ " define\n"
+ " down as down.\"net_weight\" < PREV(down.\"net_weight\",1),\n"
+ " up as up.\"net_weight\" > "
+ "PREV(LAST(up.\"net_weight\" + up.\"gross_weight\"),3)\n"
+ " ) mr";
final String expected = "SELECT *\n"
+ "FROM (SELECT *\n"
+ "FROM \"foodmart\".\"product\") MATCH_RECOGNIZE(\n"
+ "ONE ROW PER MATCH\n"
+ "AFTER MATCH SKIP TO NEXT ROW\n"
+ "PATTERN (\"STRT\" \"DOWN\" + \"UP\" +)\n"
+ "DEFINE "
+ "\"DOWN\" AS PREV(\"DOWN\".\"net_weight\", 0) < "
+ "PREV(\"DOWN\".\"net_weight\", 1), "
+ "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
+ "PREV(LAST(\"UP\".\"net_weight\", 0) + "
+ "LAST(\"UP\".\"gross_weight\", 0), 3))";
sql(sql).ok(expected);
}
/** Tests MATCH_RECOGNIZE MEASURES including MATCH_NUMBER() and CLASSIFIER();
 * each measure is emitted with an explicit FINAL semantics prefix. */
@Test void testMatchRecognizeMeasures1() {
final String sql = "select *\n"
+ " from \"product\" match_recognize\n"
+ " (\n"
+ " measures MATCH_NUMBER() as match_num, "
+ " CLASSIFIER() as var_match, "
+ " STRT.\"net_weight\" as start_nw,"
+ " LAST(DOWN.\"net_weight\") as bottom_nw,"
+ " LAST(up.\"net_weight\") as end_nw"
+ " pattern (strt down+ up+)\n"
+ " define\n"
+ " down as down.\"net_weight\" < PREV(down.\"net_weight\"),\n"
+ " up as up.\"net_weight\" > prev(up.\"net_weight\")\n"
+ " ) mr";
final String expected = "SELECT *\n"
+ "FROM (SELECT *\n"
+ "FROM \"foodmart\".\"product\") "
+ "MATCH_RECOGNIZE(\n"
+ "MEASURES "
+ "FINAL MATCH_NUMBER () AS \"MATCH_NUM\", "
+ "FINAL CLASSIFIER() AS \"VAR_MATCH\", "
+ "FINAL \"STRT\".\"net_weight\" AS \"START_NW\", "
+ "FINAL LAST(\"DOWN\".\"net_weight\", 0) AS \"BOTTOM_NW\", "
+ "FINAL LAST(\"UP\".\"net_weight\", 0) AS \"END_NW\"\n"
+ "ONE ROW PER MATCH\n"
+ "AFTER MATCH SKIP TO NEXT ROW\n"
+ "PATTERN (\"STRT\" \"DOWN\" + \"UP\" +)\n"
+ "DEFINE "
+ "\"DOWN\" AS PREV(\"DOWN\".\"net_weight\", 0) < "
+ "PREV(\"DOWN\".\"net_weight\", 1), "
+ "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
+ "PREV(\"UP\".\"net_weight\", 1))";
sql(sql).ok(expected);
}
/** Tests MATCH_RECOGNIZE MEASURES with an explicit FINAL on one measure;
 * all measures are emitted with FINAL. */
@Test void testMatchRecognizeMeasures2() {
final String sql = "select *\n"
+ " from \"product\" match_recognize\n"
+ " (\n"
+ " measures STRT.\"net_weight\" as start_nw,"
+ " FINAL LAST(DOWN.\"net_weight\") as bottom_nw,"
+ " LAST(up.\"net_weight\") as end_nw"
+ " pattern (strt down+ up+)\n"
+ " define\n"
+ " down as down.\"net_weight\" < PREV(down.\"net_weight\"),\n"
+ " up as up.\"net_weight\" > prev(up.\"net_weight\")\n"
+ " ) mr";
final String expected = "SELECT *\n"
+ "FROM (SELECT *\n"
+ "FROM \"foodmart\".\"product\") "
+ "MATCH_RECOGNIZE(\n"
+ "MEASURES "
+ "FINAL \"STRT\".\"net_weight\" AS \"START_NW\", "
+ "FINAL LAST(\"DOWN\".\"net_weight\", 0) AS \"BOTTOM_NW\", "
+ "FINAL LAST(\"UP\".\"net_weight\", 0) AS \"END_NW\"\n"
+ "ONE ROW PER MATCH\n"
+ "AFTER MATCH SKIP TO NEXT ROW\n"
+ "PATTERN (\"STRT\" \"DOWN\" + \"UP\" +)\n"
+ "DEFINE "
+ "\"DOWN\" AS PREV(\"DOWN\".\"net_weight\", 0) < "
+ "PREV(\"DOWN\".\"net_weight\", 1), "
+ "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
+ "PREV(\"UP\".\"net_weight\", 1))";
sql(sql).ok(expected);
}
/** Tests MATCH_RECOGNIZE MEASURES with RUNNING semantics on a measure; it is
 * emitted parenthesized inside a FINAL wrapper. */
@Test void testMatchRecognizeMeasures3() {
final String sql = "select *\n"
+ " from \"product\" match_recognize\n"
+ " (\n"
+ " measures STRT.\"net_weight\" as start_nw,"
+ " RUNNING LAST(DOWN.\"net_weight\") as bottom_nw,"
+ " LAST(up.\"net_weight\") as end_nw"
+ " pattern (strt down+ up+)\n"
+ " define\n"
+ " down as down.\"net_weight\" < PREV(down.\"net_weight\"),\n"
+ " up as up.\"net_weight\" > prev(up.\"net_weight\")\n"
+ " ) mr";
final String expected = "SELECT *\n"
+ "FROM (SELECT *\n"
+ "FROM \"foodmart\".\"product\") "
+ "MATCH_RECOGNIZE(\n"
+ "MEASURES "
+ "FINAL \"STRT\".\"net_weight\" AS \"START_NW\", "
+ "FINAL (RUNNING LAST(\"DOWN\".\"net_weight\", 0)) AS \"BOTTOM_NW\", "
+ "FINAL LAST(\"UP\".\"net_weight\", 0) AS \"END_NW\"\n"
+ "ONE ROW PER MATCH\n"
+ "AFTER MATCH SKIP TO NEXT ROW\n"
+ "PATTERN (\"STRT\" \"DOWN\" + \"UP\" +)\n"
+ "DEFINE "
+ "\"DOWN\" AS PREV(\"DOWN\".\"net_weight\", 0) < "
+ "PREV(\"DOWN\".\"net_weight\", 1), "
+ "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
+ "PREV(\"UP\".\"net_weight\", 1))";
sql(sql).ok(expected);
}
/** Tests MATCH_RECOGNIZE MEASURES with COUNT aggregates, both on a pattern
 * variable and on an unqualified column (emitted as {@code "*"."net_weight"}),
 * with FINAL and RUNNING semantics. */
@Test void testMatchRecognizeMeasures4() {
final String sql = "select *\n"
+ " from \"product\" match_recognize\n"
+ " (\n"
+ " measures STRT.\"net_weight\" as start_nw,"
+ " FINAL COUNT(up.\"net_weight\") as up_cnt,"
+ " FINAL COUNT(\"net_weight\") as down_cnt,"
+ " RUNNING COUNT(\"net_weight\") as running_cnt"
+ " pattern (strt down+ up+)\n"
+ " define\n"
+ " down as down.\"net_weight\" < PREV(down.\"net_weight\"),\n"
+ " up as up.\"net_weight\" > prev(up.\"net_weight\")\n"
+ " ) mr";
final String expected = "SELECT *\n"
+ "FROM (SELECT *\n"
+ "FROM \"foodmart\".\"product\") "
+ "MATCH_RECOGNIZE(\n"
+ "MEASURES "
+ "FINAL \"STRT\".\"net_weight\" AS \"START_NW\", "
+ "FINAL COUNT(\"UP\".\"net_weight\") AS \"UP_CNT\", "
+ "FINAL COUNT(\"*\".\"net_weight\") AS \"DOWN_CNT\", "
+ "FINAL (RUNNING COUNT(\"*\".\"net_weight\")) AS \"RUNNING_CNT\"\n"
+ "ONE ROW PER MATCH\n"
+ "AFTER MATCH SKIP TO NEXT ROW\n"
+ "PATTERN (\"STRT\" \"DOWN\" + \"UP\" +)\n"
+ "DEFINE "
+ "\"DOWN\" AS PREV(\"DOWN\".\"net_weight\", 0) < "
+ "PREV(\"DOWN\".\"net_weight\", 1), "
+ "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
+ "PREV(\"UP\".\"net_weight\", 1))";
sql(sql).ok(expected);
}
/** Tests MATCH_RECOGNIZE MEASURES with an AVG aggregate; AVG is expanded to
 * {@code SUM(...) / COUNT(...)} in the generated SQL. */
@Test void testMatchRecognizeMeasures5() {
final String sql = "select *\n"
+ " from \"product\" match_recognize\n"
+ " (\n"
+ " measures "
+ " FIRST(STRT.\"net_weight\") as start_nw,"
+ " LAST(UP.\"net_weight\") as up_cnt,"
+ " AVG(DOWN.\"net_weight\") as down_cnt"
+ " pattern (strt down+ up+)\n"
+ " define\n"
+ " down as down.\"net_weight\" < PREV(down.\"net_weight\"),\n"
+ " up as up.\"net_weight\" > prev(up.\"net_weight\")\n"
+ " ) mr";
final String expected = "SELECT *\n"
+ "FROM (SELECT *\n"
+ "FROM \"foodmart\".\"product\") "
+ "MATCH_RECOGNIZE(\n"
+ "MEASURES "
+ "FINAL FIRST(\"STRT\".\"net_weight\", 0) AS \"START_NW\", "
+ "FINAL LAST(\"UP\".\"net_weight\", 0) AS \"UP_CNT\", "
+ "FINAL (SUM(\"DOWN\".\"net_weight\") / "
+ "COUNT(\"DOWN\".\"net_weight\")) AS \"DOWN_CNT\"\n"
+ "ONE ROW PER MATCH\n"
+ "AFTER MATCH SKIP TO NEXT ROW\n"
+ "PATTERN (\"STRT\" \"DOWN\" + \"UP\" +)\n"
+ "DEFINE "
+ "\"DOWN\" AS PREV(\"DOWN\".\"net_weight\", 0) < "
+ "PREV(\"DOWN\".\"net_weight\", 1), "
+ "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
+ "PREV(\"UP\".\"net_weight\", 1))";
sql(sql).ok(expected);
}
/** Tests MATCH_RECOGNIZE MEASURES mixing navigation functions (FIRST, LAST)
 * with an explicitly FINAL SUM aggregate. */
@Test void testMatchRecognizeMeasures6() {
final String sql = "select *\n"
+ " from \"product\" match_recognize\n"
+ " (\n"
+ " measures "
+ " FIRST(STRT.\"net_weight\") as start_nw,"
+ " LAST(DOWN.\"net_weight\") as up_cnt,"
+ " FINAL SUM(DOWN.\"net_weight\") as down_cnt"
+ " pattern (strt down+ up+)\n"
+ " define\n"
+ " down as down.\"net_weight\" < PREV(down.\"net_weight\"),\n"
+ " up as up.\"net_weight\" > prev(up.\"net_weight\")\n"
+ " ) mr";
final String expected = "SELECT *\n"
+ "FROM (SELECT *\n"
+ "FROM \"foodmart\".\"product\") MATCH_RECOGNIZE(\n"
+ "MEASURES "
+ "FINAL FIRST(\"STRT\".\"net_weight\", 0) AS \"START_NW\", "
+ "FINAL LAST(\"DOWN\".\"net_weight\", 0) AS \"UP_CNT\", "
+ "FINAL SUM(\"DOWN\".\"net_weight\") AS \"DOWN_CNT\"\n"
+ "ONE ROW PER MATCH\n"
+ "AFTER MATCH SKIP TO NEXT ROW\n"
+ "PATTERN "
+ "(\"STRT\" \"DOWN\" + \"UP\" +)\n"
+ "DEFINE "
+ "\"DOWN\" AS PREV(\"DOWN\".\"net_weight\", 0) < "
+ "PREV(\"DOWN\".\"net_weight\", 1), "
+ "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
+ "PREV(\"UP\".\"net_weight\", 1))";
sql(sql).ok(expected);
}
/** As {@link #testMatchRecognizeMeasures6()} but with an outer ORDER BY on
 * the measure aliases. */
@Test void testMatchRecognizeMeasures7() {
final String sql = "select *\n"
+ " from \"product\" match_recognize\n"
+ " (\n"
+ " measures "
+ " FIRST(STRT.\"net_weight\") as start_nw,"
+ " LAST(DOWN.\"net_weight\") as up_cnt,"
+ " FINAL SUM(DOWN.\"net_weight\") as down_cnt"
+ " pattern (strt down+ up+)\n"
+ " define\n"
+ " down as down.\"net_weight\" < PREV(down.\"net_weight\"),\n"
+ " up as up.\"net_weight\" > prev(up.\"net_weight\")\n"
+ " ) mr order by start_nw, up_cnt";
final String expected = "SELECT *\n"
+ "FROM (SELECT *\n"
+ "FROM \"foodmart\".\"product\") MATCH_RECOGNIZE(\n"
+ "MEASURES "
+ "FINAL FIRST(\"STRT\".\"net_weight\", 0) AS \"START_NW\", "
+ "FINAL LAST(\"DOWN\".\"net_weight\", 0) AS \"UP_CNT\", "
+ "FINAL SUM(\"DOWN\".\"net_weight\") AS \"DOWN_CNT\"\n"
+ "ONE ROW PER MATCH\n"
+ "AFTER MATCH SKIP TO NEXT ROW\n"
+ "PATTERN "
+ "(\"STRT\" \"DOWN\" + \"UP\" +)\n"
+ "DEFINE "
+ "\"DOWN\" AS PREV(\"DOWN\".\"net_weight\", 0) < "
+ "PREV(\"DOWN\".\"net_weight\", 1), "
+ "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
+ "PREV(\"UP\".\"net_weight\", 1))\n"
+ "ORDER BY \"START_NW\", \"UP_CNT\"";
sql(sql).ok(expected);
}
/** Tests MATCH_RECOGNIZE with an explicit AFTER MATCH SKIP TO NEXT ROW
 * clause. */
@Test void testMatchRecognizePatternSkip1() {
final String sql = "select *\n"
+ " from \"product\" match_recognize\n"
+ " (\n"
+ " after match skip to next row\n"
+ " pattern (strt down+ up+)\n"
+ " define\n"
+ " down as down.\"net_weight\" < PREV(down.\"net_weight\"),\n"
+ " up as up.\"net_weight\" > NEXT(up.\"net_weight\")\n"
+ " ) mr";
final String expected = "SELECT *\n"
+ "FROM (SELECT *\n"
+ "FROM \"foodmart\".\"product\") MATCH_RECOGNIZE(\n"
+ "ONE ROW PER MATCH\n"
+ "AFTER MATCH SKIP TO NEXT ROW\n"
+ "PATTERN (\"STRT\" \"DOWN\" + \"UP\" +)\n"
+ "DEFINE "
+ "\"DOWN\" AS PREV(\"DOWN\".\"net_weight\", 0) < "
+ "PREV(\"DOWN\".\"net_weight\", 1), "
+ "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
+ "NEXT(PREV(\"UP\".\"net_weight\", 0), 1))";
sql(sql).ok(expected);
}
/** Tests MATCH_RECOGNIZE with AFTER MATCH SKIP PAST LAST ROW. */
@Test void testMatchRecognizePatternSkip2() {
final String sql = "select *\n"
+ " from \"product\" match_recognize\n"
+ " (\n"
+ " after match skip past last row\n"
+ " pattern (strt down+ up+)\n"
+ " define\n"
+ " down as down.\"net_weight\" < PREV(down.\"net_weight\"),\n"
+ " up as up.\"net_weight\" > NEXT(up.\"net_weight\")\n"
+ " ) mr";
final String expected = "SELECT *\n"
+ "FROM (SELECT *\n"
+ "FROM \"foodmart\".\"product\") MATCH_RECOGNIZE(\n"
+ "ONE ROW PER MATCH\n"
+ "AFTER MATCH SKIP PAST LAST ROW\n"
+ "PATTERN (\"STRT\" \"DOWN\" + \"UP\" +)\n"
+ "DEFINE "
+ "\"DOWN\" AS PREV(\"DOWN\".\"net_weight\", 0) < "
+ "PREV(\"DOWN\".\"net_weight\", 1), "
+ "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
+ "NEXT(PREV(\"UP\".\"net_weight\", 0), 1))";
sql(sql).ok(expected);
}
/** Tests MATCH_RECOGNIZE with AFTER MATCH SKIP TO FIRST &lt;variable&gt;. */
@Test void testMatchRecognizePatternSkip3() {
final String sql = "select *\n"
+ " from \"product\" match_recognize\n"
+ " (\n"
+ " after match skip to FIRST down\n"
+ " pattern (strt down+ up+)\n"
+ " define\n"
+ " down as down.\"net_weight\" < PREV(down.\"net_weight\"),\n"
+ " up as up.\"net_weight\" > NEXT(up.\"net_weight\")\n"
+ " ) mr";
final String expected = "SELECT *\n"
+ "FROM (SELECT *\n"
+ "FROM \"foodmart\".\"product\") MATCH_RECOGNIZE(\n"
+ "ONE ROW PER MATCH\n"
+ "AFTER MATCH SKIP TO FIRST \"DOWN\"\n"
+ "PATTERN (\"STRT\" \"DOWN\" + \"UP\" +)\n"
+ "DEFINE \"DOWN\" AS PREV(\"DOWN\".\"net_weight\", 0) < "
+ "PREV(\"DOWN\".\"net_weight\", 1), "
+ "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
+ "NEXT(PREV(\"UP\".\"net_weight\", 0), 1))";
sql(sql).ok(expected);
}
/** Tests MATCH_RECOGNIZE with AFTER MATCH SKIP TO LAST &lt;variable&gt;. */
@Test void testMatchRecognizePatternSkip4() {
final String sql = "select *\n"
+ " from \"product\" match_recognize\n"
+ " (\n"
+ " after match skip to last down\n"
+ " pattern (strt down+ up+)\n"
+ " define\n"
+ " down as down.\"net_weight\" < PREV(down.\"net_weight\"),\n"
+ " up as up.\"net_weight\" > NEXT(up.\"net_weight\")\n"
+ " ) mr";
final String expected = "SELECT *\n"
+ "FROM (SELECT *\n"
+ "FROM \"foodmart\".\"product\") MATCH_RECOGNIZE(\n"
+ "ONE ROW PER MATCH\n"
+ "AFTER MATCH SKIP TO LAST \"DOWN\"\n"
+ "PATTERN (\"STRT\" \"DOWN\" + \"UP\" +)\n"
+ "DEFINE "
+ "\"DOWN\" AS PREV(\"DOWN\".\"net_weight\", 0) < "
+ "PREV(\"DOWN\".\"net_weight\", 1), "
+ "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
+ "NEXT(PREV(\"UP\".\"net_weight\", 0), 1))";
sql(sql).ok(expected);
}
/** Tests that a bare AFTER MATCH SKIP TO &lt;variable&gt; is emitted as
 * SKIP TO LAST &lt;variable&gt;. */
@Test void testMatchRecognizePatternSkip5() {
final String sql = "select *\n"
+ " from \"product\" match_recognize\n"
+ " (\n"
+ " after match skip to down\n"
+ " pattern (strt down+ up+)\n"
+ " define\n"
+ " down as down.\"net_weight\" < PREV(down.\"net_weight\"),\n"
+ " up as up.\"net_weight\" > NEXT(up.\"net_weight\")\n"
+ " ) mr";
final String expected = "SELECT *\n"
+ "FROM (SELECT *\n"
+ "FROM \"foodmart\".\"product\") MATCH_RECOGNIZE(\n"
+ "ONE ROW PER MATCH\n"
+ "AFTER MATCH SKIP TO LAST \"DOWN\"\n"
+ "PATTERN (\"STRT\" \"DOWN\" + \"UP\" +)\n"
+ "DEFINE "
+ "\"DOWN\" AS PREV(\"DOWN\".\"net_weight\", 0) < "
+ "PREV(\"DOWN\".\"net_weight\", 1), "
+ "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
+ "NEXT(PREV(\"UP\".\"net_weight\", 0), 1))";
sql(sql).ok(expected);
}
/** Tests unparsing of a MATCH_RECOGNIZE SUBSET clause; note the subset's
 * members come back in sorted order ({@code ("DOWN", "STRT")}), not the
 * order written in the query. */
@Test void testMatchRecognizeSubset1() {
final String sql = "select *\n"
+ " from \"product\" match_recognize\n"
+ " (\n"
+ " after match skip to down\n"
+ " pattern (strt down+ up+)\n"
+ " subset stdn = (strt, down)\n"
+ " define\n"
+ " down as down.\"net_weight\" < PREV(down.\"net_weight\"),\n"
+ " up as up.\"net_weight\" > NEXT(up.\"net_weight\")\n"
+ " ) mr";
final String expected = "SELECT *\n"
+ "FROM (SELECT *\n"
+ "FROM \"foodmart\".\"product\") MATCH_RECOGNIZE(\n"
+ "ONE ROW PER MATCH\n"
+ "AFTER MATCH SKIP TO LAST \"DOWN\"\n"
+ "PATTERN (\"STRT\" \"DOWN\" + \"UP\" +)\n"
+ "SUBSET \"STDN\" = (\"DOWN\", \"STRT\")\n"
+ "DEFINE "
+ "\"DOWN\" AS PREV(\"DOWN\".\"net_weight\", 0) < "
+ "PREV(\"DOWN\".\"net_weight\", 1), "
+ "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
+ "NEXT(PREV(\"UP\".\"net_weight\", 0), 1))";
sql(sql).ok(expected);
}
/** Tests MEASURES over a SUBSET variable; {@code AVG} is expanded to
 * {@code SUM(...) / COUNT(...)} in the generated SQL, and measures are
 * prefixed FINAL under the default ONE ROW PER MATCH. */
@Test void testMatchRecognizeSubset2() {
final String sql = "select *\n"
+ " from \"product\" match_recognize\n"
+ " (\n"
+ " measures STRT.\"net_weight\" as start_nw,"
+ " LAST(DOWN.\"net_weight\") as bottom_nw,"
+ " AVG(STDN.\"net_weight\") as avg_stdn"
+ " pattern (strt down+ up+)\n"
+ " subset stdn = (strt, down)\n"
+ " define\n"
+ " down as down.\"net_weight\" < PREV(down.\"net_weight\"),\n"
+ " up as up.\"net_weight\" > prev(up.\"net_weight\")\n"
+ " ) mr";
final String expected = "SELECT *\n"
+ "FROM (SELECT *\n"
+ "FROM \"foodmart\".\"product\") "
+ "MATCH_RECOGNIZE(\n"
+ "MEASURES "
+ "FINAL \"STRT\".\"net_weight\" AS \"START_NW\", "
+ "FINAL LAST(\"DOWN\".\"net_weight\", 0) AS \"BOTTOM_NW\", "
+ "FINAL (SUM(\"STDN\".\"net_weight\") / "
+ "COUNT(\"STDN\".\"net_weight\")) AS \"AVG_STDN\"\n"
+ "ONE ROW PER MATCH\n"
+ "AFTER MATCH SKIP TO NEXT ROW\n"
+ "PATTERN (\"STRT\" \"DOWN\" + \"UP\" +)\n"
+ "SUBSET \"STDN\" = (\"DOWN\", \"STRT\")\n"
+ "DEFINE "
+ "\"DOWN\" AS PREV(\"DOWN\".\"net_weight\", 0) < "
+ "PREV(\"DOWN\".\"net_weight\", 1), "
+ "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
+ "PREV(\"UP\".\"net_weight\", 1))";
sql(sql).ok(expected);
}
/** Tests MEASURES over a SUBSET variable with {@code SUM}, which unparses
 * unchanged (unlike AVG, which is expanded — see testMatchRecognizeSubset2). */
@Test void testMatchRecognizeSubset3() {
final String sql = "select *\n"
+ " from \"product\" match_recognize\n"
+ " (\n"
+ " measures STRT.\"net_weight\" as start_nw,"
+ " LAST(DOWN.\"net_weight\") as bottom_nw,"
+ " SUM(STDN.\"net_weight\") as avg_stdn"
+ " pattern (strt down+ up+)\n"
+ " subset stdn = (strt, down)\n"
+ " define\n"
+ " down as down.\"net_weight\" < PREV(down.\"net_weight\"),\n"
+ " up as up.\"net_weight\" > prev(up.\"net_weight\")\n"
+ " ) mr";
final String expected = "SELECT *\n"
+ "FROM (SELECT *\n"
+ "FROM \"foodmart\".\"product\") "
+ "MATCH_RECOGNIZE(\n"
+ "MEASURES "
+ "FINAL \"STRT\".\"net_weight\" AS \"START_NW\", "
+ "FINAL LAST(\"DOWN\".\"net_weight\", 0) AS \"BOTTOM_NW\", "
+ "FINAL SUM(\"STDN\".\"net_weight\") AS \"AVG_STDN\"\n"
+ "ONE ROW PER MATCH\n"
+ "AFTER MATCH SKIP TO NEXT ROW\n"
+ "PATTERN (\"STRT\" \"DOWN\" + \"UP\" +)\n"
+ "SUBSET \"STDN\" = (\"DOWN\", \"STRT\")\n"
+ "DEFINE "
+ "\"DOWN\" AS PREV(\"DOWN\".\"net_weight\", 0) < "
+ "PREV(\"DOWN\".\"net_weight\", 1), "
+ "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
+ "PREV(\"UP\".\"net_weight\", 1))";
sql(sql).ok(expected);
}
/** Tests unparsing of multiple SUBSET definitions in one clause
 * ({@code stdn} and {@code stdn2}). */
@Test void testMatchRecognizeSubset4() {
final String sql = "select *\n"
+ " from \"product\" match_recognize\n"
+ " (\n"
+ " measures STRT.\"net_weight\" as start_nw,"
+ " LAST(DOWN.\"net_weight\") as bottom_nw,"
+ " SUM(STDN.\"net_weight\") as avg_stdn"
+ " pattern (strt down+ up+)\n"
+ " subset stdn = (strt, down), stdn2 = (strt, down)\n"
+ " define\n"
+ " down as down.\"net_weight\" < PREV(down.\"net_weight\"),\n"
+ " up as up.\"net_weight\" > prev(up.\"net_weight\")\n"
+ " ) mr";
final String expected = "SELECT *\n"
+ "FROM (SELECT *\n"
+ "FROM \"foodmart\".\"product\") "
+ "MATCH_RECOGNIZE(\n"
+ "MEASURES "
+ "FINAL \"STRT\".\"net_weight\" AS \"START_NW\", "
+ "FINAL LAST(\"DOWN\".\"net_weight\", 0) AS \"BOTTOM_NW\", "
+ "FINAL SUM(\"STDN\".\"net_weight\") AS \"AVG_STDN\"\n"
+ "ONE ROW PER MATCH\n"
+ "AFTER MATCH SKIP TO NEXT ROW\n"
+ "PATTERN (\"STRT\" \"DOWN\" + \"UP\" +)\n"
+ "SUBSET \"STDN\" = (\"DOWN\", \"STRT\"), \"STDN2\" = (\"DOWN\", \"STRT\")\n"
+ "DEFINE "
+ "\"DOWN\" AS PREV(\"DOWN\".\"net_weight\", 0) < "
+ "PREV(\"DOWN\".\"net_weight\", 1), "
+ "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
+ "PREV(\"UP\".\"net_weight\", 1))";
sql(sql).ok(expected);
}
/** Tests an explicit {@code ONE ROW PER MATCH}; measures are emitted with
 * the FINAL semantics prefix. */
@Test void testMatchRecognizeRowsPerMatch1() {
final String sql = "select *\n"
+ " from \"product\" match_recognize\n"
+ " (\n"
+ " measures STRT.\"net_weight\" as start_nw,"
+ " LAST(DOWN.\"net_weight\") as bottom_nw,"
+ " SUM(STDN.\"net_weight\") as avg_stdn"
+ " ONE ROW PER MATCH\n"
+ " pattern (strt down+ up+)\n"
+ " subset stdn = (strt, down), stdn2 = (strt, down)\n"
+ " define\n"
+ " down as down.\"net_weight\" < PREV(down.\"net_weight\"),\n"
+ " up as up.\"net_weight\" > prev(up.\"net_weight\")\n"
+ " ) mr";
final String expected = "SELECT *\n"
+ "FROM (SELECT *\n"
+ "FROM \"foodmart\".\"product\") "
+ "MATCH_RECOGNIZE(\n"
+ "MEASURES "
+ "FINAL \"STRT\".\"net_weight\" AS \"START_NW\", "
+ "FINAL LAST(\"DOWN\".\"net_weight\", 0) AS \"BOTTOM_NW\", "
+ "FINAL SUM(\"STDN\".\"net_weight\") AS \"AVG_STDN\"\n"
+ "ONE ROW PER MATCH\n"
+ "AFTER MATCH SKIP TO NEXT ROW\n"
+ "PATTERN (\"STRT\" \"DOWN\" + \"UP\" +)\n"
+ "SUBSET \"STDN\" = (\"DOWN\", \"STRT\"), \"STDN2\" = (\"DOWN\", \"STRT\")\n"
+ "DEFINE "
+ "\"DOWN\" AS PREV(\"DOWN\".\"net_weight\", 0) < "
+ "PREV(\"DOWN\".\"net_weight\", 1), "
+ "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
+ "PREV(\"UP\".\"net_weight\", 1))";
sql(sql).ok(expected);
}
/** Tests {@code ALL ROWS PER MATCH}; measures switch from FINAL to RUNNING
 * semantics in the generated SQL. */
@Test void testMatchRecognizeRowsPerMatch2() {
final String sql = "select *\n"
+ " from \"product\" match_recognize\n"
+ " (\n"
+ " measures STRT.\"net_weight\" as start_nw,"
+ " LAST(DOWN.\"net_weight\") as bottom_nw,"
+ " SUM(STDN.\"net_weight\") as avg_stdn"
+ " ALL ROWS PER MATCH\n"
+ " pattern (strt down+ up+)\n"
+ " subset stdn = (strt, down), stdn2 = (strt, down)\n"
+ " define\n"
+ " down as down.\"net_weight\" < PREV(down.\"net_weight\"),\n"
+ " up as up.\"net_weight\" > prev(up.\"net_weight\")\n"
+ " ) mr";
final String expected = "SELECT *\n"
+ "FROM (SELECT *\n"
+ "FROM \"foodmart\".\"product\") "
+ "MATCH_RECOGNIZE(\n"
+ "MEASURES "
+ "RUNNING \"STRT\".\"net_weight\" AS \"START_NW\", "
+ "RUNNING LAST(\"DOWN\".\"net_weight\", 0) AS \"BOTTOM_NW\", "
+ "RUNNING SUM(\"STDN\".\"net_weight\") AS \"AVG_STDN\"\n"
+ "ALL ROWS PER MATCH\n"
+ "AFTER MATCH SKIP TO NEXT ROW\n"
+ "PATTERN (\"STRT\" \"DOWN\" + \"UP\" +)\n"
+ "SUBSET \"STDN\" = (\"DOWN\", \"STRT\"), \"STDN2\" = (\"DOWN\", \"STRT\")\n"
+ "DEFINE "
+ "\"DOWN\" AS PREV(\"DOWN\".\"net_weight\", 0) < "
+ "PREV(\"DOWN\".\"net_weight\", 1), "
+ "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
+ "PREV(\"UP\".\"net_weight\", 1))";
sql(sql).ok(expected);
}
/** Tests that a {@code PATTERN ... WITHIN interval} clause and the
 * {@code ORDER BY} inside MATCH_RECOGNIZE are preserved on unparse. */
@Test void testMatchRecognizeWithin() {
final String sql = "select *\n"
+ " from \"employee\" match_recognize\n"
+ " (\n"
+ " order by \"hire_date\"\n"
+ " ALL ROWS PER MATCH\n"
+ " pattern (strt down+ up+) within interval '3:12:22.123' hour to second\n"
+ " define\n"
+ " down as down.\"salary\" < PREV(down.\"salary\"),\n"
+ " up as up.\"salary\" > prev(up.\"salary\")\n"
+ " ) mr";
final String expected = "SELECT *\n"
+ "FROM (SELECT *\n"
+ "FROM \"foodmart\".\"employee\") "
+ "MATCH_RECOGNIZE(\n"
+ "ORDER BY \"hire_date\"\n"
+ "ALL ROWS PER MATCH\n"
+ "AFTER MATCH SKIP TO NEXT ROW\n"
+ "PATTERN (\"STRT\" \"DOWN\" + \"UP\" +) WITHIN INTERVAL '3:12:22.123' HOUR TO SECOND\n"
+ "DEFINE "
+ "\"DOWN\" AS PREV(\"DOWN\".\"salary\", 0) < "
+ "PREV(\"DOWN\".\"salary\", 1), "
+ "\"UP\" AS PREV(\"UP\".\"salary\", 0) > "
+ "PREV(\"UP\".\"salary\", 1))";
sql(sql).ok(expected);
}
/** Tests an {@code IN} predicate inside DEFINE; it is rewritten as a
 * disjunction of equalities ({@code ... = 0 OR ... = 1}). Also covers
 * PARTITION BY and a mixed-direction ORDER BY. */
@Test void testMatchRecognizeIn() {
final String sql = "select *\n"
+ " from \"product\" match_recognize\n"
+ " (\n"
+ " partition by \"product_class_id\", \"brand_name\"\n"
+ " order by \"product_class_id\" asc, \"brand_name\" desc\n"
+ " pattern (strt down+ up+)\n"
+ " define\n"
+ " down as down.\"net_weight\" in (0, 1),\n"
+ " up as up.\"net_weight\" > prev(up.\"net_weight\")\n"
+ " ) mr";
final String expected = "SELECT *\n"
+ "FROM (SELECT *\n"
+ "FROM \"foodmart\".\"product\") MATCH_RECOGNIZE(\n"
+ "PARTITION BY \"product_class_id\", \"brand_name\"\n"
+ "ORDER BY \"product_class_id\", \"brand_name\" DESC\n"
+ "ONE ROW PER MATCH\n"
+ "AFTER MATCH SKIP TO NEXT ROW\n"
+ "PATTERN (\"STRT\" \"DOWN\" + \"UP\" +)\n"
+ "DEFINE "
+ "\"DOWN\" AS PREV(\"DOWN\".\"net_weight\", 0) = "
+ "0 OR PREV(\"DOWN\".\"net_weight\", 0) = 1, "
+ "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
+ "PREV(\"UP\".\"net_weight\", 1))";
sql(sql).ok(expected);
}
/** Tests per-dialect rendering of a VALUES clause: Hsqldb/Postgresql (and
 * Redshift, which reuses the Postgresql string) keep VALUES, while MySQL,
 * Oracle, Hive, Spark, BigQuery and Snowflake emulate it with
 * SELECT ... UNION ALL (Oracle additionally selecting FROM "DUAL"). */
@Test void testValues() {
final String sql = "select \"a\"\n"
+ "from (values (1, 'x'), (2, 'yy')) as t(\"a\", \"b\")";
final String expectedHsqldb = "SELECT a\n"
+ "FROM (VALUES (1, 'x '),\n"
+ "(2, 'yy')) AS t (a, b)";
final String expectedMysql = "SELECT `a`\n"
+ "FROM (SELECT 1 AS `a`, 'x ' AS `b`\n"
+ "UNION ALL\n"
+ "SELECT 2 AS `a`, 'yy' AS `b`) AS `t`";
final String expectedPostgresql = "SELECT \"a\"\n"
+ "FROM (VALUES (1, 'x '),\n"
+ "(2, 'yy')) AS \"t\" (\"a\", \"b\")";
final String expectedOracle = "SELECT \"a\"\n"
+ "FROM (SELECT 1 \"a\", 'x ' \"b\"\n"
+ "FROM \"DUAL\"\n"
+ "UNION ALL\n"
+ "SELECT 2 \"a\", 'yy' \"b\"\n"
+ "FROM \"DUAL\")";
final String expectedHive = "SELECT a\n"
+ "FROM (SELECT 1 a, 'x ' b\n"
+ "UNION ALL\n"
+ "SELECT 2 a, 'yy' b)";
final String expectedSpark = "SELECT a\n"
+ "FROM (SELECT 1 a, 'x ' b\n"
+ "UNION ALL\n"
+ "SELECT 2 a, 'yy' b)";
final String expectedBigQuery = "SELECT a\n"
+ "FROM (SELECT 1 AS a, 'x ' AS b\n"
+ "UNION ALL\n"
+ "SELECT 2 AS a, 'yy' AS b)";
final String expectedSnowflake = "SELECT \"a\"\n"
+ "FROM (SELECT 1 AS \"a\", 'x ' AS \"b\"\n"
+ "UNION ALL\n"
+ "SELECT 2 AS \"a\", 'yy' AS \"b\")";
final String expectedRedshift = expectedPostgresql;
sql(sql)
.withHsqldb()
.ok(expectedHsqldb)
.withMysql()
.ok(expectedMysql)
.withPostgresql()
.ok(expectedPostgresql)
.withOracle()
.ok(expectedOracle)
.withHive()
.ok(expectedHive)
.withSpark()
.ok(expectedSpark)
.withBigQuery()
.ok(expectedBigQuery)
.withSnowflake()
.ok(expectedSnowflake)
.withRedshift()
.ok(expectedRedshift);
}
/** Tests a VALUES with {@code LIMIT 0} after applying
 * {@code PruneEmptyRules.SORT_FETCH_ZERO_INSTANCE}: each dialect renders an
 * empty relation as a NULL row guarded by {@code WHERE 1 = 0}. */
@Test void testValuesEmpty() {
final String sql = "select *\n"
+ "from (values (1, 'a'), (2, 'bb')) as t(x, y)\n"
+ "limit 0";
final RuleSet rules =
RuleSets.ofList(PruneEmptyRules.SORT_FETCH_ZERO_INSTANCE);
final String expectedMysql = "SELECT *\n"
+ "FROM (SELECT NULL AS `X`, NULL AS `Y`) AS `t`\n"
+ "WHERE 1 = 0";
final String expectedOracle = "SELECT NULL \"X\", NULL \"Y\"\n"
+ "FROM \"DUAL\"\n"
+ "WHERE 1 = 0";
final String expectedPostgresql = "SELECT *\n"
+ "FROM (VALUES (NULL, NULL)) AS \"t\" (\"X\", \"Y\")\n"
+ "WHERE 1 = 0";
sql(sql)
.optimize(rules, null)
.withMysql()
.ok(expectedMysql)
.withOracle()
.ok(expectedOracle)
.withPostgresql()
.ok(expectedPostgresql);
}
/** Test case for
* <a href="https://issues.apache.org/jira/browse/CALCITE-3840">[CALCITE-3840]
* Re-aliasing of VALUES that has column aliases produces wrong SQL in the
* JDBC adapter</a>. */
@Test void testValuesReAlias() {
final RelBuilder builder = relBuilder();
// Two VALUES inputs with identical column aliases ("a", "b"); the second
// must be re-aliased (to "t0") so the FULL JOIN output is unambiguous.
final RelNode root = builder
.values(new String[]{ "a", "b" }, 1, "x ", 2, "yy")
.values(new String[]{ "a", "b" }, 1, "x ", 2, "yy")
.join(JoinRelType.FULL)
.project(builder.field("a"))
.build();
final String expectedSql = "SELECT \"t\".\"a\"\n"
+ "FROM (VALUES (1, 'x '),\n"
+ "(2, 'yy')) AS \"t\" (\"a\", \"b\")\n"
+ "FULL JOIN (VALUES (1, 'x '),\n"
+ "(2, 'yy')) AS \"t0\" (\"a\", \"b\") ON TRUE";
assertThat(toSql(root), isLinux(expectedSql));
// Now with indentation.
final String expectedSql2 = "SELECT \"t\".\"a\"\n"
+ "FROM (VALUES (1, 'x '),\n"
+ " (2, 'yy')) AS \"t\" (\"a\", \"b\")\n"
+ " FULL JOIN (VALUES (1, 'x '),\n"
+ " (2, 'yy')) AS \"t0\" (\"a\", \"b\") ON TRUE";
assertThat(
toSql(root, DatabaseProduct.CALCITE.getDialect(),
c -> c.withIndentation(2)),
isLinux(expectedSql2));
}
/** Tests that a SELECT without a FROM clause is emitted as-is for Hive and
 * BigQuery. */
@Test void testSelectWithoutFromEmulationForHiveAndBigQuery() {
final String sql = "select 2 + 2";
final String expected = "SELECT 2 + 2";
sql(sql)
.withHive().ok(expected)
.withBigQuery().ok(expected);
}
/** Test case for
* <a href="https://issues.apache.org/jira/browse/CALCITE-2118">[CALCITE-2118]
* RelToSqlConverter should only generate "*" if field names match</a>. */
/** Tests that an explicit column alias is preserved, and that a projection
 * of un-aliased columns that matches the table collapses to
 * {@code SELECT *}. */
@Test void testPreserveAlias() {
final String sql = "select \"warehouse_class_id\" as \"id\",\n"
+ " \"description\"\n"
+ "from \"warehouse_class\"";
final String expected = ""
+ "SELECT \"warehouse_class_id\" AS \"id\", \"description\"\n"
+ "FROM \"foodmart\".\"warehouse_class\"";
sql(sql).ok(expected);
final String sql2 = "select \"warehouse_class_id\", \"description\"\n"
+ "from \"warehouse_class\"";
final String expected2 = "SELECT *\n"
+ "FROM \"foodmart\".\"warehouse_class\"";
sql(sql2).ok(expected2);
}
/** Tests that a projection listing the table's columns in a different order
 * is kept explicit rather than collapsed to {@code SELECT *}. */
@Test void testPreservePermutation() {
final String query = "select \"description\", \"warehouse_class_id\"\n"
+ "from \"warehouse_class\"";
final String expected = "SELECT \"description\", \"warehouse_class_id\"\n"
+ "FROM \"foodmart\".\"warehouse_class\"";
sql(query).ok(expected);
}
/** Tests field naming when aggregating over an aliased sub-query: the
 * user alias {@code mytable} is replaced by the generated alias "t0" while
 * the user-visible column alias "my-alias" survives. */
@Test void testFieldNamesWithAggregateSubQuery() {
final String query = "select mytable.\"city\",\n"
+ " sum(mytable.\"store_sales\") as \"my-alias\"\n"
+ "from (select c.\"city\", s.\"store_sales\"\n"
+ " from \"sales_fact_1997\" as s\n"
+ " join \"customer\" as c using (\"customer_id\")\n"
+ " group by c.\"city\", s.\"store_sales\") AS mytable\n"
+ "group by mytable.\"city\"";
final String expected = "SELECT \"t0\".\"city\","
+ " SUM(\"t0\".\"store_sales\") AS \"my-alias\"\n"
+ "FROM (SELECT \"customer\".\"city\","
+ " \"sales_fact_1997\".\"store_sales\"\n"
+ "FROM \"foodmart\".\"sales_fact_1997\"\n"
+ "INNER JOIN \"foodmart\".\"customer\""
+ " ON \"sales_fact_1997\".\"customer_id\""
+ " = \"customer\".\"customer_id\"\n"
+ "GROUP BY \"customer\".\"city\","
+ " \"sales_fact_1997\".\"store_sales\") AS \"t0\"\n"
+ "GROUP BY \"t0\".\"city\"";
sql(query).ok(expected);
}
/** Tests that unparsing routes SqlSelect nodes through the dialect's
 * {@code unparseCall}, so a custom dialect can override SELECT rendering;
 * the flag array records that the override was actually invoked. */
@Test void testUnparseSelectMustUseDialect() {
final String query = "select * from \"product\"";
final String expected = "SELECT *\n"
+ "FROM foodmart.product";
// Single-element array used as a mutable flag capturable by the anonymous class.
final boolean[] callsUnparseCallOnSqlSelect = {false};
final SqlDialect dialect = new SqlDialect(SqlDialect.EMPTY_CONTEXT) {
@Override public void unparseCall(SqlWriter writer, SqlCall call,
int leftPrec, int rightPrec) {
if (call instanceof SqlSelect) {
callsUnparseCallOnSqlSelect[0] = true;
}
super.unparseCall(writer, call, leftPrec, rightPrec);
}
};
sql(query).dialect(dialect).ok(expected);
assertThat("Dialect must be able to customize unparseCall() for SqlSelect",
callsUnparseCallOnSqlSelect[0], is(true));
}
/** Tests unparsing of a LATERAL sub-query that references the outer table;
 * the correlation variable appears as "$cor0" in the generated SQL. */
@Test void testCorrelate() {
final String sql = "select d.\"department_id\", d_plusOne "
+ "from \"department\" as d, "
+ " lateral (select d.\"department_id\" + 1 as d_plusOne"
+ " from (values(true)))";
final String expected = "SELECT \"$cor0\".\"department_id\", \"$cor0\".\"D_PLUSONE\"\n"
+ "FROM \"foodmart\".\"department\" AS \"$cor0\",\n"
+ "LATERAL (SELECT \"$cor0\".\"department_id\" + 1 AS \"D_PLUSONE\"\n"
+ "FROM (VALUES (TRUE)) AS \"t\" (\"EXPR$0\")) AS \"t0\"";
sql(sql).ok(expected);
}
/** Test case for
* <a href="https://issues.apache.org/jira/browse/CALCITE-3651">[CALCITE-3651]
* NullPointerException when convert relational algebra that correlates TableFunctionScan</a>. */
@Test void testLateralCorrelate() {
// LATERAL TABLE(...) correlating with the left input; previously threw NPE.
final String query = "select * from \"product\",\n"
+ "lateral table(RAMP(\"product\".\"product_id\"))";
final String expected = "SELECT *\n"
+ "FROM \"foodmart\".\"product\" AS \"$cor0\",\n"
+ "LATERAL (SELECT *\n"
+ "FROM TABLE(RAMP(\"$cor0\".\"product_id\"))) AS \"t\"";
sql(query).ok(expected);
}
/** Tests UNNEST over a COLLECT sub-query when the collected column has an
 * explicit alias; the alias ("DEPTID") is used for the unnested column. */
@Test void testUncollectExplicitAlias() {
final String sql = "select did + 1\n"
+ "from unnest(select collect(\"department_id\") as deptid"
+ " from \"department\") as t(did)";
final String expected = "SELECT \"DEPTID\" + 1\n"
+ "FROM UNNEST (SELECT COLLECT(\"department_id\") AS \"DEPTID\"\n"
+ "FROM \"foodmart\".\"department\") AS \"t0\" (\"DEPTID\")";
sql(sql).ok(expected);
}
/** Tests UNNEST over a COLLECT sub-query without an explicit alias; a
 * generated column name ("col_0") is used instead. */
@Test void testUncollectImplicitAlias() {
final String sql = "select did + 1\n"
+ "from unnest(select collect(\"department_id\") "
+ " from \"department\") as t(did)";
final String expected = "SELECT \"col_0\" + 1\n"
+ "FROM UNNEST (SELECT COLLECT(\"department_id\")\n"
+ "FROM \"foodmart\".\"department\") AS \"t0\" (\"col_0\")";
sql(sql).ok(expected);
}
/** Tests unparsing of COLLECT with a simple WITHIN GROUP (ORDER BY ... DESC). */
@Test void testWithinGroup1() {
final String query = "select \"product_class_id\", collect(\"net_weight\") "
+ "within group (order by \"net_weight\" desc) "
+ "from \"product\" group by \"product_class_id\"";
final String expected = "SELECT \"product_class_id\", COLLECT(\"net_weight\") "
+ "WITHIN GROUP (ORDER BY \"net_weight\" DESC)\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "GROUP BY \"product_class_id\"";
sql(query).ok(expected);
}
/** Tests WITHIN GROUP with multiple sort keys including NULLS LAST. */
@Test void testWithinGroup2() {
final String query = "select \"product_class_id\", collect(\"net_weight\") "
+ "within group (order by \"low_fat\", \"net_weight\" desc nulls last) "
+ "from \"product\" group by \"product_class_id\"";
final String expected = "SELECT \"product_class_id\", COLLECT(\"net_weight\") "
+ "WITHIN GROUP (ORDER BY \"low_fat\", \"net_weight\" DESC NULLS LAST)\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "GROUP BY \"product_class_id\"";
sql(query).ok(expected);
}
/** Tests a WITHIN GROUP aggregate alongside a plain aggregate (MIN) in the
 * same select list. */
@Test void testWithinGroup3() {
final String query = "select \"product_class_id\", collect(\"net_weight\") "
+ "within group (order by \"net_weight\" desc), "
+ "min(\"low_fat\")"
+ "from \"product\" group by \"product_class_id\"";
final String expected = "SELECT \"product_class_id\", COLLECT(\"net_weight\") "
+ "WITHIN GROUP (ORDER BY \"net_weight\" DESC), MIN(\"low_fat\")\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "GROUP BY \"product_class_id\"";
sql(query).ok(expected);
}
/** Tests WITHIN GROUP combined with FILTER; note the generated SQL puts
 * FILTER before WITHIN GROUP and wraps the predicate in {@code IS TRUE}. */
@Test void testWithinGroup4() {
final String query = "select \"product_class_id\", collect(\"net_weight\") "
+ "within group (order by \"net_weight\" desc) filter (where \"net_weight\" > 0)"
+ "from \"product\" group by \"product_class_id\"";
final String expected = "SELECT \"product_class_id\", COLLECT(\"net_weight\") "
+ "FILTER (WHERE \"net_weight\" > 0 IS TRUE) "
+ "WITHIN GROUP (ORDER BY \"net_weight\" DESC)\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "GROUP BY \"product_class_id\"";
sql(query).ok(expected);
}
/** Tests that {@code FORMAT JSON ENCODING UTF8/UTF16/UTF32} all unparse as
 * plain {@code FORMAT JSON} (the encoding suffix is dropped). */
@Test void testJsonValueExpressionOperator() {
String query = "select \"product_name\" format json, "
+ "\"product_name\" format json encoding utf8, "
+ "\"product_name\" format json encoding utf16, "
+ "\"product_name\" format json encoding utf32 from \"product\"";
final String expected = "SELECT \"product_name\" FORMAT JSON, "
+ "\"product_name\" FORMAT JSON, "
+ "\"product_name\" FORMAT JSON, "
+ "\"product_name\" FORMAT JSON\n"
+ "FROM \"foodmart\".\"product\"";
sql(query).ok(expected);
}
/** Tests that JSON_EXISTS unparses unchanged. */
@Test void testJsonExists() {
final String sql = "select json_exists(\"product_name\", 'lax $') from \"product\"";
final String expected = "SELECT JSON_EXISTS(\"product_name\", 'lax $')\n"
+ "FROM \"foodmart\".\"product\"";
sql(sql).ok(expected);
}
/** Tests that JSON_PRETTY unparses unchanged. */
@Test void testJsonPretty() {
final String sql = "select json_pretty(\"product_name\") from \"product\"";
final String expected = "SELECT JSON_PRETTY(\"product_name\")\n"
+ "FROM \"foodmart\".\"product\"";
sql(sql).ok(expected);
}
/** Tests that JSON_VALUE unparses unchanged. */
@Test void testJsonValue() {
String query = "select json_value(\"product_name\", 'lax $') from \"product\"";
final String expected = "SELECT JSON_VALUE(\"product_name\", 'lax $')\n"
+ "FROM \"foodmart\".\"product\"";
sql(query).ok(expected);
}
/** Tests that JSON_QUERY is unparsed with its default behavior clauses
 * made explicit (WITHOUT ARRAY WRAPPER, NULL ON EMPTY, NULL ON ERROR). */
@Test void testJsonQuery() {
String query = "select json_query(\"product_name\", 'lax $') from \"product\"";
final String expected = "SELECT JSON_QUERY(\"product_name\", 'lax $' "
+ "WITHOUT ARRAY WRAPPER NULL ON EMPTY NULL ON ERROR)\n"
+ "FROM \"foodmart\".\"product\"";
sql(query).ok(expected);
}
/** Tests that JSON_ARRAY is unparsed with its default null clause made
 * explicit (ABSENT ON NULL). */
@Test void testJsonArray() {
String query = "select json_array(\"product_name\", \"product_name\") from \"product\"";
final String expected = "SELECT JSON_ARRAY(\"product_name\", \"product_name\" ABSENT ON NULL)\n"
+ "FROM \"foodmart\".\"product\"";
sql(query).ok(expected);
}
/** Tests that JSON_ARRAYAGG is unparsed with an explicit ABSENT ON NULL. */
@Test void testJsonArrayAgg() {
String query = "select json_arrayagg(\"product_name\") from \"product\"";
final String expected = "SELECT JSON_ARRAYAGG(\"product_name\" ABSENT ON NULL)\n"
+ "FROM \"foodmart\".\"product\"";
sql(query).ok(expected);
}
/** Tests that JSON_OBJECT is unparsed with explicit KEY/VALUE syntax and an
 * explicit NULL ON NULL. */
@Test void testJsonObject() {
String query = "select json_object(\"product_name\": \"product_id\") from \"product\"";
final String expected = "SELECT "
+ "JSON_OBJECT(KEY \"product_name\" VALUE \"product_id\" NULL ON NULL)\n"
+ "FROM \"foodmart\".\"product\"";
sql(query).ok(expected);
}
/** Tests that JSON_OBJECTAGG is unparsed with explicit KEY/VALUE syntax and
 * an explicit NULL ON NULL. */
@Test void testJsonObjectAgg() {
String query = "select json_objectagg(\"product_name\": \"product_id\") from \"product\"";
final String expected = "SELECT "
+ "JSON_OBJECTAGG(KEY \"product_name\" VALUE \"product_id\" NULL ON NULL)\n"
+ "FROM \"foodmart\".\"product\"";
sql(query).ok(expected);
}
/** Tests the IS [NOT] JSON family of predicates; the bare {@code IS JSON}
 * form is normalized to {@code IS JSON VALUE} on unparse. */
@Test void testJsonPredicate() {
String query = "select "
+ "\"product_name\" is json, "
+ "\"product_name\" is json value, "
+ "\"product_name\" is json object, "
+ "\"product_name\" is json array, "
+ "\"product_name\" is json scalar, "
+ "\"product_name\" is not json, "
+ "\"product_name\" is not json value, "
+ "\"product_name\" is not json object, "
+ "\"product_name\" is not json array, "
+ "\"product_name\" is not json scalar "
+ "from \"product\"";
final String expected = "SELECT "
+ "\"product_name\" IS JSON VALUE, "
+ "\"product_name\" IS JSON VALUE, "
+ "\"product_name\" IS JSON OBJECT, "
+ "\"product_name\" IS JSON ARRAY, "
+ "\"product_name\" IS JSON SCALAR, "
+ "\"product_name\" IS NOT JSON VALUE, "
+ "\"product_name\" IS NOT JSON VALUE, "
+ "\"product_name\" IS NOT JSON OBJECT, "
+ "\"product_name\" IS NOT JSON ARRAY, "
+ "\"product_name\" IS NOT JSON SCALAR\n"
+ "FROM \"foodmart\".\"product\"";
sql(query).ok(expected);
}
/** Tests that a comma join is emitted as an explicit CROSS JOIN for Spark. */
@Test void testCrossJoinEmulationForSpark() {
final String sql = "select * from \"employee\", \"department\"";
final String expected = "SELECT *\n"
+ "FROM foodmart.employee\n"
+ "CROSS JOIN foodmart.department";
sql(sql).withSpark().ok(expected);
}
/** Tests that a comma join is emitted as {@code INNER JOIN ... ON TRUE}
 * for BigQuery. */
@Test void testCrossJoinEmulationForBigQuery() {
final String sql = "select * from \"employee\", \"department\"";
final String expected = "SELECT *\n"
+ "FROM foodmart.employee\n"
+ "INNER JOIN foodmart.department ON TRUE";
sql(sql).withBigQuery().ok(expected);
}
/** Tests that {@code SUBSTRING(x FROM n)} is rendered with comma syntax
 * ({@code SUBSTRING(x, n)}) for Spark. */
@Test void testSubstringInSpark() {
final String query = "select substring(\"brand_name\" from 2) "
+ "from \"product\"\n";
final String expected = "SELECT SUBSTRING(brand_name, 2)\n"
+ "FROM foodmart.product";
sql(query).withSpark().ok(expected);
}
/** Tests that {@code SUBSTRING(x FROM n FOR m)} is rendered with comma
 * syntax ({@code SUBSTRING(x, n, m)}) for Spark. */
@Test void testSubstringWithForInSpark() {
final String query = "select substring(\"brand_name\" from 2 for 3) "
+ "from \"product\"\n";
final String expected = "SELECT SUBSTRING(brand_name, 2, 3)\n"
+ "FROM foodmart.product";
sql(query).withSpark().ok(expected);
}
/** Tests that temporal {@code FLOOR(x TO MINUTE)} is rendered as
 * {@code DATE_TRUNC('MINUTE', x)} for Spark. */
@Test void testFloorInSpark() {
final String query = "select floor(\"hire_date\" TO MINUTE) "
+ "from \"employee\"";
final String expected = "SELECT DATE_TRUNC('MINUTE', hire_date)\n"
+ "FROM foodmart.employee";
sql(query).withSpark().ok(expected);
}
/** Tests that numeric FLOOR (no TO unit) stays plain FLOOR for Spark,
 * in contrast to the temporal form (see testFloorInSpark). */
@Test void testNumericFloorInSpark() {
final String query = "select floor(\"salary\") "
+ "from \"employee\"";
final String expected = "SELECT FLOOR(salary)\n"
+ "FROM foodmart.employee";
sql(query).withSpark().ok(expected);
}
/** Tests that JSON_STORAGE_SIZE unparses unchanged. */
@Test void testJsonStorageSize() {
String query = "select json_storage_size(\"product_name\") from \"product\"";
final String expected = "SELECT JSON_STORAGE_SIZE(\"product_name\")\n"
+ "FROM \"foodmart\".\"product\"";
sql(query).ok(expected);
}
/** Tests GROUP BY CUBE: default and Presto dialects keep CUBE syntax,
 * while Spark uses the {@code GROUP BY ... WITH CUBE} form. */
@Test void testCubeWithGroupBy() {
final String query = "select count(*) "
+ "from \"foodmart\".\"product\" "
+ "group by cube(\"product_id\",\"product_class_id\")";
final String expected = "SELECT COUNT(*)\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "GROUP BY CUBE(\"product_id\", \"product_class_id\")";
final String expectedInSpark = "SELECT COUNT(*)\n"
+ "FROM foodmart.product\n"
+ "GROUP BY product_id, product_class_id WITH CUBE";
final String expectedPresto = "SELECT COUNT(*)\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "GROUP BY CUBE(\"product_id\", \"product_class_id\")";
sql(query)
.ok(expected)
.withSpark()
.ok(expectedInSpark)
.withPresto()
.ok(expectedPresto);
}
/** Tests GROUP BY ROLLUP: default and Presto dialects keep ROLLUP syntax,
 * while Spark uses the {@code GROUP BY ... WITH ROLLUP} form. */
@Test void testRollupWithGroupBy() {
final String query = "select count(*) "
+ "from \"foodmart\".\"product\" "
+ "group by rollup(\"product_id\",\"product_class_id\")";
final String expected = "SELECT COUNT(*)\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "GROUP BY ROLLUP(\"product_id\", \"product_class_id\")";
final String expectedInSpark = "SELECT COUNT(*)\n"
+ "FROM foodmart.product\n"
+ "GROUP BY product_id, product_class_id WITH ROLLUP";
final String expectedPresto = "SELECT COUNT(*)\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "GROUP BY ROLLUP(\"product_id\", \"product_class_id\")";
sql(query)
.ok(expected)
.withSpark()
.ok(expectedInSpark)
.withPresto()
.ok(expectedPresto);
}
/** Tests how CASTs of string literals in comparisons are simplified or
 * rewritten per dialect: the default dialect drops the casts entirely,
 * BigQuery keeps them (INT64/DATETIME) and rewrites {@code ||} to CONCAT,
 * and MSSQL drops the casts but still uses CONCAT. */
@Test void testCastInStringOperandOfComparison() {
final String query = "select \"employee_id\" "
+ "from \"foodmart\".\"employee\" "
+ "where 10 = cast('10' as int) and \"birth_date\" = cast('1914-02-02' as date) or "
+ "\"hire_date\" = cast('1996-01-01 '||'00:00:00' as timestamp)";
final String expected = "SELECT \"employee_id\"\n"
+ "FROM \"foodmart\".\"employee\"\n"
+ "WHERE 10 = '10' AND \"birth_date\" = '1914-02-02' OR \"hire_date\" = '1996-01-01 ' || "
+ "'00:00:00'";
final String expectedBigQuery = "SELECT employee_id\n"
+ "FROM foodmart.employee\n"
+ "WHERE 10 = CAST('10' AS INT64) AND birth_date = '1914-02-02' OR hire_date = CAST"
+ "(CONCAT('1996-01-01 ', '00:00:00') AS DATETIME)";
final String expectedMssql = "SELECT [employee_id]\n"
+ "FROM [foodmart].[employee]\n"
+ "WHERE 10 = '10' AND [birth_date] = '1914-02-02' OR [hire_date] = CONCAT('1996-01-01 ', '00:00:00')";
sql(query)
.ok(expected)
.withBigQuery()
.ok(expectedBigQuery)
.withMssql()
.ok(expectedMssql);
}
/** Tests that two-argument REGEXP_SUBSTR unparses unchanged for BigQuery. */
@Test void testRegexSubstrFunction2Args() {
final String query = "select regexp_substr('choco chico chipo', '.*cho*p*c*?.*')"
+ "from \"foodmart\".\"product\"";
final String expected = "SELECT REGEXP_SUBSTR('choco chico chipo', '.*cho*p*c*?.*')\n"
+ "FROM foodmart.product";
sql(query)
.withBigQuery()
.ok(expected);
}
/** Tests that three-argument REGEXP_SUBSTR (with start position) unparses
 * unchanged for BigQuery. */
@Test void testRegexSubstrFunction3Args() {
final String query = "select \"product_id\", regexp_substr('choco chico chipo', "
+ "'.*cho*p*c*?.*', 7)\n"
+ "from \"foodmart\".\"product\" where \"product_id\" = 1";
final String expected = "SELECT product_id, REGEXP_SUBSTR('choco chico chipo', "
+ "'.*cho*p*c*?.*', 7)\n"
+ "FROM foodmart.product\n"
+ "WHERE product_id = 1";
sql(query)
.withBigQuery()
.ok(expected);
}
/** Tests four-argument REGEXP_SUBSTR (position and occurrence) for
 * BigQuery; the IN list in the WHERE clause is expanded to ORs. */
@Test void testRegexSubstrFunction4Args() {
final String query = "select \"product_id\", regexp_substr('chocolate chip cookies', 'c+.{2}',"
+ " 4, 2)\n"
+ "from \"foodmart\".\"product\" where \"product_id\" in (1, 2, 3)";
final String expected = "SELECT product_id, REGEXP_SUBSTR('chocolate chip "
+ "cookies', 'c+.{2}', 4, 2)\n"
+ "FROM foodmart.product\n"
+ "WHERE product_id = 1 OR product_id = 2 OR product_id = 3";
sql(query)
.withBigQuery()
.ok(expected);
}
/** Tests five-argument REGEXP_SUBSTR: the 'i' match-parameter argument is
 * folded into the pattern as an inline {@code (?i)} flag for BigQuery. */
@Test void testRegexSubstrFunction5Args() {
final String query = "select regexp_substr('chocolate Chip cookies', 'c+.{2}',"
+ " 1, 2, 'i')\n"
+ "from \"foodmart\".\"product\" where \"product_id\" in (1, 2, 3, 4)";
final String expected = "SELECT "
+ "REGEXP_SUBSTR('chocolate Chip cookies', '(?i)c+.{2}', 1, 2)\n"
+ "FROM foodmart.product\n"
+ "WHERE product_id = 1 OR product_id = 2 OR product_id = 3 OR product_id = 4";
sql(query)
.withBigQuery()
.ok(expected);
}
/** Tests five-argument REGEXP_SUBSTR whose pattern contains a backslash;
 * the backslash is doubled in the BigQuery output and the 'i' flag becomes
 * an inline {@code (?i)}. */
@Test void testRegexSubstrFunction5ArgswithBackSlash() {
final String query = "select regexp_substr('chocolate Chip cookies','[-\\_] V[0-9]+',"
+ "1,1,'i')\n"
+ "from \"foodmart\".\"product\" where \"product_id\" in (1, 2, 3, 4)";
final String expected = "SELECT "
+ "REGEXP_SUBSTR('chocolate Chip cookies', '(?i)[-\\\\_] V[0-9]+', 1, 1)\n"
+ "FROM foodmart.product\n"
+ "WHERE product_id = 1 OR product_id = 2 OR product_id = 3 OR product_id = 4";
sql(query)
.withBigQuery()
.ok(expected);
}
/** Tests RelNode-to-SQL conversion of {@code CURRENT_TIMESTAMP(6)} across
 * dialects: Calcite keeps it as-is; BigQuery, Spark and Hive each emulate
 * the precision via their timestamp-formatting functions. */
@Test void testTimestampFunctionRelToSql() {
final RelBuilder builder = relBuilder();
final RexNode currentTimestampRexNode = builder.call(SqlLibraryOperators.CURRENT_TIMESTAMP,
builder.literal(6));
final RelNode root = builder
.scan("EMP")
.project(builder.alias(currentTimestampRexNode, "CT"))
.build();
final String expectedSql = "SELECT CURRENT_TIMESTAMP(6) AS \"CT\"\n"
+ "FROM \"scott\".\"EMP\"";
final String expectedBigQuery = "SELECT CAST(FORMAT_TIMESTAMP('%F %H:%M:%E6S', "
+ "CURRENT_DATETIME()) AS DATETIME) AS CT\n"
+ "FROM scott.EMP";
final String expectedSpark = "SELECT CAST(DATE_FORMAT(CURRENT_TIMESTAMP, 'yyyy-MM-dd HH:mm:ss"
+ ".SSSSSS') AS TIMESTAMP) CT\nFROM scott.EMP";
final String expectedHive = "SELECT CAST(DATE_FORMAT(CURRENT_TIMESTAMP, 'yyyy-MM-dd HH:mm:ss"
+ ".ssssss') AS TIMESTAMP) CT\n"
+ "FROM scott.EMP";
assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBigQuery));
assertThat(toSql(root, DatabaseProduct.SPARK.getDialect()), isLinux(expectedSpark));
assertThat(toSql(root, DatabaseProduct.HIVE.getDialect()), isLinux(expectedHive));
}
/** Tests that JSON_TYPE unparses unchanged. */
@Test void testJsonType() {
String query = "select json_type(\"product_name\") from \"product\"";
final String expected = "SELECT "
+ "JSON_TYPE(\"product_name\")\n"
+ "FROM \"foodmart\".\"product\"";
sql(query).ok(expected);
}
/** Tests that JSON_DEPTH unparses unchanged. */
@Test void testJsonDepth() {
String query = "select json_depth(\"product_name\") from \"product\"";
final String expected = "SELECT "
+ "JSON_DEPTH(\"product_name\")\n"
+ "FROM \"foodmart\".\"product\"";
sql(query).ok(expected);
}
/** Tests that JSON_LENGTH unparses unchanged, with and without a path
 * argument. */
@Test void testJsonLength() {
String query = "select json_length(\"product_name\", 'lax $'), "
+ "json_length(\"product_name\") from \"product\"";
final String expected = "SELECT JSON_LENGTH(\"product_name\", 'lax $'), "
+ "JSON_LENGTH(\"product_name\")\n"
+ "FROM \"foodmart\".\"product\"";
sql(query).ok(expected);
}
/** Tests that JSON_KEYS unparses unchanged. */
@Test void testJsonKeys() {
String query = "select json_keys(\"product_name\", 'lax $') from \"product\"";
final String expected = "SELECT JSON_KEYS(\"product_name\", 'lax $')\n"
+ "FROM \"foodmart\".\"product\"";
sql(query).ok(expected);
}
/** Tests {@code date - INTERVAL -'1' MONTH}: Hive uses ADD_MONTHS with a
 * negated count, BigQuery uses DATE_SUB with a negative interval, and Spark
 * keeps interval arithmetic. */
@Test void testDateSubIntervalMonthFunction() {
final String query = "select \"birth_date\" - INTERVAL -'1' MONTH from \"employee\"";
final String expectedHive = "SELECT ADD_MONTHS(birth_date, -1)\n"
+ "FROM foodmart.employee";
final String expectedSpark = "SELECT birth_date - INTERVAL '1' MONTH\nFROM foodmart.employee";
final String expectedBigQuery = "SELECT DATE_SUB(birth_date, INTERVAL -1 MONTH)\n"
+ "FROM foodmart.employee";
sql(query)
.withHive()
.ok(expectedHive)
.withBigQuery()
.ok(expectedBigQuery)
.withSpark()
.ok(expectedSpark);
}
/** Tests {@code date + -10 * INTERVAL '1' MONTH}: Hive folds the multiplier
 * into ADD_MONTHS, BigQuery into DATE_ADD's interval, and Spark keeps the
 * original arithmetic. */
@Test void testDatePlusIntervalMonthFunctionWithArthOps() {
final String query = "select \"birth_date\" + -10 * INTERVAL '1' MONTH from \"employee\"";
final String expectedHive = "SELECT ADD_MONTHS(birth_date, -10)\n"
+ "FROM foodmart.employee";
final String expectedSpark = "SELECT birth_date + -10 * INTERVAL '1' MONTH\nFROM foodmart"
+ ".employee";
final String expectedBigQuery = "SELECT DATE_ADD(birth_date, INTERVAL -10 MONTH)\n"
+ "FROM foodmart.employee";
sql(query)
.withHive()
.ok(expectedHive)
.withBigQuery()
.ok(expectedBigQuery)
.withSpark()
.ok(expectedSpark);
}
/** Tests {@code timestamp + -10 * INTERVAL '1' MONTH} for BigQuery, which
 * wraps the addition in DATETIME_ADD with explicit DATETIME casts. */
@Test void testTimestampPlusIntervalMonthFunctionWithArthOps() {
final String query = "select \"hire_date\" + -10 * INTERVAL '1' MONTH from \"employee\"";
final String expectedBigQuery = "SELECT CAST(DATETIME_ADD(CAST(hire_date AS DATETIME), "
+ "INTERVAL "
+ "-10 MONTH) AS DATETIME)\n"
+ "FROM foodmart.employee";
sql(query)
.withBigQuery()
.ok(expectedBigQuery);
}
/** DATE + column * INTERVAL MONTH: Hive uses ADD_MONTHS(date, col * n),
 * BigQuery uses DATE_ADD with a computed month interval. */
@Test public void testDatePlusIntervalMonthFunctionWithCol() {
  String query = "select \"birth_date\" + \"store_id\" * INTERVAL '10' MONTH from \"employee\"";
  final String expectedHive = "SELECT ADD_MONTHS(birth_date, store_id * 10)\n"
      + "FROM foodmart.employee";
  final String expectedSpark = "SELECT birth_date + store_id * INTERVAL '10' MONTH\nFROM "
      + "foodmart.employee";
  final String expectedBigQuery = "SELECT DATE_ADD(birth_date, INTERVAL store_id * 10 MONTH)\n"
      + "FROM foodmart.employee";
  sql(query)
      .withHive()
      .ok(expectedHive)
      .withBigQuery()
      .ok(expectedBigQuery)
      .withSpark()
      .ok(expectedSpark);
}
/** DATE + literal * INTERVAL MONTH: the product (10 * 2) is kept as an
 * expression inside ADD_MONTHS (Hive) and DATE_ADD (BigQuery). */
@Test public void testDatePlusIntervalMonthFunctionWithArithOp() {
  String query = "select \"birth_date\" + 10 * INTERVAL '2' MONTH from \"employee\"";
  final String expectedHive = "SELECT ADD_MONTHS(birth_date, 10 * 2)\n"
      + "FROM foodmart.employee";
  final String expectedSpark = "SELECT birth_date + 10 * INTERVAL '2' MONTH\nFROM foodmart"
      + ".employee";
  final String expectedBigQuery = "SELECT DATE_ADD(birth_date, INTERVAL 10 * 2 MONTH)\n"
      + "FROM foodmart.employee";
  sql(query)
      .withHive()
      .ok(expectedHive)
      .withBigQuery()
      .ok(expectedBigQuery)
      .withSpark()
      .ok(expectedSpark);
}
/** DATE + INTERVAL '1' DAY across dialects: Hive DATE_ADD (cast back to DATE),
 * BigQuery DATE_ADD with INTERVAL syntax, Snowflake DATEADD, Spark unchanged. */
@Test public void testDatePlusColumnFunction() {
  String query = "select \"birth_date\" + INTERVAL '1' DAY from \"employee\"";
  final String expectedHive = "SELECT CAST(DATE_ADD(birth_date, 1) AS DATE)\n"
      + "FROM foodmart.employee";
  final String expectedSpark = "SELECT birth_date + INTERVAL '1' DAY\nFROM foodmart.employee";
  final String expectedBigQuery = "SELECT DATE_ADD(birth_date, INTERVAL 1 DAY)\n"
      + "FROM foodmart.employee";
  final String expectedSnowflake = "SELECT DATEADD(DAY, 1, \"birth_date\")\n"
      + "FROM \"foodmart\".\"employee\"";
  sql(query)
      .withHive()
      .ok(expectedHive)
      .withBigQuery()
      .ok(expectedBigQuery)
      .withSpark()
      .ok(expectedSpark)
      .withSnowflake()
      .ok(expectedSnowflake);
}
/** DATE - INTERVAL '1' DAY: Hive DATE_SUB, BigQuery DATE_SUB with INTERVAL,
 * Snowflake DATEADD with a negative count, Spark unchanged. */
@Test public void testDateSubColumnFunction() {
  String query = "select \"birth_date\" - INTERVAL '1' DAY from \"employee\"";
  final String expectedHive = "SELECT CAST(DATE_SUB(birth_date, 1) AS DATE)\n"
      + "FROM foodmart.employee";
  final String expectedSpark = "SELECT birth_date - INTERVAL '1' DAY\nFROM foodmart.employee";
  final String expectedBigQuery = "SELECT DATE_SUB(birth_date, INTERVAL 1 DAY)\n"
      + "FROM foodmart.employee";
  final String expectedSnowflake = "SELECT DATEADD(DAY, -1, \"birth_date\")\n"
      + "FROM \"foodmart\".\"employee\"";
  sql(query)
      .withHive()
      .ok(expectedHive)
      .withBigQuery()
      .ok(expectedBigQuery)
      .withSpark()
      .ok(expectedSpark)
      .withSnowflake()
      .ok(expectedSnowflake);
}
/** DATE literal + INTERVAL '1' DAY: same per-dialect rewrites as for a date
 * column, applied to a DATE literal operand. */
@Test public void testDateValuePlusColumnFunction() {
  String query = "select DATE'2018-01-01' + INTERVAL '1' DAY from \"employee\"";
  final String expectedHive = "SELECT CAST(DATE_ADD(DATE '2018-01-01', 1) AS DATE)\n"
      + "FROM foodmart.employee";
  final String expectedSpark = "SELECT DATE '2018-01-01' + INTERVAL '1' DAY\nFROM foodmart"
      + ".employee";
  final String expectedBigQuery = "SELECT DATE_ADD(DATE '2018-01-01', INTERVAL 1 DAY)\n"
      + "FROM foodmart.employee";
  final String expectedSnowflake = "SELECT DATEADD(DAY, 1, DATE '2018-01-01')\n"
      + "FROM \"foodmart\".\"employee\"";
  sql(query)
      .withHive()
      .ok(expectedHive)
      .withBigQuery()
      .ok(expectedBigQuery)
      .withSpark()
      .ok(expectedSpark)
      .withSnowflake()
      .ok(expectedSnowflake);
}
/** DATE literal - INTERVAL '1' DAY: subtraction variant of the DATE-literal
 * interval rewrites (DATE_SUB / DATEADD(-1)). */
@Test public void testDateValueSubColumnFunction() {
  String query = "select DATE'2018-01-01' - INTERVAL '1' DAY from \"employee\"";
  final String expectedHive = "SELECT CAST(DATE_SUB(DATE '2018-01-01', 1) AS DATE)\n"
      + "FROM foodmart.employee";
  final String expectedSpark = "SELECT DATE '2018-01-01' - INTERVAL '1' DAY\n"
      + "FROM foodmart.employee";
  final String expectedBigQuery = "SELECT DATE_SUB(DATE '2018-01-01', INTERVAL 1 DAY)\n"
      + "FROM foodmart.employee";
  final String expectedSnowflake = "SELECT DATEADD(DAY, -1, DATE '2018-01-01')\n"
      + "FROM \"foodmart\".\"employee\"";
  sql(query)
      .withHive()
      .ok(expectedHive)
      .withBigQuery()
      .ok(expectedBigQuery)
      .withSpark()
      .ok(expectedSpark)
      .withSnowflake()
      .ok(expectedSnowflake);
}
/** DATE + INTERVAL '2' DAY (multi-day literal): confirms the interval count
 * is carried through each dialect's DATE_ADD/DATEADD form. */
@Test public void testDateIntColumnFunction() {
  String query = "select \"birth_date\" + INTERVAL '2' day from \"employee\"";
  final String expectedHive = "SELECT CAST(DATE_ADD(birth_date, 2) AS DATE)\n"
      + "FROM foodmart.employee";
  final String expectedSpark = "SELECT birth_date + INTERVAL '2' DAY\nFROM foodmart.employee";
  final String expectedBigQuery = "SELECT DATE_ADD(birth_date, INTERVAL 2 DAY)\n"
      + "FROM foodmart.employee";
  final String expectedSnowflake = "SELECT DATEADD(DAY, 2, \"birth_date\")\n"
      + "FROM \"foodmart\".\"employee\"";
  sql(query)
      .withHive()
      .ok(expectedHive)
      .withBigQuery()
      .ok(expectedBigQuery)
      .withSpark()
      .ok(expectedSpark)
      .withSnowflake()
      .ok(expectedSnowflake);
}
@Test public void testIntervalMinute() {
  // TIMESTAMP + INTERVAL MINUTE maps to TIMESTAMP_ADD on BigQuery.
  final String q = "select cast(\"birth_date\" as timestamp) + INTERVAL\n"
      + "'2' minute from \"employee\"";
  final String wantBigQuery = "SELECT TIMESTAMP_ADD(CAST(birth_date AS "
      + "DATETIME), INTERVAL 2 MINUTE)\n"
      + "FROM foodmart.employee";
  sql(q)
      .withBigQuery()
      .ok(wantBigQuery);
}
@Test public void testIntervalHour() {
  // TIMESTAMP + INTERVAL HOUR maps to TIMESTAMP_ADD on BigQuery.
  final String q = "select cast(\"birth_date\" as timestamp) + INTERVAL\n"
      + "'2' hour from \"employee\"";
  final String wantBigQuery = "SELECT TIMESTAMP_ADD(CAST(birth_date AS "
      + "DATETIME), INTERVAL 2 HOUR)\n"
      + "FROM foodmart.employee";
  sql(q)
      .withBigQuery()
      .ok(wantBigQuery);
}
@Test public void testIntervalSecond() {
  // TIMESTAMP + INTERVAL SECOND maps to TIMESTAMP_ADD on BigQuery.
  final String q = "select cast(\"birth_date\" as timestamp) + INTERVAL '2'\n"
      + "second from \"employee\"";
  final String wantBigQuery = "SELECT TIMESTAMP_ADD(CAST(birth_date AS"
      + " DATETIME), INTERVAL 2 SECOND)\n"
      + "FROM foodmart.employee";
  sql(q)
      .withBigQuery()
      .ok(wantBigQuery);
}
/** DATE - INTERVAL '2' DAY: subtraction with a multi-day literal interval
 * across Hive, Spark, BigQuery and Snowflake. */
@Test public void testDateSubInterFunction() {
  String query = "select \"birth_date\" - INTERVAL '2' day from \"employee\"";
  final String expectedHive = "SELECT CAST(DATE_SUB(birth_date, 2) AS DATE)\n"
      + "FROM foodmart.employee";
  final String expectedSpark = "SELECT birth_date - INTERVAL '2' DAY"
      + "\nFROM foodmart.employee";
  final String expectedBigQuery = "SELECT DATE_SUB(birth_date, INTERVAL 2 DAY)\n"
      + "FROM foodmart.employee";
  final String expectedSnowflake = "SELECT DATEADD(DAY, -2, \"birth_date\")\n"
      + "FROM \"foodmart\".\"employee\"";
  sql(query)
      .withHive()
      .ok(expectedHive)
      .withBigQuery()
      .ok(expectedBigQuery)
      .withSpark()
      .ok(expectedSpark)
      .withSnowflake()
      .ok(expectedSnowflake);
}
/** DATE + column * INTERVAL '1' DAY: the column becomes the day count in
 * Hive/BigQuery; Snowflake falls back to plain date arithmetic. */
@Test public void testDatePlusColumnVariFunction() {
  String query = "select \"birth_date\" + \"store_id\" * INTERVAL '1' DAY from \"employee\"";
  final String expectedHive = "SELECT CAST(DATE_ADD(birth_date, store_id) AS DATE)\n"
      + "FROM foodmart.employee";
  final String expectedSpark = "SELECT birth_date + store_id * INTERVAL '1' DAY"
      + "\nFROM foodmart.employee";
  final String expectedBigQuery = "SELECT DATE_ADD(birth_date, INTERVAL store_id DAY)\n"
      + "FROM foodmart.employee";
  final String expectedSnowflake = "SELECT (\"birth_date\" + \"store_id\")\n"
      + "FROM \"foodmart\".\"employee\"";
  sql(query)
      .withHive()
      .ok(expectedHive)
      .withBigQuery()
      .ok(expectedBigQuery)
      .withSpark()
      .ok(expectedSpark)
      .withSnowflake()
      .ok(expectedSnowflake);
}
/** DATE + INTERVAL '1' DAY * column (interval on the left of the product):
 * Hive/BigQuery still reduce to a column day count; Snowflake keeps the
 * '1' * column product inside DATEADD. */
@Test public void testDatePlusIntervalColumnFunction() {
  String query = "select \"birth_date\" + INTERVAL '1' DAY * \"store_id\" from \"employee\"";
  final String expectedHive = "SELECT CAST(DATE_ADD(birth_date, store_id) AS DATE)\n"
      + "FROM foodmart.employee";
  final String expectedSpark = "SELECT birth_date + INTERVAL '1' DAY * store_id\nFROM foodmart"
      + ".employee";
  final String expectedBigQuery = "SELECT DATE_ADD(birth_date, INTERVAL store_id DAY)\n"
      + "FROM foodmart.employee";
  final String expectedSnowflake = "SELECT DATEADD(DAY, '1' * \"store_id\", \"birth_date\")\n"
      + "FROM \"foodmart\".\"employee\"";
  sql(query)
      .withHive()
      .ok(expectedHive)
      .withBigQuery()
      .ok(expectedBigQuery)
      .withSpark()
      .ok(expectedSpark)
      .withSnowflake()
      .ok(expectedSnowflake);
}
/** DATE + INTERVAL '1' DAY * literal: the product collapses to a literal day
 * count on Hive/BigQuery; Snowflake keeps '1' * 10 inside DATEADD. */
@Test public void testDatePlusIntervalIntFunction() {
  String query = "select \"birth_date\" + INTERVAL '1' DAY * 10 from \"employee\"";
  final String expectedHive = "SELECT CAST(DATE_ADD(birth_date, 10) AS DATE)\n"
      + "FROM foodmart.employee";
  final String expectedSpark = "SELECT birth_date + INTERVAL '1' DAY * 10\n"
      + "FROM foodmart.employee";
  final String expectedBigQuery = "SELECT DATE_ADD(birth_date, INTERVAL 10 DAY)\n"
      + "FROM foodmart.employee";
  final String expectedSnowflake = "SELECT DATEADD(DAY, '1' * 10, \"birth_date\")\n"
      + "FROM \"foodmart\".\"employee\"";
  sql(query)
      .withHive()
      .ok(expectedHive)
      .withBigQuery()
      .ok(expectedBigQuery)
      .withSpark()
      .ok(expectedSpark)
      .withSnowflake()
      .ok(expectedSnowflake);
}
/** DATE - column * INTERVAL '1' DAY: subtraction variant of the variable day
 * count; Snowflake again falls back to plain arithmetic. */
@Test public void testDateSubColumnVariFunction() {
  String query = "select \"birth_date\" - \"store_id\" * INTERVAL '1' DAY from \"employee\"";
  final String expectedHive = "SELECT CAST(DATE_SUB(birth_date, store_id) AS DATE)\n"
      + "FROM foodmart.employee";
  final String expectedSpark = "SELECT birth_date - store_id * INTERVAL '1' DAY"
      + "\nFROM foodmart.employee";
  final String expectedBigQuery = "SELECT DATE_SUB(birth_date, INTERVAL store_id DAY)\n"
      + "FROM foodmart.employee";
  final String expectedSnowflake = "SELECT (\"birth_date\" - \"store_id\")\n"
      + "FROM \"foodmart\".\"employee\"";
  sql(query)
      .withHive()
      .ok(expectedHive)
      .withBigQuery()
      .ok(expectedBigQuery)
      .withSpark()
      .ok(expectedSpark)
      .withSnowflake()
      .ok(expectedSnowflake);
}
/** DATE literal + column * INTERVAL '1' DAY: variable day count applied to a
 * DATE literal operand. */
@Test public void testDateValuePlusColumnVariFunction() {
  String query = "select DATE'2018-01-01' + \"store_id\" * INTERVAL '1' DAY from \"employee\"";
  final String expectedHive = "SELECT CAST(DATE_ADD(DATE '2018-01-01', store_id) AS DATE)\n"
      + "FROM foodmart.employee";
  final String expectedSpark = "SELECT DATE '2018-01-01' + store_id * INTERVAL '1' DAY\nFROM "
      + "foodmart.employee";
  final String expectedBigQuery = "SELECT DATE_ADD(DATE '2018-01-01', INTERVAL store_id DAY)\n"
      + "FROM foodmart.employee";
  final String expectedSnowflake = "SELECT (DATE '2018-01-01' + \"store_id\")\n"
      + "FROM \"foodmart\".\"employee\"";
  sql(query)
      .withHive()
      .ok(expectedHive)
      .withBigQuery()
      .ok(expectedBigQuery)
      .withSpark()
      .ok(expectedSpark)
      .withSnowflake()
      .ok(expectedSnowflake);
}
/** DATE + column * 11 * INTERVAL '1' DAY: a compound arithmetic day count is
 * preserved as {@code store_id * 11} inside each dialect's rewrite. */
@Test public void testDatePlusColumnFunctionWithArithOp() {
  String query = "select \"birth_date\" + \"store_id\" *11 * INTERVAL '1' DAY from \"employee\"";
  final String expectedHive = "SELECT CAST(DATE_ADD(birth_date, store_id * 11) AS DATE)\n"
      + "FROM foodmart.employee";
  final String expectedSpark = "SELECT birth_date + store_id * 11 * INTERVAL '1' DAY\nFROM "
      + "foodmart.employee";
  final String expectedBigQuery = "SELECT DATE_ADD(birth_date, INTERVAL store_id * 11 DAY)\n"
      + "FROM foodmart.employee";
  final String expectedSnowflake = "SELECT (\"birth_date\" + \"store_id\" * 11)\n"
      + "FROM \"foodmart\".\"employee\"";
  sql(query)
      .withHive()
      .ok(expectedHive)
      .withBigQuery()
      .ok(expectedBigQuery)
      .withSpark()
      .ok(expectedSpark)
      .withSnowflake()
      .ok(expectedSnowflake);
}
/** DATE + column * INTERVAL '11' DAY: the interval's own count (11) is folded
 * into the product {@code store_id * 11}. */
@Test public void testDatePlusColumnFunctionVariWithArithOp() {
  String query = "select \"birth_date\" + \"store_id\" * INTERVAL '11' DAY from \"employee\"";
  final String expectedHive = "SELECT CAST(DATE_ADD(birth_date, store_id * 11) AS DATE)\n"
      + "FROM foodmart.employee";
  final String expectedSpark = "SELECT birth_date + store_id * INTERVAL '11' DAY\nFROM "
      + "foodmart.employee";
  final String expectedBigQuery = "SELECT DATE_ADD(birth_date, INTERVAL store_id * 11 DAY)\n"
      + "FROM foodmart.employee";
  final String expectedSnowflake = "SELECT (\"birth_date\" + \"store_id\" * 11)\n"
      + "FROM \"foodmart\".\"employee\"";
  sql(query)
      .withHive()
      .ok(expectedHive)
      .withBigQuery()
      .ok(expectedBigQuery)
      .withSpark()
      .ok(expectedSpark)
      .withSnowflake()
      .ok(expectedSnowflake);
}
/** DATE - column * INTERVAL '11' DAY: subtraction variant of the folded
 * {@code store_id * 11} day count. */
@Test public void testDateSubColumnFunctionVariWithArithOp() {
  String query = "select \"birth_date\" - \"store_id\" * INTERVAL '11' DAY from \"employee\"";
  final String expectedHive = "SELECT CAST(DATE_SUB(birth_date, store_id * 11) AS DATE)\n"
      + "FROM foodmart.employee";
  final String expectedSpark = "SELECT birth_date - store_id * INTERVAL '11' DAY\nFROM "
      + "foodmart.employee";
  final String expectedBigQuery = "SELECT DATE_SUB(birth_date, INTERVAL store_id * 11 DAY)\n"
      + "FROM foodmart.employee";
  final String expectedSnowflake = "SELECT (\"birth_date\" - \"store_id\" * 11)\n"
      + "FROM \"foodmart\".\"employee\"";
  sql(query)
      .withHive()
      .ok(expectedHive)
      .withBigQuery()
      .ok(expectedBigQuery)
      .withSpark()
      .ok(expectedSpark)
      .withSnowflake()
      .ok(expectedSnowflake);
}
/** DATE + 10 * INTERVAL '2' DAY: literal product {@code 10 * 2} is kept as an
 * expression in the rewritten day count. */
@Test public void testDatePlusIntervalDayFunctionWithArithOp() {
  String query = "select \"birth_date\" + 10 * INTERVAL '2' DAY from \"employee\"";
  final String expectedHive = "SELECT CAST(DATE_ADD(birth_date, 10 * 2) AS DATE)\n"
      + "FROM foodmart.employee";
  final String expectedSpark = "SELECT birth_date + 10 * INTERVAL '2' DAY\n"
      + "FROM foodmart.employee";
  final String expectedBigQuery = "SELECT DATE_ADD(birth_date, INTERVAL 10 * 2 DAY)\n"
      + "FROM foodmart.employee";
  final String expectedSnowflake = "SELECT (\"birth_date\" + 10 * 2)\n"
      + "FROM \"foodmart\".\"employee\"";
  sql(query)
      .withHive()
      .ok(expectedHive)
      .withBigQuery()
      .ok(expectedBigQuery)
      .withSpark()
      .ok(expectedSpark)
      .withSnowflake()
      .ok(expectedSnowflake);
}
/** INTERVAL '1' DAY + DATE (interval as the left operand): operands are
 * normalized so each dialect emits the same form as date + interval. */
@Test public void testIntervalDayPlusDateFunction() {
  String query = "select INTERVAL '1' DAY + \"birth_date\" from \"employee\"";
  final String expectedHive = "SELECT CAST(DATE_ADD(birth_date, 1) AS DATE)\n"
      + "FROM foodmart.employee";
  final String expectedSpark = "SELECT birth_date + INTERVAL '1' DAY\n"
      + "FROM foodmart.employee";
  final String expectedBigQuery = "SELECT DATE_ADD(birth_date, INTERVAL 1 DAY)\n"
      + "FROM foodmart.employee";
  final String expectedSnowflake = "SELECT DATEADD(DAY, 1, \"birth_date\")\n"
      + "FROM \"foodmart\".\"employee\"";
  sql(query)
      .withHive()
      .ok(expectedHive)
      .withBigQuery()
      .ok(expectedBigQuery)
      .withSpark()
      .ok(expectedSpark)
      .withSnowflake()
      .ok(expectedSnowflake);
}
@Test public void testIntervalHourToSecond() {
  // An HOUR TO SECOND interval (06:10:30 = 22230 s) is flattened to a SECOND
  // count in BigQuery's TIMESTAMP_ADD/TIMESTAMP_SUB.
  final String q = "SELECT CURRENT_TIMESTAMP + INTERVAL '06:10:30' HOUR TO SECOND,"
      + "CURRENT_TIMESTAMP - INTERVAL '06:10:30' HOUR TO SECOND "
      + "FROM \"employee\"";
  final String wantBigQuery = "SELECT TIMESTAMP_ADD(CURRENT_DATETIME(), INTERVAL 22230 SECOND), "
      + "TIMESTAMP_SUB(CURRENT_DATETIME(), INTERVAL 22230 SECOND)\n"
      + "FROM foodmart.employee";
  sql(q)
      .withBigQuery()
      .ok(wantBigQuery);
}
/** Builds a MINUS RexCall over a TIMESTAMP WITH LOCAL TIME ZONE literal and a
 * MICROSECOND interval directly with RexBuilder, and checks BigQuery unparses
 * it as TIMESTAMP_SUB. */
@Test public void testUnparseMinusCallWithReturnTypeOfTimestampWithZoneToTimestampSub() {
  final RelBuilder relBuilder = relBuilder();
  final RexBuilder rexBuilder = relBuilder.getRexBuilder();
  // 2022-02-18 08:23:45, precision 0.
  final RexLiteral literalTimestampLTZ =
      rexBuilder.makeTimestampWithLocalTimeZoneLiteral(
          new TimestampString(2022, 2, 18, 8, 23, 45), 0);
  // 1000 microseconds expressed as a MICROSECOND interval literal.
  final RexLiteral intervalLiteral = rexBuilder.makeIntervalLiteral(new BigDecimal(1000),
      new SqlIntervalQualifier(MICROSECOND, null, SqlParserPos.ZERO));
  final RexNode minusCall =
      relBuilder.call(SqlStdOperatorTable.MINUS, literalTimestampLTZ, intervalLiteral);
  // Single-row VALUES so the projection of the call can be rendered.
  final RelNode root = relBuilder
      .values(new String[] {"c"}, 1)
      .project(minusCall)
      .build();
  final String expectedBigQuery = "SELECT TIMESTAMP_SUB(TIMESTAMP '2022-02-18 08:23:45'"
      + ", INTERVAL 1 MICROSECOND) AS `$f0`";
  assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBigQuery));
}
/** PLUS counterpart of the previous test: the same TIMESTAMP WITH LOCAL TIME
 * ZONE + MICROSECOND interval call must unparse as TIMESTAMP_ADD on BigQuery. */
@Test public void testUnparsePlusCallWithReturnTypeOfTimestampWithZoneToTimestampAdd() {
  final RelBuilder relBuilder = relBuilder();
  final RexBuilder rexBuilder = relBuilder.getRexBuilder();
  // 2022-02-18 08:23:45, precision 0.
  final RexLiteral literalTimestampLTZ =
      rexBuilder.makeTimestampWithLocalTimeZoneLiteral(
          new TimestampString(2022, 2, 18, 8, 23, 45), 0);
  // 1000 microseconds expressed as a MICROSECOND interval literal.
  final RexLiteral intervalLiteral = rexBuilder.makeIntervalLiteral(new BigDecimal(1000),
      new SqlIntervalQualifier(MICROSECOND, null, SqlParserPos.ZERO));
  final RexNode plusCall =
      relBuilder.call(SqlStdOperatorTable.PLUS, literalTimestampLTZ, intervalLiteral);
  // Single-row VALUES so the projection of the call can be rendered.
  final RelNode root = relBuilder
      .values(new String[] {"c"}, 1)
      .project(plusCall)
      .build();
  final String expectedBigQuery = "SELECT TIMESTAMP_ADD(TIMESTAMP '2022-02-18 08:23:45',"
      + " INTERVAL 1 MICROSECOND) AS `$f0`";
  assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBigQuery));
}
@Test public void truncateFunctionEmulationForBigQuery() {
  // Two-argument TRUNCATE is emitted as TRUNC on BigQuery.
  final String q = "select truncate(2.30259, 3) from \"employee\"";
  final String wantBigQuery = "SELECT TRUNC(2.30259, 3)\n"
      + "FROM foodmart.employee";
  sql(q)
      .withBigQuery().ok(wantBigQuery);
}
@Test public void truncateFunctionWithSingleOperandEmulationForBigQuery() {
  // Single-argument TRUNCATE is also emitted as TRUNC on BigQuery.
  final String q = "select truncate(2.30259) from \"employee\"";
  final String wantBigQuery = "SELECT TRUNC(2.30259)\n"
      + "FROM foodmart.employee";
  sql(q)
      .withBigQuery().ok(wantBigQuery);
}
/** EXTRACT(YEAR ...) emulation: Hive/Spark/MSSQL rewrite to their YEAR()
 * function, BigQuery keeps the EXTRACT syntax. */
@Test public void extractFunctionEmulation() {
  String query = "select extract(year from \"hire_date\") from \"employee\"";
  final String expectedHive = "SELECT YEAR(hire_date)\n"
      + "FROM foodmart.employee";
  final String expectedSpark = "SELECT YEAR(hire_date)\n"
      + "FROM foodmart.employee";
  final String expectedBigQuery = "SELECT EXTRACT(YEAR FROM hire_date)\n"
      + "FROM foodmart.employee";
  final String expectedMsSql = "SELECT YEAR([hire_date])\n"
      + "FROM [foodmart].[employee]";
  sql(query)
      .withHive()
      .ok(expectedHive)
      .withSpark()
      .ok(expectedSpark)
      .withBigQuery()
      .ok(expectedBigQuery)
      .withMssql()
      .ok(expectedMsSql);
}
@Test public void extractMinuteFunctionEmulation() {
  // EXTRACT(MINUTE ...) stays EXTRACT on BigQuery but becomes DATEPART on MSSQL.
  final String q = "select extract(minute from \"hire_date\") from \"employee\"";
  final String wantBigQuery = "SELECT EXTRACT(MINUTE FROM hire_date)\n"
      + "FROM foodmart.employee";
  final String wantMsSql = "SELECT DATEPART(MINUTE, [hire_date])\n"
      + "FROM [foodmart].[employee]";
  sql(q)
      .withBigQuery()
      .ok(wantBigQuery)
      .withMssql()
      .ok(wantMsSql);
}
@Test public void extractSecondFunctionEmulation() {
  // EXTRACT(SECOND ...) stays EXTRACT on BigQuery but becomes DATEPART on MSSQL.
  final String q = "select extract(second from \"hire_date\") from \"employee\"";
  final String wantBigQuery = "SELECT EXTRACT(SECOND FROM hire_date)\n"
      + "FROM foodmart.employee";
  final String wantMsSql = "SELECT DATEPART(SECOND, [hire_date])\n"
      + "FROM [foodmart].[employee]";
  sql(q)
      .withBigQuery()
      .ok(wantBigQuery)
      .withMssql()
      .ok(wantMsSql);
}
@Test public void selectWithoutFromEmulationForHiveAndSparkAndBigquery() {
  // A FROM-less SELECT needs no dummy table on Hive, Spark or BigQuery.
  final String q = "select 2 + 2";
  final String want = "SELECT 2 + 2";
  sql(q)
      .withHive()
      .ok(want)
      .withSpark()
      .ok(want)
      .withBigQuery()
      .ok(want);
}
/** CURRENT_TIMESTAMP: Hive/Spark keep the niladic form (aliased with their
 * backtick quoting), BigQuery rewrites to CURRENT_DATETIME(). */
@Test public void currentTimestampFunctionForHiveAndSparkAndBigquery() {
  String query = "select current_timestamp";
  final String expectedHiveQuery = "SELECT CURRENT_TIMESTAMP `CURRENT_TIMESTAMP`";
  final String expectedSparkQuery = "SELECT CURRENT_TIMESTAMP `CURRENT_TIMESTAMP`";
  final String expectedBigQuery = "SELECT CURRENT_DATETIME() AS CURRENT_TIMESTAMP";
  sql(query)
      .withHiveIdentifierQuoteString()
      .ok(expectedHiveQuery)
      .withSparkIdentifierQuoteString()
      .ok(expectedSparkQuery)
      .withBigQuery()
      .ok(expectedBigQuery);
}
/** String concatenation operator {@code ||}: rewritten to CONCAT on Hive,
 * BigQuery and MSSQL; Spark keeps the {@code ||} operator. */
@Test public void concatFunctionEmulationForHiveAndSparkAndBigQuery() {
  String query = "select 'foo' || 'bar' from \"employee\"";
  final String expected = "SELECT CONCAT('foo', 'bar')\n"
      + "FROM foodmart.employee";
  final String mssql = "SELECT CONCAT('foo', 'bar')\n"
      + "FROM [foodmart].[employee]";
  final String expectedSpark = "SELECT 'foo' || 'bar'\n"
      + "FROM foodmart.employee";
  sql(query)
      .withHive()
      .ok(expected)
      .withSpark()
      .ok(expectedSpark)
      .withBigQuery()
      .ok(expected)
      .withMssql()
      .ok(mssql);
}
@Test void testJsonRemove() {
  // JSON_REMOVE is passed through unchanged by the default (Calcite) dialect.
  final String q = "select json_remove(\"product_name\", '$[0]') from \"product\"";
  final String want = "SELECT JSON_REMOVE(\"product_name\", '$[0]')\n"
      + "FROM \"foodmart\".\"product\"";
  sql(q).ok(want);
}
// TODO(review): the test below is commented out but the reason for disabling
// it is not recorded — add the reason (or delete the dead code).
/*
@Test void testUnionAllWithNoOperandsUsingOracleDialect() {
String query = "select A.\"department_id\" "
+ "from \"foodmart\".\"employee\" A "
+ " where A.\"department_id\" = ( select min( A.\"department_id\") from \"foodmart\""
+ ".\"department\" B where 1=2 )";
final String expected = "SELECT \"employee\".\"department_id\"\n"
+ "FROM \"foodmart\".\"employee\"\n"
+ "INNER JOIN (SELECT \"t1\".\"department_id\" \"department_id0\", MIN(\"t1\""
+ ".\"department_id\") \"EXPR$0\"\n"
+ "FROM (SELECT NULL \"department_id\", NULL \"department_description\"\nFROM "
+ "\"DUAL\"\nWHERE 1 = 0) \"t\",\n"
+ "(SELECT \"department_id\"\nFROM \"foodmart\".\"employee\"\nGROUP BY \"department_id\")"
+ " \"t1\"\n"
+ "GROUP BY \"t1\".\"department_id\") \"t3\" ON \"employee\".\"department_id\" = \"t3\""
+ ".\"department_id0\""
+ " AND \"employee\".\"department_id\" = \"t3\".\"EXPR$0\"";
sql(query).withOracle().ok(expected);
}*/
/*@Test void testUnionAllWithNoOperands() {
String query = "select A.\"department_id\" "
+ "from \"foodmart\".\"employee\" A "
+ " where A.\"department_id\" = ( select min( A.\"department_id\") from \"foodmart\""
+ ".\"department\" B where 1=2 )";
final String expected = "SELECT \"employee\".\"department_id\"\n"
+ "FROM \"foodmart\".\"employee\"\n"
+ "INNER JOIN (SELECT \"t1\".\"department_id\" AS \"department_id0\","
+ " MIN(\"t1\".\"department_id\") AS \"EXPR$0\"\n"
+ "FROM (SELECT *\nFROM (VALUES (NULL, NULL))"
+ " AS \"t\" (\"department_id\", \"department_description\")"
+ "\nWHERE 1 = 0) AS \"t\","
+ "\n(SELECT \"department_id\"\nFROM \"foodmart\".\"employee\""
+ "\nGROUP BY \"department_id\") AS \"t1\""
+ "\nGROUP BY \"t1\".\"department_id\") AS \"t3\" "
+ "ON \"employee\".\"department_id\" = \"t3\".\"department_id0\""
+ " AND \"employee\".\"department_id\" = \"t3\".\"EXPR$0\"";
sql(query).ok(expected);
}*/
@Test void testSmallintOracle() {
  // Oracle has no SMALLINT type; the cast is rendered as NUMBER(5).
  final String q = "SELECT CAST(\"department_id\" AS SMALLINT) FROM \"employee\"";
  final String want = "SELECT CAST(\"department_id\" AS NUMBER(5))\n"
      + "FROM \"foodmart\".\"employee\"";
  sql(q)
      .withOracle()
      .ok(want);
}
@Test void testBigintOracle() {
  // Oracle has no BIGINT type; the cast is rendered as NUMBER(19).
  final String q = "SELECT CAST(\"department_id\" AS BIGINT) FROM \"employee\"";
  final String want = "SELECT CAST(\"department_id\" AS NUMBER(19))\n"
      + "FROM \"foodmart\".\"employee\"";
  sql(q)
      .withOracle()
      .ok(want);
}
@Test void testDoubleOracle() {
  // DOUBLE is spelled DOUBLE PRECISION on Oracle.
  final String q = "SELECT CAST(\"department_id\" AS DOUBLE) FROM \"employee\"";
  final String want = "SELECT CAST(\"department_id\" AS DOUBLE PRECISION)\n"
      + "FROM \"foodmart\".\"employee\"";
  sql(q)
      .withOracle()
      .ok(want);
}
@Test void testDateLiteralOracle() {
  // DATE literals become TO_DATE with an explicit format mask on Oracle.
  final String q = "SELECT DATE '1978-05-02' FROM \"employee\"";
  final String want = "SELECT TO_DATE('1978-05-02', 'YYYY-MM-DD')\n"
      + "FROM \"foodmart\".\"employee\"";
  sql(q)
      .withOracle()
      .ok(want);
}
@Test void testTimestampLiteralOracle() {
  // TIMESTAMP literals become TO_TIMESTAMP with an explicit format mask on Oracle.
  final String q = "SELECT TIMESTAMP '1978-05-02 12:34:56.78' FROM \"employee\"";
  final String want = "SELECT TO_TIMESTAMP('1978-05-02 12:34:56.78',"
      + " 'YYYY-MM-DD HH24:MI:SS.FF')\n"
      + "FROM \"foodmart\".\"employee\"";
  sql(q)
      .withOracle()
      .ok(want);
}
@Test void testTimeLiteralOracle() {
  // TIME literals become TO_TIME with an explicit format mask on Oracle.
  final String q = "SELECT TIME '12:34:56.78' FROM \"employee\"";
  final String want = "SELECT TO_TIME('12:34:56.78', 'HH24:MI:SS.FF')\n"
      + "FROM \"foodmart\".\"employee\"";
  sql(q)
      .withOracle()
      .ok(want);
}
/** GROUP BY on a join column not in the final projection: BigQuery output
 * wraps the aggregate in a subquery and aliases the duplicated column name
 * as {@code department_id0}. */
@Test public void testSelectWithGroupByOnColumnNotPresentInProjection() {
  String query = "select \"t1\".\"department_id\" from\n"
      + "\"foodmart\".\"employee\" as \"t1\" inner join \"foodmart\".\"department\" as \"t2\"\n"
      + "on \"t1\".\"department_id\" = \"t2\".\"department_id\"\n"
      + "group by \"t2\".\"department_id\", \"t1\".\"department_id\"";
  final String expected = "SELECT t0.department_id\n"
      + "FROM (SELECT department.department_id AS department_id0, employee.department_id\n"
      + "FROM foodmart.employee\n"
      + "INNER JOIN foodmart.department ON employee.department_id = department.department_id\n"
      + "GROUP BY department_id0, employee.department_id) AS t0";
  sql(query).withBigQuery().ok(expected);
}
@Test void testSupportsDataType() {
  // BOOLEAN is supported by PostgreSQL but not by Oracle; INTEGER by both.
  final RelDataTypeFactory typeFactory =
      new SqlTypeFactoryImpl(RelDataTypeSystem.DEFAULT);
  final RelDataType booleanType = typeFactory.createSqlType(SqlTypeName.BOOLEAN);
  final RelDataType integerType = typeFactory.createSqlType(SqlTypeName.INTEGER);
  final SqlDialect oracle = SqlDialect.DatabaseProduct.ORACLE.getDialect();
  assertFalse(oracle.supportsDataType(booleanType));
  assertTrue(oracle.supportsDataType(integerType));
  final SqlDialect postgresql = SqlDialect.DatabaseProduct.POSTGRESQL.getDialect();
  assertTrue(postgresql.supportsDataType(booleanType));
  assertTrue(postgresql.supportsDataType(integerType));
}
/** Test case for
 * <a href="https://issues.apache.org/jira/browse/CALCITE-4150">[CALCITE-4150]
 * JDBC adapter throws UnsupportedOperationException when generating SQL
 * for untyped NULL literal</a>. */
@Test void testSelectRawNull() {
  // The untyped NULL must unparse as a bare NULL instead of throwing.
  final String q = "SELECT NULL FROM \"product\"";
  final String want = "SELECT NULL\n"
      + "FROM \"foodmart\".\"product\"";
  sql(q).ok(want);
}
@Test void testSelectRawNullWithAlias() {
  // An aliased untyped NULL keeps its alias in the generated SQL.
  final String q = "SELECT NULL AS DUMMY FROM \"product\"";
  final String want = "SELECT NULL AS \"DUMMY\"\n"
      + "FROM \"foodmart\".\"product\"";
  sql(q).ok(want);
}
@Test void testSelectNullWithCast() {
  // A typed NULL with no FROM is rendered via a VALUES row, then round-tripped.
  final String q = "SELECT CAST(NULL AS INT)";
  final String want = "SELECT *\n"
      + "FROM (VALUES (NULL)) AS \"t\" (\"EXPR$0\")";
  sql(q).ok(want);
  // validate
  sql(want).exec();
}
@Test void testSelectNullWithCount() {
  // COUNT over a typed NULL is pushed onto a VALUES row, then round-tripped.
  final String q = "SELECT COUNT(CAST(NULL AS INT))";
  final String want = "SELECT COUNT(\"$f0\")\n"
      + "FROM (VALUES (NULL)) AS \"t\" (\"$f0\")";
  sql(q).ok(want);
  // validate
  sql(want).exec();
}
@Test void testSelectNullWithGroupByNull() {
  // Grouping by a typed NULL yields a two-column VALUES row ($f0 for the
  // grouping key, $f1 for the counted expression); round-trip the result.
  final String q = "SELECT COUNT(CAST(NULL AS INT))\n"
      + "FROM (VALUES (0))AS \"t\"\n"
      + "GROUP BY CAST(NULL AS VARCHAR CHARACTER SET \"ISO-8859-1\")";
  final String want = "SELECT COUNT(\"$f1\")\n"
      + "FROM (VALUES (NULL, NULL)) AS \"t\" (\"$f0\", \"$f1\")\n"
      + "GROUP BY \"$f0\"";
  sql(q).ok(want);
  // validate
  sql(want).exec();
}
@Test void testSelectNullWithGroupByVar() {
  // Typed NULL counted under a real GROUP BY keeps its explicit CAST; the
  // generated SQL is then round-tripped.
  final String q = "SELECT COUNT(CAST(NULL AS INT))\n"
      + "FROM \"account\" AS \"t\"\n"
      + "GROUP BY \"account_type\"";
  final String want = "SELECT COUNT(CAST(NULL AS INTEGER))\n"
      + "FROM \"foodmart\".\"account\"\n"
      + "GROUP BY \"account_type\"";
  sql(q).ok(want);
  // validate
  sql(want).exec();
}
/** INSERT ... SELECT with a typed NULL: columns omitted from the insert list
 * ({@code account_description}, {@code Custom_Members}) are filled with typed
 * NULL casts in the generated SQL, which is then round-tripped. */
@Test void testSelectNullWithInsert() {
  final String query = "insert into\n"
      + "\"account\"(\"account_id\",\"account_parent\",\"account_type\",\"account_rollup\")\n"
      + "select 1, cast(NULL AS INT), cast(123 as varchar), cast(123 as varchar)";
  final String expected = "INSERT INTO \"foodmart\".\"account\" ("
      + "\"account_id\", \"account_parent\", \"account_description\", "
      + "\"account_type\", \"account_rollup\", \"Custom_Members\")\n"
      + "(SELECT \"EXPR$0\" AS \"account_id\","
      + " \"EXPR$1\" AS \"account_parent\","
      + " CAST(NULL AS VARCHAR(30) CHARACTER SET \"ISO-8859-1\") "
      + "AS \"account_description\","
      + " \"EXPR$2\" AS \"account_type\", "
      + "\"EXPR$3\" AS \"account_rollup\","
      + " CAST(NULL AS VARCHAR(255) CHARACTER SET \"ISO-8859-1\") "
      + "AS \"Custom_Members\"\n"
      + "FROM (VALUES (1, NULL, '123', '123')) "
      + "AS \"t\" (\"EXPR$0\", \"EXPR$1\", \"EXPR$2\", \"EXPR$3\"))";
  sql(query).ok(expected);
  // validate
  sql(expected).exec();
}
/** INSERT ... SELECT over a join with a typed NULL: as in
 * {@code testSelectNullWithInsert}, omitted target columns are padded with
 * typed NULL casts; the generated SQL is round-tripped. */
@Test void testSelectNullWithInsertFromJoin() {
  final String query = "insert into\n"
      + "\"account\"(\"account_id\",\"account_parent\",\n"
      + "\"account_type\",\"account_rollup\")\n"
      + "select \"product\".\"product_id\",\n"
      + "cast(NULL AS INT),\n"
      + "cast(\"product\".\"product_id\" as varchar),\n"
      + "cast(\"sales_fact_1997\".\"store_id\" as varchar)\n"
      + "from \"product\"\n"
      + "inner join \"sales_fact_1997\"\n"
      + "on \"product\".\"product_id\" = \"sales_fact_1997\".\"product_id\"";
  final String expected = "INSERT INTO \"foodmart\".\"account\" "
      + "(\"account_id\", \"account_parent\", \"account_description\", "
      + "\"account_type\", \"account_rollup\", \"Custom_Members\")\n"
      + "(SELECT \"product\".\"product_id\" AS \"account_id\", "
      + "CAST(NULL AS INTEGER) AS \"account_parent\", CAST(NULL AS VARCHAR"
      + "(30) CHARACTER SET \"ISO-8859-1\") AS \"account_description\", "
      + "CAST(\"product\".\"product_id\" AS VARCHAR CHARACTER SET "
      + "\"ISO-8859-1\") AS \"account_type\", "
      + "CAST(\"sales_fact_1997\".\"store_id\" AS VARCHAR CHARACTER SET \"ISO-8859-1\") AS "
      + "\"account_rollup\", "
      + "CAST(NULL AS VARCHAR(255) CHARACTER SET \"ISO-8859-1\") AS \"Custom_Members\"\n"
      + "FROM \"foodmart\".\"product\"\n"
      + "INNER JOIN \"foodmart\".\"sales_fact_1997\" "
      + "ON \"product\".\"product_id\" = \"sales_fact_1997\".\"product_id\")";
  sql(query).ok(expected);
  // validate
  sql(expected).exec();
}
/** Decimal casts whose precision exceeds the type system's maximum: the
 * DECIMAL(38,6) cast is clamped to DECIMAL(19, 6) in the output, while an
 * in-range DECIMAL(5,2) cast is kept as written. */
@Test void testCastDecimalOverflow() {
  final String query =
      "SELECT CAST('11111111111111111111111111111111.111111' AS DECIMAL(38,6)) AS \"num\" from \"product\"";
  final String expected =
      "SELECT CAST('11111111111111111111111111111111.111111' AS DECIMAL(19, 6)) AS \"num\"\n"
          + "FROM \"foodmart\".\"product\"";
  sql(query).ok(expected);

  final String query2 =
      "SELECT CAST(1111111 AS DECIMAL(5,2)) AS \"num\" from \"product\"";
  final String expected2 =
      "SELECT CAST(1111111 AS DECIMAL(5, 2)) AS \"num\"\nFROM \"foodmart\".\"product\"";
  sql(query2).ok(expected2);
}
/** Casts inside string/integer comparisons: the default dialect drops the
 * casts and compares against the raw literals, whereas BigQuery keeps
 * explicit CASTs (INT64, DATETIME) and rewrites {@code ||} to CONCAT. */
@Test void testCastInStringIntegerComparison() {
  final String query = "select \"employee_id\" "
      + "from \"foodmart\".\"employee\" "
      + "where 10 = cast('10' as int) and \"birth_date\" = cast('1914-02-02' as date) or "
      + "\"hire_date\" = cast('1996-01-01 '||'00:00:00' as timestamp)";
  final String expected = "SELECT \"employee_id\"\n"
      + "FROM \"foodmart\".\"employee\"\n"
      + "WHERE 10 = '10' AND \"birth_date\" = '1914-02-02' OR \"hire_date\" = '1996-01-01 ' || "
      + "'00:00:00'";
  final String expectedBiqquery = "SELECT employee_id\n"
      + "FROM foodmart.employee\n"
      + "WHERE 10 = CAST('10' AS INT64) AND birth_date = '1914-02-02' OR hire_date = "
      + "CAST(CONCAT('1996-01-01 ', '00:00:00') AS DATETIME)";
  sql(query)
      .ok(expected)
      .withBigQuery()
      .ok(expectedBiqquery);
}
/** String-literal quoting/unquoting across all dialects: BigQuery escapes an
 * embedded quote with a backslash ({@code \'}) while every other dialect
 * doubles it ({@code ''}); unquoting must invert each scheme. */
@Test void testDialectQuoteStringLiteral() {
  dialects().forEach((dialect, databaseProduct) -> {
    assertThat(dialect.quoteStringLiteral(""), is("''"));
    assertThat(dialect.quoteStringLiteral("can't run"),
        databaseProduct == DatabaseProduct.BIG_QUERY
            ? is("'can\\'t run'")
            : is("'can''t run'"));

    assertThat(dialect.unquoteStringLiteral("''"), is(""));
    if (databaseProduct == DatabaseProduct.BIG_QUERY) {
      assertThat(dialect.unquoteStringLiteral("'can\\'t run'"),
          is("can't run"));
    } else {
      assertThat(dialect.unquoteStringLiteral("'can't run'"),
          is("can't run"));
    }
  });
}
/** TO_NUMBER with a hex format mask ('XXXX...'): Hive/Spark emit CONV(base
 * 16 to 10), BigQuery prefixes '0x' and casts to INT64, Snowflake supports
 * TO_NUMBER natively. */
@Test public void testToNumberFunctionHandlingHexaToInt() {
  String query = "select TO_NUMBER('03ea02653f6938ba','XXXXXXXXXXXXXXXX')";
  final String expected = "SELECT CAST(CONV('03ea02653f6938ba', 16, 10) AS BIGINT)";
  final String expectedBigQuery = "SELECT CAST(CONCAT('0x', '03ea02653f6938ba') AS INT64)";
  final String expectedSnowFlake = "SELECT TO_NUMBER('03ea02653f6938ba', 'XXXXXXXXXXXXXXXX')";
  sql(query)
      .withHive()
      .ok(expected)
      .withSpark()
      .ok(expected)
      .withBigQuery()
      .ok(expectedBigQuery)
      .withSnowflake()
      .ok(expectedSnowFlake);
}
/** TO_NUMBER with a decimal format mask: Hive/Spark cast to FLOAT, BigQuery
 * to FLOAT64, Snowflake uses numeric TO_NUMBER(value, precision, scale). */
@Test public void testToNumberFunctionHandlingFloatingPoint() {
  String query = "select TO_NUMBER('-1.7892','9.9999')";
  final String expected = "SELECT CAST('-1.7892' AS FLOAT)";
  final String expectedBigQuery = "SELECT CAST('-1.7892' AS FLOAT64)";
  final String expectedSnowFlake = "SELECT TO_NUMBER('-1.7892', 38, 4)";
  sql(query)
      .withHive()
      .ok(expected)
      .withSpark()
      .ok(expected)
      .withBigQuery()
      .ok(expectedBigQuery)
      .withSnowflake()
      .ok(expectedSnowFlake);
}
@Test public void testToNumberFunctionWithColumns() {
  // TO_NUMBER on a column with an integer format mask becomes CAST(... AS INT64)
  // on BigQuery.
  final String q = "SELECT TO_NUMBER(\"first_name\", '000') FROM \"foodmart\""
      + ".\"employee\"";
  final String wantBigQuery = "SELECT CAST(first_name AS INT64)\n"
      + "FROM foodmart.employee";
  sql(q)
      .withBigQuery()
      .ok(wantBigQuery);
}
  /** DISTINCT plus a windowed MAX: Hive/Spark repeat the OVER clause inside
   * GROUP BY, BigQuery wraps the window in a subquery and groups by its alias,
   * while Snowflake/MSSQL emit ROWS-framed windows with an explicit ORDER BY. */
  @Test public void testOver() {
    String query = "SELECT distinct \"product_id\", MAX(\"product_id\") \n"
        + "OVER(PARTITION BY \"product_id\") AS abc\n"
        + "FROM \"product\"";
    final String expected = "SELECT product_id, MAX(product_id) OVER "
        + "(PARTITION BY product_id RANGE BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) ABC\n"
        + "FROM foodmart.product\n"
        + "GROUP BY product_id, MAX(product_id) OVER (PARTITION BY product_id "
        + "RANGE BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING)"
;
    final String expectedBQ = "SELECT product_id, ABC\n"
        + "FROM (SELECT product_id, MAX(product_id) OVER "
        + "(PARTITION BY product_id RANGE BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) AS ABC\n"
        + "FROM foodmart.product) AS t\n"
        + "GROUP BY product_id, ABC";
    final String expectedSnowFlake = "SELECT \"product_id\", MAX(\"product_id\") OVER "
        + "(PARTITION BY \"product_id\" ORDER BY \"product_id\" ROWS "
        + "BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) AS \"ABC\"\n"
        + "FROM \"foodmart\".\"product\"\n"
        + "GROUP BY \"product_id\", MAX(\"product_id\") OVER (PARTITION BY \"product_id\" "
        + "ORDER BY \"product_id\" ROWS BETWEEN UNBOUNDED PRECEDING AND "
        + "UNBOUNDED FOLLOWING)";
    final String mssql = "SELECT [product_id], MAX([product_id]) OVER (PARTITION "
        + "BY [product_id] ORDER BY [product_id] ROWS BETWEEN UNBOUNDED PRECEDING AND "
        + "UNBOUNDED FOLLOWING) AS [ABC]\n"
        + "FROM [foodmart].[product]\n"
        + "GROUP BY [product_id], MAX([product_id]) OVER (PARTITION BY [product_id] "
        + "ORDER BY [product_id] ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING)";
    sql(query)
        .withHive()
        .ok(expected)
        .withSpark()
        .ok(expected)
        .withBigQuery()
        .ok(expectedBQ)
        .withSnowflake()
        .ok(expectedSnowFlake)
        .withMssql()
        .ok(mssql);
  }
  /** NTILE window function: BigQuery emulates NULLS LAST ordering with an
   * "expr IS NULL, expr" sort key. */
  @Test public void testNtileFunction() {
    String query = "SELECT ntile(2)\n"
        + "OVER(order BY \"product_id\") AS abc\n"
        + "FROM \"product\"";
    final String expectedBQ = "SELECT NTILE(2) OVER (ORDER BY product_id IS NULL, product_id) "
        + "AS ABC\n"
        + "FROM foodmart.product";
    sql(query)
        .withBigQuery()
        .ok(expectedBQ);
  }
  /** COUNT(*) OVER () with an empty window: Hive/Spark/BigQuery spell out the
   * unbounded RANGE frame, Snowflake adds a dummy ORDER BY 0, and MSSQL keeps
   * the bare OVER (). */
  @Test public void testCountWithWindowFunction() {
    String query = "Select count(*) over() from \"product\"";
    String expected = "SELECT COUNT(*) OVER (RANGE BETWEEN UNBOUNDED PRECEDING "
        + "AND UNBOUNDED FOLLOWING)\n"
        + "FROM foodmart.product";
    String expectedBQ = "SELECT COUNT(*) OVER (RANGE BETWEEN UNBOUNDED PRECEDING "
        + "AND UNBOUNDED FOLLOWING)\n"
        + "FROM foodmart.product";
    final String expectedSnowFlake = "SELECT COUNT(*) OVER (ORDER BY 0 "
        + "ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING)\n"
        + "FROM \"foodmart\".\"product\"";
    final String mssql = "SELECT COUNT(*) OVER ()\n"
        + "FROM [foodmart].[product]";
    sql(query)
        .withHive()
        .ok(expected)
        .withSpark()
        .ok(expected)
        .withBigQuery()
        .ok(expectedBQ)
        .withSnowflake()
        .ok(expectedSnowFlake)
        .withMssql()
        .ok(mssql);
  }
  /** ORDER BY inside window functions over an aggregated subquery; checks the
   * per-dialect null-ordering emulation: "IS NULL, expr" (Hive/BigQuery),
   * NULLS LAST (Spark), CASE WHEN (MSSQL), plain ORDER BY (Snowflake). */
  @Test public void testOrderByInWindowFunction() {
    String query = "select \"first_name\", COUNT(\"department_id\") as "
        + "\"department_id_number\", ROW_NUMBER() OVER (ORDER BY "
        + "\"department_id\" ASC), SUM(\"department_id\") OVER "
        + "(ORDER BY \"department_id\" ASC) \n"
        + "from \"foodmart\".\"employee\" \n"
        + "GROUP by \"first_name\", \"department_id\"";
    final String expected = "SELECT first_name, department_id_number, ROW_NUMBER() "
        + "OVER (ORDER BY department_id IS NULL, department_id), SUM(department_id) "
        + "OVER (ORDER BY department_id IS NULL, department_id "
        + "RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW)\n"
        + "FROM (SELECT first_name, department_id, COUNT(*) department_id_number\n"
        + "FROM foodmart.employee\n"
        + "GROUP BY first_name, department_id) t0";
    final String expectedSpark = "SELECT first_name, department_id_number, ROW_NUMBER() "
        + "OVER (ORDER BY department_id NULLS LAST), SUM(department_id) "
        + "OVER (ORDER BY department_id NULLS LAST "
        + "RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW)\n"
        + "FROM (SELECT first_name, department_id, COUNT(*) department_id_number\n"
        + "FROM foodmart.employee\n"
        + "GROUP BY first_name, department_id) t0";
    final String expectedBQ = "SELECT first_name, department_id_number, "
        + "ROW_NUMBER() OVER (ORDER BY department_id IS NULL, department_id), SUM(department_id) "
        + "OVER (ORDER BY department_id IS NULL, department_id "
        + "RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW)\n"
        + "FROM (SELECT first_name, department_id, COUNT(*) AS department_id_number\n"
        + "FROM foodmart.employee\n"
        + "GROUP BY first_name, department_id) AS t0";
    final String expectedSnowFlake = "SELECT \"first_name\", \"department_id_number\", "
        + "ROW_NUMBER() OVER (ORDER BY \"department_id\"), SUM(\"department_id\") "
        + "OVER (ORDER BY \"department_id\" RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW)\n"
        + "FROM (SELECT \"first_name\", \"department_id\", COUNT(*) AS \"department_id_number\"\n"
        + "FROM \"foodmart\".\"employee\"\n"
        + "GROUP BY \"first_name\", \"department_id\") AS \"t0\"";
    final String mssql = "SELECT [first_name], [department_id_number], ROW_NUMBER()"
        + " OVER (ORDER BY CASE WHEN [department_id] IS NULL THEN 1 ELSE 0 END,"
        + " [department_id]), SUM([department_id]) OVER (ORDER BY CASE WHEN [department_id] IS NULL"
        + " THEN 1 ELSE 0 END, [department_id] RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW)\n"
        + "FROM (SELECT [first_name], [department_id], COUNT(*) AS [department_id_number]\n"
        + "FROM [foodmart].[employee]\n"
        + "GROUP BY [first_name], [department_id]) AS [t0]";
    sql(query)
        .withHive()
        .ok(expected)
        .withSpark()
        .ok(expectedSpark)
        .withBigQuery()
        .ok(expectedBQ)
        .withSnowflake()
        .ok(expectedSnowFlake)
        .withMssql()
        .ok(mssql);
  }
  /** TO_NUMBER with a 'D' (decimal separator) mask behaves like a '.' mask:
   * FLOAT cast on Hive/Spark/BigQuery, precision/scale form on Snowflake. */
  @Test public void testToNumberFunctionHandlingFloatingPointWithD() {
    String query = "select TO_NUMBER('1.789','9D999')";
    final String expected = "SELECT CAST('1.789' AS FLOAT)";
    final String expectedBigQuery = "SELECT CAST('1.789' AS FLOAT64)";
    final String expectedSnowFlake = "SELECT TO_NUMBER('1.789', 38, 3)";
    sql(query)
        .withHive()
        .ok(expected)
        .withSpark()
        .ok(expected)
        .withBigQuery()
        .ok(expectedBigQuery)
        .withSnowflake()
        .ok(expectedSnowFlake);
  }
  /** Single-argument TO_NUMBER of a decimal literal (no format mask): the scale
   * is inferred from the literal itself. */
  @Test public void testToNumberFunctionHandlingWithSingleFloatingPoint() {
    String query = "select TO_NUMBER('1.789')";
    final String expected = "SELECT CAST('1.789' AS FLOAT)";
    final String expectedBigQuery = "SELECT CAST('1.789' AS FLOAT64)";
    final String expectedSnowFlake = "SELECT TO_NUMBER('1.789', 38, 3)";
    sql(query)
        .withHive()
        .ok(expected)
        .withSpark()
        .ok(expected)
        .withBigQuery()
        .ok(expectedBigQuery)
        .withSnowflake()
        .ok(expectedSnowFlake);
  }
  /** TO_NUMBER with a ',' group-separator mask: the comma is stripped before
   * the integer cast; Snowflake keeps the original call. */
  @Test public void testToNumberFunctionHandlingWithComma() {
    String query = "SELECT TO_NUMBER ('1,789', '9,999')";
    final String expected = "SELECT CAST('1789' AS BIGINT)";
    final String expectedBigQuery = "SELECT CAST('1789' AS INT64)";
    final String expectedSnowFlake = "SELECT TO_NUMBER('1,789', '9,999')";
    sql(query)
        .withHive()
        .ok(expected)
        .withSpark()
        .ok(expected)
        .withBigQuery()
        .ok(expectedBigQuery)
        .withSnowflake()
        .ok(expectedSnowFlake);
  }
  /** TO_NUMBER with a '$' currency mask: the currency symbol is stripped before
   * the integer cast; Snowflake keeps the original call. */
  @Test public void testToNumberFunctionHandlingWithCurrency() {
    String query = "SELECT TO_NUMBER ('$1789', '$9999')";
    final String expected = "SELECT CAST('1789' AS BIGINT)";
    final String expectedBigQuery = "SELECT CAST('1789' AS INT64)";
    final String expectedSnowFlake = "SELECT TO_NUMBER('$1789', '$9999')";
    sql(query)
        .withHive()
        .ok(expected)
        .withSpark()
        .ok(expected)
        .withBigQuery()
        .ok(expectedBigQuery)
        .withSnowflake()
        .ok(expectedSnowFlake);
  }
  /** TO_NUMBER with an 'L' (local currency) mask: treated like '$'; note the
   * Snowflake output rewrites 'L9999' to '$9999'. */
  @Test public void testToNumberFunctionHandlingWithCurrencyAndL() {
    String query = "SELECT TO_NUMBER ('$1789', 'L9999')";
    final String expected = "SELECT CAST('1789' AS BIGINT)";
    final String expectedBigQuery = "SELECT CAST('1789' AS INT64)";
    final String expectedSnowFlake = "SELECT TO_NUMBER('$1789', '$9999')";
    sql(query)
        .withHive()
        .ok(expected)
        .withSpark()
        .ok(expected)
        .withBigQuery()
        .ok(expectedBigQuery)
        .withSnowflake()
        .ok(expectedSnowFlake);
  }
  /** TO_NUMBER with a leading 'S' (signed) mask: the sign survives into the
   * integer cast; Snowflake keeps the original call. */
  @Test public void testToNumberFunctionHandlingWithMinus() {
    String query = "SELECT TO_NUMBER ('-12334', 'S99999')";
    final String expected = "SELECT CAST('-12334' AS BIGINT)";
    final String expectedBigQuery = "SELECT CAST('-12334' AS INT64)";
    final String expectedSnowFlake = "SELECT TO_NUMBER('-12334', 'S99999')";
    sql(query)
        .withHive()
        .ok(expected)
        .withSpark()
        .ok(expected)
        .withBigQuery()
        .ok(expectedBigQuery)
        .withSnowflake()
        .ok(expectedSnowFlake);
  }
  /** TO_NUMBER with a trailing 'S' mask: the trailing sign is moved to the
   * front of the value before the integer cast. */
  @Test public void testToNumberFunctionHandlingWithMinusLast() {
    String query = "SELECT TO_NUMBER ('12334-', '99999S')";
    final String expected = "SELECT CAST('-12334' AS BIGINT)";
    final String expectedBigQuery = "SELECT CAST('-12334' AS INT64)";
    final String expectedSnowFlake = "SELECT TO_NUMBER('12334-', '99999S')";
    sql(query)
        .withHive()
        .ok(expected)
        .withSpark()
        .ok(expected)
        .withBigQuery()
        .ok(expectedBigQuery)
        .withSnowflake()
        .ok(expectedSnowFlake);
  }
  /** TO_NUMBER with an exponent ('EEEE') mask: cast to an exact numeric type
   * (DECIMAL(19, 0) / NUMERIC) rather than an integer. */
  @Test public void testToNumberFunctionHandlingWithE() {
    String query = "SELECT TO_NUMBER ('12E3', '99EEEE')";
    final String expected = "SELECT CAST('12E3' AS DECIMAL(19, 0))";
    final String expectedBigQuery = "SELECT CAST('12E3' AS NUMERIC)";
    final String expectedSnowFlake = "SELECT TO_NUMBER('12E3', '99EEEE')";
    sql(query)
        .withHive()
        .ok(expected)
        .withSpark()
        .ok(expected)
        .withBigQuery()
        .ok(expectedBigQuery)
        .withSnowflake()
        .ok(expectedSnowFlake);
  }
  /** Three-argument TO_NUMBER with NLS_CURRENCY: the named currency prefix is
   * stripped from the value; Snowflake collapses to single-argument form. */
  @Test public void testToNumberFunctionHandlingWithCurrencyName() {
    String query = "SELECT TO_NUMBER('dollar1234','L9999','NLS_CURRENCY=''dollar''')";
    final String expected = "SELECT CAST('1234' AS BIGINT)";
    final String expectedBigQuery = "SELECT CAST('1234' AS INT64)";
    final String expectedSnowFlake = "SELECT TO_NUMBER('1234')";
    sql(query)
        .withHive()
        .ok(expected)
        .withSpark()
        .ok(expected)
        .withBigQuery()
        .ok(expectedBigQuery)
        .withSnowflake()
        .ok(expectedSnowFlake);
  }
  /** NLS_CURRENCY variant with a decimal ('D') mask: strips the currency name
   * and casts to FLOAT/FLOAT64; Snowflake uses precision/scale (38, 2). */
  @Test public void testToNumberFunctionHandlingWithCurrencyNameFloat() {
    String query = "SELECT TO_NUMBER('dollar12.34','L99D99','NLS_CURRENCY=''dollar''')";
    final String expected = "SELECT CAST('12.34' AS FLOAT)";
    final String expectedBigQuery = "SELECT CAST('12.34' AS FLOAT64)";
    final String expectedSnowFlake = "SELECT TO_NUMBER('12.34', 38, 2)";
    sql(query)
        .withHive()
        .ok(expected)
        .withSpark()
        .ok(expected)
        .withBigQuery()
        .ok(expectedBigQuery)
        .withSnowflake()
        .ok(expectedSnowFlake);
  }
  /** A NULL third argument makes the whole TO_NUMBER NULL: rendered as a NULL
   * cast (or TO_NUMBER(NULL) on Snowflake). */
  @Test public void testToNumberFunctionHandlingWithCurrencyNameNull() {
    String query = "SELECT TO_NUMBER('dollar12.34','L99D99',null)";
    final String expected = "SELECT CAST(NULL AS INT)";
    final String expectedBigQuery = "SELECT CAST(NULL AS INT64)";
    final String expectedSnowFlake = "SELECT TO_NUMBER(NULL)";
    sql(query)
        .withHive()
        .ok(expected)
        .withSpark()
        .ok(expected)
        .withBigQuery()
        .ok(expectedBigQuery)
        .withSnowflake()
        .ok(expectedSnowFlake);
  }
  /** NLS_CURRENCY variant with a leading minus: the sign is preserved while
   * the currency name is stripped. */
  @Test public void testToNumberFunctionHandlingWithCurrencyNameMinus() {
    String query = "SELECT TO_NUMBER('-dollar1234','L9999','NLS_CURRENCY=''dollar''')";
    final String expected = "SELECT CAST('-1234' AS BIGINT)";
    final String expectedBigQuery = "SELECT CAST('-1234' AS INT64)";
    final String expectedSnowFlake = "SELECT TO_NUMBER('-1234')";
    sql(query)
        .withHive()
        .ok(expected)
        .withSpark()
        .ok(expected)
        .withBigQuery()
        .ok(expectedBigQuery)
        .withSnowflake()
        .ok(expectedSnowFlake);
  }
  /** TO_NUMBER with a 'G' (group separator) mask: separator removed before the
   * integer cast; Snowflake keeps the original call. */
  @Test public void testToNumberFunctionHandlingWithG() {
    String query = "SELECT TO_NUMBER ('1,2345', '9G9999')";
    final String expected = "SELECT CAST('12345' AS BIGINT)";
    final String expectedBigQuery = "SELECT CAST('12345' AS INT64)";
    final String expectedSnowFlake = "SELECT TO_NUMBER('1,2345', '9G9999')";
    sql(query)
        .withHive()
        .ok(expected)
        .withSpark()
        .ok(expected)
        .withBigQuery()
        .ok(expectedBigQuery)
        .withSnowflake()
        .ok(expectedSnowFlake);
  }
  /** TO_NUMBER with a 'U' (dual currency) mask: treated like '$'; Snowflake
   * rewrites 'U9999' to '$9999'. */
  @Test public void testToNumberFunctionHandlingWithU() {
    String query = "SELECT TO_NUMBER ('$1234', 'U9999')";
    final String expected = "SELECT CAST('1234' AS BIGINT)";
    final String expectedBigQuery = "SELECT CAST('1234' AS INT64)";
    final String expectedSnowFlake = "SELECT TO_NUMBER('$1234', '$9999')";
    sql(query)
        .withHive()
        .ok(expected)
        .withSpark()
        .ok(expected)
        .withBigQuery()
        .ok(expectedBigQuery)
        .withSnowflake()
        .ok(expectedSnowFlake);
  }
  /** TO_NUMBER with a 'PR' (angle-bracket negative) mask on a positive value:
   * padding is trimmed; Snowflake drops the mask entirely. */
  @Test public void testToNumberFunctionHandlingWithPR() {
    String query = "SELECT TO_NUMBER (' 123 ', '999PR')";
    final String expected = "SELECT CAST('123' AS BIGINT)";
    final String expectedBigQuery = "SELECT CAST('123' AS INT64)";
    final String expectedSnowFlake = "SELECT TO_NUMBER('123')";
    sql(query)
        .withHive()
        .ok(expected)
        .withSpark()
        .ok(expected)
        .withBigQuery()
        .ok(expectedBigQuery)
        .withSnowflake()
        .ok(expectedSnowFlake);
  }
  /** TO_NUMBER with an 'MI' (trailing minus) mask: the trailing sign is moved
   * to the front before the integer cast. */
  @Test public void testToNumberFunctionHandlingWithMI() {
    String query = "SELECT TO_NUMBER ('1234-', '9999MI')";
    final String expected = "SELECT CAST('-1234' AS BIGINT)";
    final String expectedBigQuery = "SELECT CAST('-1234' AS INT64)";
    final String expectedSnowFlake = "SELECT TO_NUMBER('1234-', '9999MI')";
    sql(query)
        .withHive()
        .ok(expected)
        .withSpark()
        .ok(expected)
        .withBigQuery()
        .ok(expectedBigQuery)
        .withSnowflake()
        .ok(expectedSnowFlake);
  }
  /** 'MI' mask combined with a decimal point: sign moved to the front, value
   * cast to FLOAT/FLOAT64; Snowflake uses precision/scale (38, 3). */
  @Test public void testToNumberFunctionHandlingWithMIDecimal() {
    String query = "SELECT TO_NUMBER ('1.234-', '9.999MI')";
    final String expected = "SELECT CAST('-1.234' AS FLOAT)";
    final String expectedBigQuery = "SELECT CAST('-1.234' AS FLOAT64)";
    final String expectedSnowFlake = "SELECT TO_NUMBER('-1.234', 38, 3)";
    sql(query)
        .withHive()
        .ok(expected)
        .withSpark()
        .ok(expected)
        .withBigQuery()
        .ok(expectedBigQuery)
        .withSnowflake()
        .ok(expectedSnowFlake);
  }
  /** TO_NUMBER with a zero-padded ('0') mask: leading zeros are preserved in
   * the cast operand; Snowflake keeps the original call. */
  @Test public void testToNumberFunctionHandlingWithZero() {
    String query = "select TO_NUMBER('01234','09999')";
    final String expected = "SELECT CAST('01234' AS BIGINT)";
    final String expectedBigQuery = "SELECT CAST('01234' AS INT64)";
    final String expectedSnowFlake = "SELECT TO_NUMBER('01234', '09999')";
    sql(query)
        .withHive()
        .ok(expected)
        .withSpark()
        .ok(expected)
        .withBigQuery()
        .ok(expectedBigQuery)
        .withSnowflake()
        .ok(expectedSnowFlake);
  }
  /** TO_NUMBER with a 'B' (blank-for-zero) mask: treated as a plain integer
   * mask; Snowflake keeps the original call. */
  @Test public void testToNumberFunctionHandlingWithB() {
    String query = "select TO_NUMBER('1234','B9999')";
    final String expected = "SELECT CAST('1234' AS BIGINT)";
    final String expectedBigQuery = "SELECT CAST('1234' AS INT64)";
    final String expectedSnowFlake = "SELECT TO_NUMBER('1234', 'B9999')";
    sql(query)
        .withHive()
        .ok(expected)
        .withSpark()
        .ok(expected)
        .withBigQuery()
        .ok(expectedBigQuery)
        .withSnowflake()
        .ok(expectedSnowFlake);
  }
  /** TO_NUMBER with a 'C' (ISO currency code) mask: the 'USD' prefix is
   * stripped; Snowflake collapses to single-argument form. */
  @Test public void testToNumberFunctionHandlingWithC() {
    String query = "select TO_NUMBER('USD1234','C9999')";
    final String expected = "SELECT CAST('1234' AS BIGINT)";
    final String expectedBigQuery = "SELECT CAST('1234' AS INT64)";
    final String expectedSnowFlake = "SELECT TO_NUMBER('1234')";
    sql(query)
        .withHive()
        .ok(expected)
        .withSpark()
        .ok(expected)
        .withBigQuery()
        .ok(expectedBigQuery)
        .withSnowflake()
        .ok(expectedSnowFlake);
  }
  /** Baseline TO_NUMBER with a plain '9999' mask: integer cast on
   * Hive/Spark/BigQuery, unchanged on Snowflake. */
  @Test public void testToNumberFunctionHandling() {
    final String query = "SELECT TO_NUMBER ('1234', '9999')";
    final String expected = "SELECT CAST('1234' AS BIGINT)";
    final String expectedBigQuery = "SELECT CAST('1234' AS INT64)";
    final String expectedSnowFlake = "SELECT TO_NUMBER('1234', '9999')";
    sql(query)
        .withHive()
        .ok(expected)
        .withSpark()
        .ok(expected)
        .withBigQuery()
        .ok(expectedBigQuery)
        .withSnowflake()
        .ok(expectedSnowFlake);
  }
  /** Single-argument TO_NUMBER of an integer literal: integer cast on
   * Hive/Spark/BigQuery, unchanged on Snowflake. */
  @Test public void testToNumberFunctionHandlingSingleArgumentInt() {
    final String query = "SELECT TO_NUMBER ('1234')";
    final String expected = "SELECT CAST('1234' AS BIGINT)";
    final String expectedBigQuery = "SELECT CAST('1234' AS INT64)";
    final String expectedSnowFlake = "SELECT TO_NUMBER('1234')";
    sql(query)
        .withHive()
        .ok(expected)
        .withSpark()
        .ok(expected)
        .withBigQuery()
        .ok(expectedBigQuery)
        .withSnowflake()
        .ok(expectedSnowFlake);
  }
  /** Single-argument TO_NUMBER of a negative decimal literal: FLOAT cast;
   * Snowflake infers scale (38, 3) from the literal. */
  @Test public void testToNumberFunctionHandlingSingleArgumentFloat() {
    final String query = "SELECT TO_NUMBER ('-1.234')";
    final String expected = "SELECT CAST('-1.234' AS FLOAT)";
    final String expectedBigQuery = "SELECT CAST('-1.234' AS FLOAT64)";
    final String expectedSnowFlake = "SELECT TO_NUMBER('-1.234', 38, 3)";
    sql(query)
        .withHive()
        .ok(expected)
        .withSpark()
        .ok(expected)
        .withBigQuery()
        .ok(expectedBigQuery)
        .withSnowflake()
        .ok(expectedSnowFlake);
  }
  /** NULL format argument makes the whole TO_NUMBER NULL. */
  @Test public void testToNumberFunctionHandlingNull() {
    final String query = "SELECT TO_NUMBER ('-1.234',null)";
    final String expected = "SELECT CAST(NULL AS INT)";
    final String expectedBigQuery = "SELECT CAST(NULL AS INT64)";
    final String expectedSnowFlake = "SELECT TO_NUMBER(NULL)";
    sql(query)
        .withHive()
        .ok(expected)
        .withSpark()
        .ok(expected)
        .withBigQuery()
        .ok(expectedBigQuery)
        .withSnowflake()
        .ok(expectedSnowFlake);
  }
  /** NULL operand makes the whole TO_NUMBER NULL.
   * NOTE(review): this is identical to testToNumberFunctionHandlingWithNullArgument
   * below — candidates for consolidation. */
  @Test public void testToNumberFunctionHandlingNullOperand() {
    final String query = "SELECT TO_NUMBER (null)";
    final String expected = "SELECT CAST(NULL AS INT)";
    final String expectedBigQuery = "SELECT CAST(NULL AS INT64)";
    final String expectedSnowFlake = "SELECT TO_NUMBER(NULL)";
    sql(query)
        .withHive()
        .ok(expected)
        .withSpark()
        .ok(expected)
        .withBigQuery()
        .ok(expectedBigQuery)
        .withSnowflake()
        .ok(expectedSnowFlake);
  }
  /** NULL first operand with a non-null format mask still yields NULL. */
  @Test public void testToNumberFunctionHandlingSecoNull() {
    final String query = "SELECT TO_NUMBER(null,'9D99')";
    final String expected = "SELECT CAST(NULL AS INT)";
    final String expectedBigQuery = "SELECT CAST(NULL AS INT64)";
    final String expectedSnowFlake = "SELECT TO_NUMBER(NULL)";
    sql(query)
        .withHive()
        .ok(expected)
        .withSpark()
        .ok(expected)
        .withBigQuery()
        .ok(expectedBigQuery)
        .withSnowflake()
        .ok(expectedSnowFlake);
  }
  /** TO_NUMBER over a function call: the inner SUBSTRING is translated per
   * dialect (SUBSTR on BigQuery/Snowflake) and wrapped in the numeric cast. */
  @Test public void testToNumberFunctionHandlingFunctionAsArgument() {
    final String query = "SELECT TO_NUMBER(SUBSTRING('12345',2))";
    final String expected = "SELECT CAST(SUBSTRING('12345', 2) AS BIGINT)";
    final String expectedSpark = "SELECT CAST(SUBSTRING('12345', 2) AS BIGINT)";
    final String expectedBigQuery = "SELECT CAST(SUBSTR('12345', 2) AS INT64)";
    final String expectedSnowFlake = "SELECT TO_NUMBER(SUBSTR('12345', 2))";
    sql(query)
        .withHive()
        .ok(expected)
        .withSpark()
        .ok(expectedSpark)
        .withBigQuery()
        .ok(expectedBigQuery)
        .withSnowflake()
        .ok(expectedSnowFlake);
  }
  /** NULL operand yields a NULL cast.
   * NOTE(review): duplicate of testToNumberFunctionHandlingNullOperand above —
   * one of the two could be removed. */
  @Test public void testToNumberFunctionHandlingWithNullArgument() {
    final String query = "SELECT TO_NUMBER (null)";
    final String expected = "SELECT CAST(NULL AS INT)";
    final String expectedBigQuery = "SELECT CAST(NULL AS INT64)";
    final String expectedSnowFlake = "SELECT TO_NUMBER(NULL)";
    sql(query)
        .withHive()
        .ok(expected)
        .withSpark()
        .ok(expected)
        .withBigQuery()
        .ok(expectedBigQuery)
        .withSnowflake()
        .ok(expectedSnowFlake);
  }
  /** TO_NUMBER inside a CASE WHEN ... IS NOT NULL test: the numeric rewrite
   * applies inside the condition while the THEN/ELSE branches pass through.
   * NOTE(review): the expected strings contain 'is_numeric ' (trailing space)
   * while the input query uses 'is_numeric' — confirm this reflects actual
   * converter output rather than a typo in the expectations. */
  @Test public void testToNumberFunctionHandlingCaseWhenThen() {
    final String query = "select case when TO_NUMBER('12.77') is not null then "
        + "'is_numeric' else 'is not numeric' end";
    final String expected = "SELECT CASE WHEN CAST('12.77' AS FLOAT) IS NOT NULL THEN "
        + "'is_numeric ' ELSE 'is not numeric' END";
    final String expectedBigQuery = "SELECT CASE WHEN CAST('12.77' AS FLOAT64) IS NOT NULL THEN "
        + "'is_numeric ' ELSE 'is not numeric' END";
    final String expectedSnowFlake = "SELECT CASE WHEN TO_NUMBER('12.77', 38, 2) IS NOT NULL THEN"
        + " 'is_numeric ' ELSE 'is not numeric' END";
    sql(query)
        .withHive()
        .ok(expected)
        .withSpark()
        .ok(expected)
        .withBigQuery()
        .ok(expectedBigQuery)
        .withSnowflake()
        .ok(expectedSnowFlake);
  }
  /** Combined 'G' (group), 'D' (decimal) and trailing 'S' (sign) mask: group
   * separator removed, sign moved to the front, FLOAT cast. MSSQL reuses the
   * Hive expectation. */
  @Test public void testToNumberFunctionHandlingWithGDS() {
    String query = "SELECT TO_NUMBER ('12,454.8-', '99G999D9S')";
    final String expected = "SELECT CAST('-12454.8' AS FLOAT)";
    final String expectedBigQuery = "SELECT CAST('-12454.8' AS FLOAT64)";
    final String expectedSnowFlake = "SELECT TO_NUMBER('-12454.8', 38, 1)";
    sql(query)
        .withHive()
        .ok(expected)
        .withSpark()
        .ok(expected)
        .withBigQuery()
        .ok(expectedBigQuery)
        .withSnowflake()
        .ok(expectedSnowFlake)
        .withMssql()
        .ok(expected);
  }
  /** ASCII of a literal: BigQuery emulates it with TO_CODE_POINTS(...)[OFFSET(0)];
   * Hive/Spark keep the native ASCII function. */
  @Test public void testAscii() {
    String query = "SELECT ASCII ('ABC')";
    final String expected = "SELECT ASCII('ABC')";
    final String expectedBigQuery = "SELECT TO_CODE_POINTS('ABC') [OFFSET(0)]";
    sql(query)
        .withBigQuery()
        .ok(expectedBigQuery)
        .withHive()
        .ok(expected)
        .withSpark()
        .ok(expected);
  }
  /** ASCII over a nested function call: the inner SUBSTRING is also dialect-
   * translated (SUBSTR on BigQuery). */
  @Test public void testAsciiMethodArgument() {
    String query = "SELECT ASCII (SUBSTRING('ABC',1,1))";
    final String expected = "SELECT ASCII(SUBSTRING('ABC', 1, 1))";
    final String expectedSpark = "SELECT ASCII(SUBSTRING('ABC', 1, 1))";
    final String expectedBigQuery = "SELECT TO_CODE_POINTS(SUBSTR('ABC', 1, 1)) [OFFSET(0)]";
    sql(query)
        .withBigQuery()
        .ok(expectedBigQuery)
        .withHive()
        .ok(expected)
        .withSpark()
        .ok(expectedSpark);
  }
@Test public void testAsciiColumnArgument() {
final String query = "select ASCII(\"product_name\") from \"product\" ";
final String bigQueryExpected = "SELECT TO_CODE_POINTS(product_name) [OFFSET(0)]\n"
+ "FROM foodmart.product";
final String hiveExpected = "SELECT ASCII(product_name)\n"
+ "FROM foodmart.product";
sql(query)
.withBigQuery()
.ok(bigQueryExpected)
.withHive()
.ok(hiveExpected);
}
  /** NULLIF built via RelBuilder: most dialects keep NULLIF, but Hive (which
   * lacks it) rewrites to IF(a = b, NULL, a). */
  @Test public void testNullIfFunctionRelToSql() {
    final RelBuilder builder = relBuilder();
    final RexNode nullifRexNode = builder.call(SqlStdOperatorTable.NULLIF,
        builder.scan("EMP").field(0), builder.literal(20));
    final RelNode root = builder
        .scan("EMP")
        .project(builder.alias(nullifRexNode, "NI"))
        .build();
    final String expectedSql = "SELECT NULLIF(\"EMPNO\", 20) AS \"NI\"\n"
        + "FROM \"scott\".\"EMP\"";
    final String expectedBiqQuery = "SELECT NULLIF(EMPNO, 20) AS NI\n"
        + "FROM scott.EMP";
    final String expectedSpark = "SELECT NULLIF(EMPNO, 20) NI\n"
        + "FROM scott.EMP";
    final String expectedHive = "SELECT IF(EMPNO = 20, NULL, EMPNO) NI\n"
        + "FROM scott.EMP";
    assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
    assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
    assertThat(toSql(root, DatabaseProduct.SPARK.getDialect()), isLinux(expectedSpark));
    assertThat(toSql(root, DatabaseProduct.HIVE.getDialect()), isLinux(expectedHive));
  }
@Test public void testCurrentUser() {
String query = "select CURRENT_USER";
final String expectedSql = "SELECT CURRENT_USER() CURRENT_USER";
final String expectedSqlBQ = "SELECT SESSION_USER() AS CURRENT_USER";
sql(query)
.withHive()
.ok(expectedSql)
.withBigQuery()
.ok(expectedSqlBQ);
}
@Test public void testCurrentUserWithAlias() {
String query = "select CURRENT_USER myuser from \"product\" where \"product_id\" = 1";
final String expectedSql = "SELECT CURRENT_USER() MYUSER\n"
+ "FROM foodmart.product\n"
+ "WHERE product_id = 1";
final String expected = "SELECT SESSION_USER() AS MYUSER\n"
+ "FROM foodmart.product\n"
+ "WHERE product_id = 1";
sql(query)
.withHive()
.ok(expectedSql)
.withBigQuery()
.ok(expected);
}
@Test void testSelectCountStar() {
final String query = "select count(*) from \"product\"";
final String expected = "SELECT COUNT(*)\n"
+ "FROM \"foodmart\".\"product\"";
Sql sql = sql(query);
sql.ok(expected);
}
  /** Multi-row INSERT ... VALUES with ROW constructors: with value
   * simplification (default) each dialect keeps a VALUES list; with
   * withSimplifyValues(false) the rows expand into UNION ALL of SELECTs, using
   * each dialect's dummy-FROM idiom (VALUES (0) / DUAL / none). */
  @Test void testRowValueExpression() {
    String sql = "insert into \"DEPT\"\n"
        + "values ROW(1,'Fred', 'San Francisco'),\n"
        + "  ROW(2, 'Eric', 'Washington')";
    final String expectedDefault = "INSERT INTO \"SCOTT\".\"DEPT\""
        + " (\"DEPTNO\", \"DNAME\", \"LOC\")\n"
        + "VALUES (1, 'Fred', 'San Francisco'),\n"
        + "(2, 'Eric', 'Washington')";
    final String expectedDefaultX = "INSERT INTO \"SCOTT\".\"DEPT\""
        + " (\"DEPTNO\", \"DNAME\", \"LOC\")\n"
        + "SELECT 1, 'Fred', 'San Francisco'\n"
        + "FROM (VALUES (0)) AS \"t\" (\"ZERO\")\n"
        + "UNION ALL\n"
        + "SELECT 2, 'Eric', 'Washington'\n"
        + "FROM (VALUES (0)) AS \"t\" (\"ZERO\")";
    final String expectedHive = "INSERT INTO SCOTT.DEPT (DEPTNO, DNAME, LOC)\n"
        + "VALUES (1, 'Fred', 'San Francisco'),\n"
        + "(2, 'Eric', 'Washington')";
    final String expectedHiveX = "INSERT INTO SCOTT.DEPT (DEPTNO, DNAME, LOC)\n"
        + "SELECT 1, 'Fred', 'San Francisco'\n"
        + "UNION ALL\n"
        + "SELECT 2, 'Eric', 'Washington'";
    final String expectedMysql = "INSERT INTO `SCOTT`.`DEPT`"
        + " (`DEPTNO`, `DNAME`, `LOC`)\n"
        + "VALUES (1, 'Fred', 'San Francisco'),\n"
        + "(2, 'Eric', 'Washington')";
    final String expectedMysqlX = "INSERT INTO `SCOTT`.`DEPT`"
        + " (`DEPTNO`, `DNAME`, `LOC`)\nSELECT 1, 'Fred', 'San Francisco'\n"
        + "UNION ALL\n"
        + "SELECT 2, 'Eric', 'Washington'";
    final String expectedOracle = "INSERT INTO \"SCOTT\".\"DEPT\""
        + " (\"DEPTNO\", \"DNAME\", \"LOC\")\n"
        + "VALUES (1, 'Fred', 'San Francisco'),\n"
        + "(2, 'Eric', 'Washington')";
    final String expectedOracleX = "INSERT INTO \"SCOTT\".\"DEPT\""
        + " (\"DEPTNO\", \"DNAME\", \"LOC\")\n"
        + "SELECT 1, 'Fred', 'San Francisco'\n"
        + "FROM \"DUAL\"\n"
        + "UNION ALL\n"
        + "SELECT 2, 'Eric', 'Washington'\n"
        + "FROM \"DUAL\"";
    final String expectedMssql = "INSERT INTO [SCOTT].[DEPT]"
        + " ([DEPTNO], [DNAME], [LOC])\n"
        + "VALUES (1, 'Fred', 'San Francisco'),\n"
        + "(2, 'Eric', 'Washington')";
    final String expectedMssqlX = "INSERT INTO [SCOTT].[DEPT]"
        + " ([DEPTNO], [DNAME], [LOC])\n"
        + "SELECT 1, 'Fred', 'San Francisco'\n"
        + "UNION ALL\n"
        + "SELECT 2, 'Eric', 'Washington'";
    final String expectedCalcite = "INSERT INTO \"SCOTT\".\"DEPT\""
        + " (\"DEPTNO\", \"DNAME\", \"LOC\")\n"
        + "VALUES (1, 'Fred', 'San Francisco'),\n"
        + "(2, 'Eric', 'Washington')";
    final String expectedCalciteX = "INSERT INTO \"SCOTT\".\"DEPT\""
        + " (\"DEPTNO\", \"DNAME\", \"LOC\")\n"
        + "SELECT 1, 'Fred', 'San Francisco'\n"
        + "FROM (VALUES (0)) AS \"t\" (\"ZERO\")\n"
        + "UNION ALL\n"
        + "SELECT 2, 'Eric', 'Washington'\n"
        + "FROM (VALUES (0)) AS \"t\" (\"ZERO\")";
    sql(sql)
        .schema(CalciteAssert.SchemaSpec.JDBC_SCOTT)
        .ok(expectedDefault)
        .withHive().ok(expectedHive)
        .withMysql().ok(expectedMysql)
        .withOracle().ok(expectedOracle)
        .withMssql().ok(expectedMssql)
        .withCalcite().ok(expectedCalcite)
        .withConfig(c ->
            c.withRelBuilderConfigTransform(b ->
                b.withSimplifyValues(false)))
        .withCalcite().ok(expectedDefaultX)
        .withHive().ok(expectedHiveX)
        .withMysql().ok(expectedMysqlX)
        .withOracle().ok(expectedOracleX)
        .withMssql().ok(expectedMssqlX)
        .withCalcite().ok(expectedCalciteX);
  }
@Test void testInsertValuesWithDynamicParams() {
final String sql = "insert into \"DEPT\" values (?,?,?), (?,?,?)";
final String expected = ""
+ "INSERT INTO \"SCOTT\".\"DEPT\" (\"DEPTNO\", \"DNAME\", \"LOC\")\n"
+ "SELECT ?, ?, ?\n"
+ "FROM (VALUES (0)) AS \"t\" (\"ZERO\")\n"
+ "UNION ALL\n"
+ "SELECT ?, ?, ?\n"
+ "FROM (VALUES (0)) AS \"t\" (\"ZERO\")";
sql(sql)
.schema(CalciteAssert.SchemaSpec.JDBC_SCOTT)
.ok(expected);
}
@Test void testInsertValuesWithExplicitColumnsAndDynamicParams() {
final String sql = ""
+ "insert into \"DEPT\" (\"DEPTNO\", \"DNAME\", \"LOC\")\n"
+ "values (?,?,?), (?,?,?)";
final String expected = ""
+ "INSERT INTO \"SCOTT\".\"DEPT\" (\"DEPTNO\", \"DNAME\", \"LOC\")\n"
+ "SELECT ?, ?, ?\n"
+ "FROM (VALUES (0)) AS \"t\" (\"ZERO\")\n"
+ "UNION ALL\n"
+ "SELECT ?, ?, ?\n"
+ "FROM (VALUES (0)) AS \"t\" (\"ZERO\")";
sql(sql)
.schema(CalciteAssert.SchemaSpec.JDBC_SCOTT)
.ok(expected);
}
@Test void testTableFunctionScan() {
final String query = "SELECT *\n"
+ "FROM TABLE(DEDUP(CURSOR(select \"product_id\", \"product_name\"\n"
+ "from \"product\"), CURSOR(select \"employee_id\", \"full_name\"\n"
+ "from \"employee\"), 'NAME'))";
final String expected = "SELECT *\n"
+ "FROM TABLE(DEDUP(CURSOR ((SELECT \"product_id\", \"product_name\"\n"
+ "FROM \"foodmart\".\"product\")), CURSOR ((SELECT \"employee_id\", \"full_name\"\n"
+ "FROM \"foodmart\".\"employee\")), 'NAME'))";
sql(query).ok(expected);
final String query2 = "select * from table(ramp(3))";
sql(query2).ok("SELECT *\n"
+ "FROM TABLE(RAMP(3))");
}
  /** Table function with complex CURSOR inputs (WHERE and GROUP BY inside the
   * cursors): the inner queries are converted and re-parenthesized intact. */
  @Test void testTableFunctionScanWithComplexQuery() {
    final String query = "SELECT *\n"
        + "FROM TABLE(DEDUP(CURSOR(select \"product_id\", \"product_name\"\n"
        + "from \"product\"\n"
        + "where \"net_weight\" > 100 and \"product_name\" = 'Hello World')\n"
        + ",CURSOR(select \"employee_id\", \"full_name\"\n"
        + "from \"employee\"\n"
        + "group by \"employee_id\", \"full_name\"), 'NAME'))";
    final String expected = "SELECT *\n"
        + "FROM TABLE(DEDUP(CURSOR ((SELECT \"product_id\", \"product_name\"\n"
        + "FROM \"foodmart\".\"product\"\n"
        + "WHERE \"net_weight\" > 100 AND \"product_name\" = 'Hello World')), "
        + "CURSOR ((SELECT \"employee_id\", \"full_name\"\n"
        + "FROM \"foodmart\".\"employee\"\n"
        + "GROUP BY \"employee_id\", \"full_name\")), 'NAME'))";
    sql(query).ok(expected);
  }
  /** Test case for
   * <a href="https://issues.apache.org/jira/browse/CALCITE-3593">[CALCITE-3593]
   * RelToSqlConverter changes target of ambiguous HAVING clause with a Project
   * on Filter on Aggregate</a>.
   *
   * <p>The test below is currently disabled (commented out); this Javadoc is
   * retained so the reference to CALCITE-3593 is not lost. */
/*@Test void testBigQueryHaving() {
final String sql = ""
+ "SELECT \"DEPTNO\" - 10 \"DEPT\"\n"
+ "FROM \"EMP\"\n"
+ "GROUP BY \"DEPTNO\"\n"
+ "HAVING \"DEPTNO\" > 0";
final String expected = ""
+ "SELECT DEPTNO - 10 AS DEPTNO\n"
+ "FROM (SELECT DEPTNO\n"
+ "FROM SCOTT.EMP\n"
+ "GROUP BY DEPTNO\n"
+ "HAVING DEPTNO > 0) AS t1";
// Parse the input SQL with PostgreSQL dialect,
// in which "isHavingAlias" is false.
final SqlParser.Config parserConfig =
PostgresqlSqlDialect.DEFAULT.configureParser(SqlParser.config());
// Convert rel node to SQL with BigQuery dialect,
// in which "isHavingAlias" is true.
sql(sql)
.parserConfig(parserConfig)
.schema(CalciteAssert.SchemaSpec.JDBC_SCOTT)
.withBigQuery()
.ok(expected);
}
*/
  /** CAST to TIMESTAMP: BigQuery maps SQL TIMESTAMP (no time zone) to its
   * DATETIME type; Hive/Spark keep TIMESTAMP. */
  @Test public void testCastToTimestamp() {
    String query = "SELECT cast(\"birth_date\" as TIMESTAMP) "
        + "FROM \"foodmart\".\"employee\"";
    final String expected = "SELECT CAST(birth_date AS TIMESTAMP)\n"
        + "FROM foodmart.employee";
    final String expectedBigQuery = "SELECT CAST(birth_date AS DATETIME)\n"
        + "FROM foodmart.employee";
    sql(query)
        .withHive()
        .ok(expected)
        .withSpark()
        .ok(expected)
        .withBigQuery()
        .ok(expectedBigQuery);
  }
  /** CAST to TIMESTAMP(3): fractional-second precision is emulated by
   * formatting then re-casting (DATE_FORMAT on Hive/Spark — note the differing
   * 'sss' vs 'SSS' millisecond patterns — FORMAT_TIMESTAMP on BigQuery). */
  @Test public void testCastToTimestampWithPrecision() {
    String query = "SELECT cast(\"birth_date\" as TIMESTAMP(3)) "
        + "FROM \"foodmart\".\"employee\"";
    final String expectedHive = "SELECT CAST(DATE_FORMAT(CAST(birth_date AS TIMESTAMP), "
        + "'yyyy-MM-dd HH:mm:ss.sss') AS TIMESTAMP)\n"
        + "FROM foodmart.employee";
    final String expectedSpark = "SELECT CAST(DATE_FORMAT(CAST(birth_date AS TIMESTAMP), "
        + "'yyyy-MM-dd HH:mm:ss.SSS') AS TIMESTAMP)\nFROM foodmart.employee";
    final String expectedBigQuery = "SELECT CAST(FORMAT_TIMESTAMP('%F %H:%M:%E3S', CAST"
        + "(birth_date AS DATETIME)) AS DATETIME)\n"
        + "FROM foodmart.employee";
    sql(query)
        .withHive()
        .ok(expectedHive)
        .withSpark()
        .ok(expectedSpark)
        .withBigQuery()
        .ok(expectedBigQuery);
  }
  /** CAST to TIME: Hive/Spark lack a TIME type and extract the time portion
   * via SPLIT(DATE_FORMAT(...), ' ')[1]; BigQuery casts natively. */
  @Test public void testCastToTime() {
    String query = "SELECT cast(\"hire_date\" as TIME) "
        + "FROM \"foodmart\".\"employee\"";
    final String expected = "SELECT SPLIT(DATE_FORMAT(hire_date, 'yyyy-MM-dd HH:mm:ss'), ' ')[1]\n"
        + "FROM foodmart.employee";
    final String expectedBigQuery = "SELECT CAST(hire_date AS TIME)\n"
        + "FROM foodmart.employee";
    sql(query)
        .withHive()
        .ok(expected)
        .withSpark()
        .ok(expected)
        .withBigQuery()
        .ok(expectedBigQuery);
  }
  /** CAST to TIME(5): like testCastToTime but with fractional seconds; note
   * Hive's 'sss' vs Spark's 'SSS' pattern, and BigQuery formats to
   * millisecond precision (%E3S) regardless of the requested 5 digits. */
  @Test public void testCastToTimeWithPrecision() {
    String query = "SELECT cast(\"hire_date\" as TIME(5)) "
        + "FROM \"foodmart\".\"employee\"";
    final String expectedHive = "SELECT SPLIT(DATE_FORMAT(hire_date, 'yyyy-MM-dd HH:mm:ss.sss'), "
        + "' ')[1]\n"
        + "FROM foodmart.employee";
    final String expectedSpark = "SELECT SPLIT(DATE_FORMAT(hire_date, 'yyyy-MM-dd HH:mm:ss.SSS'),"
        + " ' ')[1]\nFROM foodmart.employee";
    final String expectedBigQuery = "SELECT CAST(FORMAT_TIME('%H:%M:%E3S', CAST(hire_date AS TIME))"
        + " AS TIME)\n"
        + "FROM foodmart.employee";
    sql(query)
        .withHive()
        .ok(expectedHive)
        .withSpark()
        .ok(expectedSpark)
        .withBigQuery()
        .ok(expectedBigQuery);
  }
  /** CAST of a string concatenation to TIME(5): Hive/Spark drop the cast and
   * keep only the concatenation (CONCAT vs ||); BigQuery formats through
   * FORMAT_TIME; MSSQL caps precision at TIME(3). */
  @Test public void testCastToTimeWithPrecisionWithStringInput() {
    String query = "SELECT cast('12:00'||':05' as TIME(5)) "
        + "FROM \"foodmart\".\"employee\"";
    final String expectedHive = "SELECT CONCAT('12:00', ':05')\n"
        + "FROM foodmart.employee";
    final String expectedSpark = "SELECT '12:00' || ':05'\n"
        + "FROM foodmart.employee";
    final String expectedBigQuery = "SELECT CAST(FORMAT_TIME('%H:%M:%E3S', CAST(CONCAT('12:00', "
        + "':05') AS TIME)) AS TIME)\n"
        + "FROM foodmart.employee";
    final String mssql = "SELECT CAST(CONCAT('12:00', ':05') AS TIME(3))\n"
        + "FROM [foodmart].[employee]";
    sql(query)
        .withHive()
        .ok(expectedHive)
        .withSpark()
        .ok(expectedSpark)
        .withBigQuery()
        .ok(expectedBigQuery)
        .withMssql()
        .ok(mssql);
  }
/** CAST of a TIME string literal with precision: Hive and Spark emit the bare
 * string unchanged, while BigQuery folds it to a TIME literal padded with
 * fractional digits ('12:00:05.000'). */
@Test public void testCastToTimeWithPrecisionWithStringLiteral() {
  String query = "SELECT cast('12:00:05' as TIME(3)) "
      + "FROM \"foodmart\".\"employee\"";
  final String expectedHive = "SELECT '12:00:05'\n"
      + "FROM foodmart.employee";
  final String expectedSpark = expectedHive;
  final String expectedBigQuery = "SELECT TIME '12:00:05.000'\n"
      + "FROM foodmart.employee";
  sql(query)
      .withHive()
      .ok(expectedHive)
      .withSpark()
      .ok(expectedSpark)
      .withBigQuery()
      .ok(expectedBigQuery);
}
/** FORMAT_DATE('YYYY-MM-DD', col) built via RelBuilder: Calcite keeps the
 * call verbatim; BigQuery maps the pattern to '%F'; Hive/Spark render
 * DATE_FORMAT with 'yyyy-MM-dd'; Snowflake uses TO_VARCHAR with the original
 * pattern. Field 4 of EMP is HIREDATE. */
@Test public void testFormatDateRelToSql() {
  final RelBuilder builder = relBuilder();
  final RexNode formatDateRexNode = builder.call(SqlLibraryOperators.FORMAT_DATE,
      builder.literal("YYYY-MM-DD"), builder.scan("EMP").field(4));
  final RelNode root = builder
      .scan("EMP")
      .project(builder.alias(formatDateRexNode, "FD"))
      .build();
  final String expectedSql = "SELECT FORMAT_DATE('YYYY-MM-DD', \"HIREDATE\") AS \"FD\"\n"
      + "FROM \"scott\".\"EMP\"";
  final String expectedBiqQuery = "SELECT FORMAT_DATE('%F', HIREDATE) AS FD\n"
      + "FROM scott.EMP";
  final String expectedHive = "SELECT DATE_FORMAT(HIREDATE, 'yyyy-MM-dd') FD\n"
      + "FROM scott.EMP";
  final String expectedSnowFlake = "SELECT TO_VARCHAR(\"HIREDATE\", 'YYYY-MM-DD') AS \"FD\"\n"
      + "FROM \"scott\".\"EMP\"";
  final String expectedSpark = expectedHive;
  assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
  assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
  assertThat(toSql(root, DatabaseProduct.HIVE.getDialect()), isLinux(expectedHive));
  assertThat(toSql(root, DatabaseProduct.SPARK.getDialect()), isLinux(expectedSpark));
  assertThat(toSql(root, DatabaseProduct.SNOWFLAKE.getDialect()), isLinux(expectedSnowFlake));
}
/** FORMAT_DATE with 'W' (week-of-month) and 'WW' patterns: BigQuery has no
 * direct format element for either, so both are rewritten as
 * CAST(CEIL(EXTRACT(part FROM col) / 7) AS STRING) — DAY for 'W',
 * DAYOFYEAR for 'WW'. */
@Test public void testDOMAndDOY() {
  final RelBuilder builder = relBuilder();
  final RexNode dayOfMonthRexNode = builder.call(SqlLibraryOperators.FORMAT_DATE,
      builder.literal("W"), builder.scan("EMP").field(4));
  final RexNode dayOfYearRexNode = builder.call(SqlLibraryOperators.FORMAT_DATE,
      builder.literal("WW"), builder.scan("EMP").field(4));
  final RelNode domRoot = builder
      .scan("EMP")
      .project(builder.alias(dayOfMonthRexNode, "FD"))
      .build();
  final RelNode doyRoot = builder
      .scan("EMP")
      .project(builder.alias(dayOfYearRexNode, "FD"))
      .build();
  final String expectedDOMBiqQuery = "SELECT CAST(CEIL(EXTRACT(DAY "
      + "FROM HIREDATE) / 7) AS STRING) AS FD\n"
      + "FROM scott.EMP";
  final String expectedDOYBiqQuery = "SELECT CAST(CEIL(EXTRACT(DAYOFYEAR "
      + "FROM HIREDATE) / 7) AS STRING) AS FD\n"
      + "FROM scott.EMP";
  assertThat(toSql(doyRoot, DatabaseProduct.BIG_QUERY.getDialect()),
      isLinux(expectedDOYBiqQuery));
  assertThat(toSql(domRoot, DatabaseProduct.BIG_QUERY.getDialect()),
      isLinux(expectedDOMBiqQuery));
}
/** FORMAT_TIMESTAMP('YYYY-MM-DD HH:MI:SS.S(5)', col): 'HH' maps to the
 * 12-hour codes (%I for BigQuery, 'hh' for Hive/Spark) and S(5) to a
 * 5-digit fractional spec (%E5S / .sssss / .SSSSS — Hive lower-case,
 * Spark upper-case). Calcite keeps the call verbatim. */
@Test public void testFormatTimestampRelToSql() {
  final RelBuilder builder = relBuilder();
  final RexNode formatTimestampRexNode = builder.call(SqlLibraryOperators.FORMAT_TIMESTAMP,
      builder.literal("YYYY-MM-DD HH:MI:SS.S(5)"), builder.scan("EMP").field(4));
  final RelNode root = builder
      .scan("EMP")
      .project(builder.alias(formatTimestampRexNode, "FD"))
      .build();
  final String expectedSql = "SELECT FORMAT_TIMESTAMP('YYYY-MM-DD HH:MI:SS.S(5)', \"HIREDATE\") "
      + "AS \"FD\"\n"
      + "FROM \"scott\".\"EMP\"";
  final String expectedSpark = "SELECT DATE_FORMAT(HIREDATE, 'yyyy-MM-dd hh:mm:ss.SSSSS') FD\n"
      + "FROM scott.EMP";
  final String expectedBiqQuery = "SELECT FORMAT_TIMESTAMP('%F %I:%M:%E5S', HIREDATE) AS FD\n"
      + "FROM scott.EMP";
  final String expectedHive = "SELECT DATE_FORMAT(HIREDATE, 'yyyy-MM-dd hh:mm:ss.sssss') FD\n"
      + "FROM scott.EMP";
  assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
  assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
  assertThat(toSql(root, DatabaseProduct.HIVE.getDialect()), isLinux(expectedHive));
  assertThat(toSql(root, DatabaseProduct.SPARK.getDialect()), isLinux(expectedSpark));
}
/** Exercises a battery of FORMAT_TIMESTAMP patterns and their BigQuery
 * strftime-style mappings: HH24→%H, MI→%M, SS→%S, YYYY→%Y, MM→%m, DD→%d,
 * 12-hour HH→%I, MS→%E. Calcite renders the original patterns unchanged. */
@Test public void testFormatTimestampFormatsRelToSql() {
  final RelBuilder builder = relBuilder();
  final RexNode formatTimestampRexNode2 = builder.call(SqlLibraryOperators.FORMAT_TIMESTAMP,
      builder.literal("HH24MI"), builder.scan("EMP").field(4));
  final RexNode formatTimestampRexNode3 = builder.call(SqlLibraryOperators.FORMAT_TIMESTAMP,
      builder.literal("HH24MISS"), builder.scan("EMP").field(4));
  final RexNode formatTimestampRexNode4 = builder.call(SqlLibraryOperators.FORMAT_TIMESTAMP,
      builder.literal("YYYYMMDDHH24MISS"), builder.scan("EMP").field(4));
  final RexNode formatTimestampRexNode5 = builder.call(SqlLibraryOperators.FORMAT_TIMESTAMP,
      builder.literal("YYYYMMDDHHMISS"), builder.scan("EMP").field(4));
  final RexNode formatTimestampRexNode6 = builder.call(SqlLibraryOperators.FORMAT_TIMESTAMP,
      builder.literal("YYYYMMDDHH24MI"), builder.scan("EMP").field(4));
  final RexNode formatTimestampRexNode7 = builder.call(SqlLibraryOperators.FORMAT_TIMESTAMP,
      builder.literal("YYYYMMDDHH24"), builder.scan("EMP").field(4));
  final RexNode formatTimestampRexNode8 = builder.call(SqlLibraryOperators.FORMAT_TIMESTAMP,
      builder.literal("MS"), builder.scan("EMP").field(4))
;
  final RelNode root = builder
      .scan("EMP")
      .project(builder.alias(formatTimestampRexNode2, "FD2"),
          builder.alias(formatTimestampRexNode3, "FD3"),
          builder.alias(formatTimestampRexNode4, "FD4"),
          builder.alias(formatTimestampRexNode5, "FD5"),
          builder.alias(formatTimestampRexNode6, "FD6"),
          builder.alias(formatTimestampRexNode7, "FD7"),
          builder.alias(formatTimestampRexNode8, "FD8"))
      .build();
  final String expectedSql = "SELECT FORMAT_TIMESTAMP('HH24MI', \"HIREDATE\") AS \"FD2\", "
      + "FORMAT_TIMESTAMP('HH24MISS', \"HIREDATE\") AS \"FD3\", "
      + "FORMAT_TIMESTAMP('YYYYMMDDHH24MISS', \"HIREDATE\") AS \"FD4\", "
      + "FORMAT_TIMESTAMP('YYYYMMDDHHMISS', \"HIREDATE\") AS \"FD5\", FORMAT_TIMESTAMP"
      + "('YYYYMMDDHH24MI', \"HIREDATE\") AS \"FD6\", FORMAT_TIMESTAMP('YYYYMMDDHH24', "
      + "\"HIREDATE\") AS \"FD7\", FORMAT_TIMESTAMP('MS', \"HIREDATE\") AS \"FD8\"\n"
      + "FROM \"scott\".\"EMP\"";
  final String expectedBiqQuery = "SELECT FORMAT_TIMESTAMP('%H%M', HIREDATE) AS FD2, "
      + "FORMAT_TIMESTAMP('%H%M%S', HIREDATE) AS FD3, FORMAT_TIMESTAMP('%Y%m%d%H%M%S', "
      + "HIREDATE) AS FD4, FORMAT_TIMESTAMP('%Y%m%d%I%M%S', HIREDATE) AS FD5, FORMAT_TIMESTAMP"
      + "('%Y%m%d%H%M', HIREDATE) AS FD6, FORMAT_TIMESTAMP('%Y%m%d%H', HIREDATE) AS FD7, "
      + "FORMAT_TIMESTAMP('%E', HIREDATE) AS FD8\n"
      + "FROM scott.EMP";
  assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
  assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
}
/** FORMAT_TIME('HH:MI:SS', col): BigQuery maps to 12-hour '%I:%M:%S';
 * Hive/Spark render DATE_FORMAT with 'hh:mm:ss'; Calcite is verbatim. */
@Test public void testFormatTimeRelToSql() {
  final RelBuilder builder = relBuilder();
  final RexNode formatTimeRexNode = builder.call(SqlLibraryOperators.FORMAT_TIME,
      builder.literal("HH:MI:SS"), builder.scan("EMP").field(4));
  final RelNode root = builder
      .scan("EMP")
      .project(builder.alias(formatTimeRexNode, "FD"))
      .build();
  final String expectedSql = "SELECT FORMAT_TIME('HH:MI:SS', \"HIREDATE\") AS \"FD\"\n"
      + "FROM \"scott\".\"EMP\"";
  final String expectedBiqQuery = "SELECT FORMAT_TIME('%I:%M:%S', HIREDATE) AS FD\n"
      + "FROM scott.EMP";
  final String expectedHive = "SELECT DATE_FORMAT(HIREDATE, 'hh:mm:ss') FD\n"
      + "FROM scott.EMP";
  final String expectedSpark = expectedHive;
  assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
  assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
  assertThat(toSql(root, DatabaseProduct.HIVE.getDialect()), isLinux(expectedHive));
  assertThat(toSql(root, DatabaseProduct.SPARK.getDialect()), isLinux(expectedSpark));
}
/** STR_TO_DATE with two pattern flavors: BigQuery rewrites to PARSE_DATE
 * with strftime codes; Hive/Spark go through
 * CAST(FROM_UNIXTIME(UNIX_TIMESTAMP(str, pattern), 'yyyy-MM-dd') AS DATE);
 * Snowflake maps to TO_DATE with the original pattern. */
@Test public void testStrToDateRelToSql() {
  final RelBuilder builder = relBuilder();
  final RexNode strToDateNode1 = builder.call(SqlLibraryOperators.STR_TO_DATE,
      builder.literal("20181106"), builder.literal("YYYYMMDD"));
  final RexNode strToDateNode2 = builder.call(SqlLibraryOperators.STR_TO_DATE,
      builder.literal("2018/11/06"), builder.literal("YYYY/MM/DD"));
  final RelNode root = builder
      .scan("EMP")
      .project(builder.alias(strToDateNode1, "date1"), builder.alias(strToDateNode2, "date2"))
      .build();
  final String expectedSql = "SELECT STR_TO_DATE('20181106', 'YYYYMMDD') AS \"date1\", "
      + "STR_TO_DATE('2018/11/06', 'YYYY/MM/DD') AS \"date2\"\n"
      + "FROM \"scott\".\"EMP\"";
  final String expectedBiqQuery = "SELECT PARSE_DATE('%Y%m%d', '20181106') AS date1, "
      + "PARSE_DATE('%Y/%m/%d', '2018/11/06') AS date2\n"
      + "FROM scott.EMP";
  final String expectedHive = "SELECT CAST(FROM_UNIXTIME("
      + "UNIX_TIMESTAMP('20181106', 'yyyyMMdd'), 'yyyy-MM-dd') AS DATE) date1, "
      + "CAST(FROM_UNIXTIME(UNIX_TIMESTAMP('2018/11/06', 'yyyy/MM/dd'), 'yyyy-MM-dd') AS DATE) date2\n"
      + "FROM scott.EMP";
  final String expectedSpark = expectedHive;
  final String expectedSnowflake =
      "SELECT TO_DATE('20181106', 'YYYYMMDD') AS \"date1\", "
      + "TO_DATE('2018/11/06', 'YYYY/MM/DD') AS \"date2\"\n"
      + "FROM \"scott\".\"EMP\"";
  assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
  assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
  assertThat(toSql(root, DatabaseProduct.HIVE.getDialect()), isLinux(expectedHive));
  assertThat(toSql(root, DatabaseProduct.SPARK.getDialect()), isLinux(expectedSpark));
  assertThat(toSql(root, DatabaseProduct.SNOWFLAKE.getDialect()), isLinux(expectedSnowflake));
}
/** FORMAT_DATETIME over string literals: pattern elements translate to
 * BigQuery %-codes (DD→%d, MM→%m, YY→%y) and Spark DATE_FORMAT patterns,
 * while literal fragments such as '01' and ' 00:00:00' pass through
 * unchanged. */
@Test public void testFormatDatetimeRelToSql() {
  final RelBuilder builder = relBuilder();
  final RexNode formatDateNode1 = builder.call(SqlLibraryOperators.FORMAT_DATETIME,
      builder.literal("DDMMYY"), builder.literal("2008-12-25 15:30:00"));
  final RexNode formatDateNode2 = builder.call(SqlLibraryOperators.FORMAT_DATETIME,
      builder.literal("YY/MM/DD"), builder.literal("2012-12-25 12:50:10"));
  final RexNode formatDateNode3 = builder.call(SqlLibraryOperators.FORMAT_DATETIME,
      builder.literal("YY-MM-01"), builder.literal("2012-12-25 12:50:10"));
  final RexNode formatDateNode4 = builder.call(SqlLibraryOperators.FORMAT_DATETIME,
      builder.literal("YY-MM-DD 00:00:00"), builder.literal("2012-12-25 12:50:10"));
  final RelNode root = builder
      .scan("EMP")
      .project(builder.alias(formatDateNode1, "date1"),
          builder.alias(formatDateNode2, "date2"),
          builder.alias(formatDateNode3, "date3"),
          builder.alias(formatDateNode4, "date4"))
      .build();
  final String expectedSql = "SELECT FORMAT_DATETIME('DDMMYY', '2008-12-25 15:30:00') AS "
      + "\"date1\", FORMAT_DATETIME('YY/MM/DD', '2012-12-25 12:50:10') AS \"date2\", "
      + "FORMAT_DATETIME('YY-MM-01', '2012-12-25 12:50:10') AS \"date3\", FORMAT_DATETIME"
      + "('YY-MM-DD 00:00:00', '2012-12-25 12:50:10') AS \"date4\"\n"
      + "FROM \"scott\".\"EMP\"";
  final String expectedBiqQuery = "SELECT FORMAT_DATETIME('%d%m%y', '2008-12-25 15:30:00') "
      + "AS date1, FORMAT_DATETIME('%y/%m/%d', '2012-12-25 12:50:10') AS date2,"
      + " FORMAT_DATETIME('%y-%m-01', '2012-12-25 12:50:10') AS date3,"
      + " FORMAT_DATETIME('%y-%m-%d 00:00:00', '2012-12-25 12:50:10') AS date4\n"
      + "FROM scott.EMP";
  final String expectedSpark = "SELECT DATE_FORMAT('2008-12-25 15:30:00', 'ddMMyy') date1, "
      + "DATE_FORMAT('2012-12-25 12:50:10', 'yy/MM/dd') date2,"
      + " DATE_FORMAT('2012-12-25 12:50:10', 'yy-MM-01') date3,"
      + " DATE_FORMAT('2012-12-25 12:50:10', 'yy-MM-dd 00:00:00') date4\n"
      + "FROM scott.EMP";
  assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
  assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
  assertThat(toSql(root, DatabaseProduct.SPARK.getDialect()), isLinux(expectedSpark));
}
/** PARSE_TIMESTAMP format translation: a battery of 17 patterns covering
 * separators ('@', spaces), 12- vs 24-hour fields (hh/HH), literal text
 * ('T'), timezone specs (z/ZZ) and fractional seconds (S(3)). BigQuery
 * output is PARSE_DATETIME with strftime codes; Calcite renders the calls
 * verbatim (note the embedded single quotes around 'T' are doubled there).
 * The aliases "date1"/"date2" are used twice in the projection, so Calcite
 * deduplicates the second occurrences to "date10"/"date20" in the output. */
@Test public void testParseTimestampFunctionFormat() {
  final RelBuilder builder = relBuilder();
  final RexNode parseTSNode1 = builder.call(SqlLibraryOperators.PARSE_TIMESTAMP,
      builder.literal("YYYY-MM-dd HH24:MI:SS"), builder.literal("2009-03-20 12:25:50"));
  final RexNode parseTSNode2 = builder.call(SqlLibraryOperators.PARSE_TIMESTAMP,
      builder.literal("MI dd-YYYY-MM SS HH24"), builder.literal("25 20-2009-03 50 12"));
  final RexNode parseTSNode3 = builder.call(SqlLibraryOperators.PARSE_TIMESTAMP,
      builder.literal("yyyy@MM@dd@hh@mm@ss"), builder.literal("20200903020211"));
  final RexNode parseTSNode4 = builder.call(SqlLibraryOperators.PARSE_TIMESTAMP,
      builder.literal("yyyy@MM@dd@HH@mm@ss"), builder.literal("20200903210211"))
;
  final RexNode parseTSNode5 = builder.call(SqlLibraryOperators.PARSE_TIMESTAMP,
      builder.literal("HH@mm@ss"), builder.literal("215313"));
  final RexNode parseTSNode6 = builder.call(SqlLibraryOperators.PARSE_TIMESTAMP,
      builder.literal("MM@dd@yy"), builder.literal("090415"));
  final RexNode parseTSNode7 = builder.call(SqlLibraryOperators.PARSE_TIMESTAMP,
      builder.literal("MM@dd@yy"), builder.literal("Jun1215"));
  final RexNode parseTSNode8 = builder.call(SqlLibraryOperators.PARSE_TIMESTAMP,
      builder.literal("yyyy@MM@dd@HH"), builder.literal("2015061221"));
  final RexNode parseTSNode9 = builder.call(SqlLibraryOperators.PARSE_TIMESTAMP,
      builder.literal("yyyy@dd@mm"), builder.literal("20150653"));
  final RexNode parseTSNode10 = builder.call(SqlLibraryOperators.PARSE_TIMESTAMP,
      builder.literal("yyyy@mm@dd"), builder.literal("20155308"));
  final RexNode parseTSNode11 = builder.call(SqlLibraryOperators.PARSE_TIMESTAMP,
      builder.literal("YYYY-MM-dd@HH:mm:ss"), builder.literal("2009-03-2021:25:50"));
  final RexNode parseTSNode12 = builder.call(SqlLibraryOperators.PARSE_TIMESTAMP,
      builder.literal("YYYY-MM-dd@hh:mm:ss"), builder.literal("2009-03-2007:25:50"));
  final RexNode parseTSNode13 = builder.call(SqlLibraryOperators.PARSE_TIMESTAMP,
      builder.literal("YYYY-MM-dd@hh:mm:ss z"), builder.literal("2009-03-20 12:25:50.222"));
  final RexNode parseTSNode14 = builder.call(SqlLibraryOperators.PARSE_TIMESTAMP,
      builder.literal("YYYY-MM-dd'T'hh:mm:ss"), builder.literal("2012-05-09T04:12:12"));
  final RexNode parseTSNode15 = builder.call(SqlLibraryOperators.PARSE_TIMESTAMP,
      builder.literal("yyyy- MM-dd HH: -mm:ss"), builder.literal("2015- 09-11 09: -07:23"));
  final RexNode parseTSNode16 = builder.call(SqlLibraryOperators.PARSE_TIMESTAMP,
      builder.literal("yyyy- MM-dd@HH: -mm:ss"), builder.literal("2015- 09-1109: -07:23"));
  final RexNode parseTSNode17 = builder.call(SqlLibraryOperators.PARSE_TIMESTAMP,
      builder.literal("yyyy-MM-dd-HH:mm:ss.S(3)@ZZ"), builder.literal("2015-09-11-09:07:23"));
  final RelNode root = builder
      .scan("EMP")
      .project(builder.alias(parseTSNode1, "date1"), builder.alias(parseTSNode2, "date2"),
          builder.alias(parseTSNode3, "timestamp1"), builder.alias(parseTSNode4, "timestamp2"),
          builder.alias(parseTSNode5, "time1"), builder.alias(parseTSNode6, "date1"),
          builder.alias(parseTSNode7, "date2"), builder.alias(parseTSNode8, "date3"),
          builder.alias(parseTSNode9, "date5"),
          builder.alias(parseTSNode10, "date6"), builder.alias(parseTSNode11, "timestamp3"),
          builder.alias(parseTSNode12, "timestamp4"), builder.alias(parseTSNode13, "timestamp5"),
          builder.alias(parseTSNode14, "timestamp6"), builder.alias(parseTSNode15, "timestamp7"),
          builder.alias(parseTSNode16, "timestamp8"), builder.alias(parseTSNode17, "timestamp9"))
      .build();
  final String expectedSql =
      "SELECT PARSE_TIMESTAMP('YYYY-MM-dd HH24:MI:SS', '2009-03-20 12:25:50') AS \"date1\","
      + " PARSE_TIMESTAMP('MI dd-YYYY-MM SS HH24', '25 20-2009-03 50 12') AS \"date2\","
      + " PARSE_TIMESTAMP('yyyy@MM@dd@hh@mm@ss', '20200903020211') AS \"timestamp1\","
      + " PARSE_TIMESTAMP('yyyy@MM@dd@HH@mm@ss', '20200903210211') AS \"timestamp2\","
      + " PARSE_TIMESTAMP('HH@mm@ss', '215313') AS \"time1\", "
      + "PARSE_TIMESTAMP('MM@dd@yy', '090415') AS \"date10\", "
      + "PARSE_TIMESTAMP('MM@dd@yy', 'Jun1215') AS \"date20\", "
      + "PARSE_TIMESTAMP('yyyy@MM@dd@HH', '2015061221') AS \"date3\", "
      + "PARSE_TIMESTAMP('yyyy@dd@mm', '20150653') AS \"date5\", "
      + "PARSE_TIMESTAMP('yyyy@mm@dd', '20155308') AS \"date6\", "
      + "PARSE_TIMESTAMP('YYYY-MM-dd@HH:mm:ss', '2009-03-2021:25:50') AS \"timestamp3\", "
      + "PARSE_TIMESTAMP('YYYY-MM-dd@hh:mm:ss', '2009-03-2007:25:50') AS \"timestamp4\", "
      + "PARSE_TIMESTAMP('YYYY-MM-dd@hh:mm:ss z', '2009-03-20 12:25:50.222') AS \"timestamp5\", "
      + "PARSE_TIMESTAMP('YYYY-MM-dd''T''hh:mm:ss', '2012-05-09T04:12:12') AS \"timestamp6\""
      + ", PARSE_TIMESTAMP('yyyy- MM-dd HH: -mm:ss', '2015- 09-11 09: -07:23') AS \"timestamp7\""
      + ", PARSE_TIMESTAMP('yyyy- MM-dd@HH: -mm:ss', '2015- 09-1109: -07:23') AS \"timestamp8\""
      + ", PARSE_TIMESTAMP('yyyy-MM-dd-HH:mm:ss.S(3)@ZZ', '2015-09-11-09:07:23') AS \"timestamp9\"\n"
      + "FROM \"scott\".\"EMP\"";
  final String expectedBiqQuery =
      "SELECT PARSE_DATETIME('%F %H:%M:%S', '2009-03-20 12:25:50') AS date1,"
      + " PARSE_DATETIME('%M %d-%Y-%m %S %H', '25 20-2009-03 50 12') AS date2,"
      + " PARSE_DATETIME('%Y%m%d%I%m%S', '20200903020211') AS timestamp1,"
      + " PARSE_DATETIME('%Y%m%d%I%m%S', '20200903210211') AS timestamp2,"
      + " PARSE_DATETIME('%I%m%S', '215313') AS time1,"
      + " PARSE_DATETIME('%m%d%y', '090415') AS date10,"
      + " PARSE_DATETIME('%m%d%y', 'Jun1215') AS date20,"
      + " PARSE_DATETIME('%Y%m%d%I', '2015061221') AS date3,"
      + " PARSE_DATETIME('%Y%d%m', '20150653') AS date5,"
      + " PARSE_DATETIME('%Y%m%d', '20155308') AS date6,"
      + " PARSE_DATETIME('%F%I:%m:%S', '2009-03-2021:25:50') AS timestamp3,"
      + " PARSE_DATETIME('%F%I:%m:%S', '2009-03-2007:25:50') AS timestamp4, "
      + "PARSE_DATETIME('%F%I:%m:%S %Z', '2009-03-20 12:25:50.222') AS timestamp5, "
      + "PARSE_DATETIME('%FT%I:%m:%S', '2012-05-09T04:12:12') AS timestamp6,"
      + " PARSE_DATETIME('%Y- %m-%d %I: -%m:%S', '2015- 09-11 09: -07:23') AS timestamp7,"
      + " PARSE_DATETIME('%Y- %m-%d%I: -%m:%S', '2015- 09-1109: -07:23') AS timestamp8,"
      + " PARSE_DATETIME('%F-%I:%m:%E3S%Ez', '2015-09-11-09:07:23') AS timestamp9\n"
      + "FROM scott.EMP";
  assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
  assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
}
/** TO_TIMESTAMP(value, format): BigQuery rewrites to PARSE_DATETIME with
 * strftime codes and swaps the operand order (format first). */
@Test public void testToTimestampFunction() {
  final RelBuilder builder = relBuilder();
  final RexNode parseTSNode1 = builder.call(SqlLibraryOperators.TO_TIMESTAMP,
      builder.literal("2009-03-20 12:25:50"), builder.literal("yyyy-MM-dd HH24:MI:SS"));
  final RelNode root = builder
      .scan("EMP")
      .project(builder.alias(parseTSNode1, "timestamp_value"))
      .build();
  final String expectedSql =
      "SELECT TO_TIMESTAMP('2009-03-20 12:25:50', 'yyyy-MM-dd HH24:MI:SS') AS "
      + "\"timestamp_value\"\nFROM \"scott\".\"EMP\"";
  final String expectedBiqQuery =
      "SELECT PARSE_DATETIME('%F %H:%M:%S', '2009-03-20 12:25:50') AS timestamp_value\n"
      + "FROM scott.EMP";
  assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
  assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
}
/** TO_TIMESTAMP with a month-name pattern: Snowflake keeps the function but
 * translates 'MMM' to its 'MON' format element; note the expected Snowflake
 * output has a space before the comma separator ("' , '") — presumably how
 * the dialect unparses, confirm before changing. NOTE(review): method name
 * lacks the usual "test" prefix; it still runs via the @Test annotation. */
@Test public void toTimestampFunction() {
  final RelBuilder builder = relBuilder();
  final RexNode parseTSNode1 = builder.call(SqlLibraryOperators.TO_TIMESTAMP,
      builder.literal("Jan 15, 1989, 11:00:06 AM"), builder.literal("MMM dd, YYYY,HH:MI:SS AM"));
  final RelNode root = builder
      .scan("EMP")
      .project(builder.alias(parseTSNode1, "timestamp_value"))
      .build();
  final String expectedSql =
      "SELECT TO_TIMESTAMP('Jan 15, 1989, 11:00:06 AM', 'MMM dd, YYYY,HH:MI:SS AM') AS "
      + "\"timestamp_value\"\nFROM \"scott\".\"EMP\"";
  final String expectedSF =
      "SELECT TO_TIMESTAMP('Jan 15, 1989, 11:00:06 AM' , 'MON DD, YYYY,HH:MI:SS AM') AS "
      + "\"timestamp_value\"\nFROM \"scott\".\"EMP\"";
  assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
  assertThat(toSql(root, DatabaseProduct.SNOWFLAKE.getDialect()), isLinux(expectedSF));
}
/** DATE_DIFF with two operands renders unchanged on both Calcite and
 * BigQuery (only quoting style differs). Name lacks the "test" prefix but
 * runs via @Test. */
@Test public void datediffFunctionWithTwoOperands() {
  final RelBuilder builder = relBuilder();
  final RexNode parseTSNode1 = builder.call(SqlLibraryOperators.DATE_DIFF,
      builder.literal("1994-07-21"), builder.literal("1993-07-21"));
  final RelNode root = builder
      .scan("EMP")
      .project(builder.alias(parseTSNode1, "date_diff_value"))
      .build();
  final String expectedSql =
      "SELECT DATE_DIFF('1994-07-21', '1993-07-21') AS \"date_diff_value\"\n"
      + "FROM \"scott\".\"EMP\"";
  final String expectedBQ =
      "SELECT DATE_DIFF('1994-07-21', '1993-07-21') AS date_diff_value\n"
      + "FROM scott.EMP";
  assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
  assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBQ));
}
/** DATE_DIFF with a unit operand: BigQuery renders the third argument
 * unquoted (Month) — presumably because BigQuery treats the date part as an
 * identifier rather than a string; confirm against the dialect unparser. */
@Test public void datediffFunctionWithThreeOperands() {
  final RelBuilder builder = relBuilder();
  final RexNode parseTSNode1 = builder.call(SqlLibraryOperators.DATE_DIFF,
      builder.literal("1994-07-21"), builder.literal("1993-07-21"), builder.literal("Month"));
  final RelNode root = builder
      .scan("EMP")
      .project(builder.alias(parseTSNode1, "date_diff_value"))
      .build();
  final String expectedSql =
      "SELECT DATE_DIFF('1994-07-21', '1993-07-21', 'Month') AS \"date_diff_value\"\n"
      + "FROM \"scott\".\"EMP\"";
  final String expectedBQ =
      "SELECT DATE_DIFF('1994-07-21', '1993-07-21', Month) AS date_diff_value\n"
      + "FROM scott.EMP";
  assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
  assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBQ));
}
/** TO_DATE(value, format): BigQuery rewrites to
 * DATE(PARSE_DATETIME(strftime-format, value)). */
@Test public void testToDateFunction() {
  final RelBuilder builder = relBuilder();
  final RexNode parseTSNode1 = builder.call(SqlLibraryOperators.TO_DATE,
      builder.literal("2009/03/20"), builder.literal("yyyy/MM/dd"));
  final RelNode root = builder
      .scan("EMP")
      .project(builder.alias(parseTSNode1, "date_value"))
      .build();
  final String expectedSql =
      "SELECT TO_DATE('2009/03/20', 'yyyy/MM/dd') AS \"date_value\"\n"
      + "FROM \"scott\".\"EMP\"";
  final String expectedBiqQuery =
      "SELECT DATE(PARSE_DATETIME('%Y/%m/%d', '2009/03/20')) AS date_value\n"
      + "FROM scott.EMP";
  assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
  assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
}
/** Fluid interface to run tests.
 *
 * <p>Immutable: every {@code with*}/{@code dialect}/{@code relFn}/config
 * method returns a new {@code Sql} carrying the changed field, so a single
 * statement can chain several dialect/expectation pairs. The terminal
 * operations are {@link #ok} (assert rendered SQL) and {@link #throws_}
 * (assert failure message). */
static class Sql {
  private final SchemaPlus schema;
  private final String sql;
  private final SqlDialect dialect;
  // Alternative input: build the RelNode directly instead of parsing `sql`.
  private final Function<RelBuilder, RelNode> relFn;
  // Post-processing steps applied to the RelNode before rendering.
  private final List<Function<RelNode, RelNode>> transforms;
  private final SqlParser.Config parserConfig;
  private final UnaryOperator<SqlToRelConverter.Config> config;

  /** Creates a Sql from a schema spec; materializes the schema eagerly. */
  Sql(CalciteAssert.SchemaSpec schemaSpec, String sql, SqlDialect dialect,
      SqlParser.Config parserConfig,
      UnaryOperator<SqlToRelConverter.Config> config,
      Function<RelBuilder, RelNode> relFn,
      List<Function<RelNode, RelNode>> transforms) {
    final SchemaPlus rootSchema = Frameworks.createRootSchema(true);
    this.schema = CalciteAssert.addSchema(rootSchema, schemaSpec);
    this.sql = sql;
    this.dialect = dialect;
    this.relFn = relFn;
    // Defensive copy: keeps this instance immutable.
    this.transforms = ImmutableList.copyOf(transforms);
    this.parserConfig = parserConfig;
    this.config = config;
  }

  /** Creates a Sql over an already-built schema (used by the copy methods). */
  Sql(SchemaPlus schema, String sql, SqlDialect dialect,
      SqlParser.Config parserConfig,
      UnaryOperator<SqlToRelConverter.Config> config,
      Function<RelBuilder, RelNode> relFn,
      List<Function<RelNode, RelNode>> transforms) {
    this.schema = schema;
    this.sql = sql;
    this.dialect = dialect;
    this.relFn = relFn;
    this.transforms = ImmutableList.copyOf(transforms);
    this.parserConfig = parserConfig;
    this.config = config;
  }

  /** Returns a copy targeting the given dialect. */
  Sql dialect(SqlDialect dialect) {
    return new Sql(schema, sql, dialect, parserConfig, config, relFn,
        transforms);
  }

  /** Returns a copy that builds its RelNode via {@code relFn} instead of
   * parsing {@code sql}. */
  Sql relFn(Function<RelBuilder, RelNode> relFn) {
    return new Sql(schema, sql, dialect, parserConfig, config, relFn,
        transforms);
  }

  // ---- Dialect shortcuts -------------------------------------------------

  Sql withCalcite() {
    return dialect(SqlDialect.DatabaseProduct.CALCITE.getDialect());
  }

  Sql withClickHouse() {
    return dialect(SqlDialect.DatabaseProduct.CLICKHOUSE.getDialect());
  }

  Sql withDb2() {
    return dialect(SqlDialect.DatabaseProduct.DB2.getDialect());
  }

  Sql withHive() {
    return dialect(SqlDialect.DatabaseProduct.HIVE.getDialect());
  }

  /** Hive 2.1 with LOW null collation, for version-sensitive rendering. */
  Sql withHive2() {
    return dialect(
        new HiveSqlDialect(HiveSqlDialect.DEFAULT_CONTEXT
            .withDatabaseMajorVersion(2)
            .withDatabaseMinorVersion(1)
            .withNullCollation(NullCollation.LOW)));
  }

  Sql withHsqldb() {
    return dialect(SqlDialect.DatabaseProduct.HSQLDB.getDialect());
  }

  Sql withMssql() {
    return withMssql(14); // MSSQL 2008 = 10.0, 2012 = 11.0, 2017 = 14.0
  }

  Sql withMssql(int majorVersion) {
    final SqlDialect mssqlDialect = DatabaseProduct.MSSQL.getDialect();
    return dialect(
        new MssqlSqlDialect(MssqlSqlDialect.DEFAULT_CONTEXT
            .withDatabaseMajorVersion(majorVersion)
            .withIdentifierQuoteString(mssqlDialect.quoteIdentifier("")
                .substring(0, 1))
            .withNullCollation(mssqlDialect.getNullCollation())))
;
  }

  Sql withMysql() {
    return dialect(SqlDialect.DatabaseProduct.MYSQL.getDialect());
  }

  /** MySQL pinned to major version 8. */
  Sql withMysql8() {
    final SqlDialect mysqlDialect = DatabaseProduct.MYSQL.getDialect();
    return dialect(
        new SqlDialect(MysqlSqlDialect.DEFAULT_CONTEXT
            .withDatabaseMajorVersion(8)
            .withIdentifierQuoteString(mysqlDialect.quoteIdentifier("")
                .substring(0, 1))
            .withNullCollation(mysqlDialect.getNullCollation())));
  }

  Sql withOracle() {
    return dialect(SqlDialect.DatabaseProduct.ORACLE.getDialect());
  }

  Sql withPostgresql() {
    return dialect(SqlDialect.DatabaseProduct.POSTGRESQL.getDialect());
  }

  Sql withPresto() {
    return dialect(DatabaseProduct.PRESTO.getDialect());
  }

  Sql withRedshift() {
    return dialect(DatabaseProduct.REDSHIFT.getDialect());
  }

  Sql withSnowflake() {
    return dialect(DatabaseProduct.SNOWFLAKE.getDialect());
  }

  Sql withSybase() {
    return dialect(DatabaseProduct.SYBASE.getDialect());
  }

  Sql withVertica() {
    return dialect(SqlDialect.DatabaseProduct.VERTICA.getDialect());
  }

  Sql withBigQuery() {
    return dialect(SqlDialect.DatabaseProduct.BIG_QUERY.getDialect());
  }

  Sql withSpark() {
    return dialect(DatabaseProduct.SPARK.getDialect());
  }

  /** Hive with backtick identifier quoting enabled. */
  Sql withHiveIdentifierQuoteString() {
    final HiveSqlDialect hiveSqlDialect =
        new HiveSqlDialect((SqlDialect.EMPTY_CONTEXT)
            .withDatabaseProduct(DatabaseProduct.HIVE)
            .withIdentifierQuoteString("`"));
    return dialect(hiveSqlDialect);
  }

  /** Spark with backtick identifier quoting enabled. */
  Sql withSparkIdentifierQuoteString() {
    final SparkSqlDialect sparkSqlDialect =
        new SparkSqlDialect((SqlDialect.EMPTY_CONTEXT)
            .withDatabaseProduct(DatabaseProduct.SPARK)
            .withIdentifierQuoteString("`"));
    return dialect(sparkSqlDialect);
  }

  Sql withPostgresqlModifiedTypeSystem() {
    // Postgresql dialect with max length for varchar set to 256
    final PostgresqlSqlDialect postgresqlSqlDialect =
        new PostgresqlSqlDialect(PostgresqlSqlDialect.DEFAULT_CONTEXT
            .withDataTypeSystem(new RelDataTypeSystemImpl() {
              @Override public int getMaxPrecision(SqlTypeName typeName) {
                switch (typeName) {
                case VARCHAR:
                  return 256;
                default:
                  return super.getMaxPrecision(typeName);
                }
              }
            }));
    return dialect(postgresqlSqlDialect);
  }

  Sql withOracleModifiedTypeSystem() {
    // Oracle dialect with max length for varchar set to 512
    final OracleSqlDialect oracleSqlDialect =
        new OracleSqlDialect(OracleSqlDialect.DEFAULT_CONTEXT
            .withDataTypeSystem(new RelDataTypeSystemImpl() {
              @Override public int getMaxPrecision(SqlTypeName typeName) {
                switch (typeName) {
                case VARCHAR:
                  return 512;
                default:
                  return super.getMaxPrecision(typeName);
                }
              }
            }));
    return dialect(oracleSqlDialect);
  }

  Sql parserConfig(SqlParser.Config parserConfig) {
    return new Sql(schema, sql, dialect, parserConfig, config, relFn,
        transforms);
  }

  Sql withConfig(UnaryOperator<SqlToRelConverter.Config> config) {
    return new Sql(schema, sql, dialect, parserConfig, config, relFn,
        transforms);
  }

  /** Appends an optimization pass: runs {@code ruleSet} on the RelNode
   * (with a fresh HepPlanner if none is supplied) before rendering. */
  Sql optimize(final RuleSet ruleSet, final RelOptPlanner relOptPlanner) {
    return new Sql(schema, sql, dialect, parserConfig, config, relFn,
        FlatLists.append(transforms, r -> {
          Program program = Programs.of(ruleSet);
          final RelOptPlanner p =
              Util.first(relOptPlanner,
                  new HepPlanner(
                      new HepProgramBuilder().addRuleClass(RelOptRule.class)
                          .build()));
          return program.run(p, r, r.getTraitSet(),
              ImmutableList.of(), ImmutableList.of());
        }));
  }

  /** Asserts that rendering produces {@code expectedQuery} (line endings
   * normalized to \n). Returns {@code this} for chaining. */
  Sql ok(String expectedQuery) {
    assertThat(exec(), isLinux(expectedQuery));
    return this;
  }

  /** Asserts that rendering fails with exactly {@code errorMessage}. */
  Sql throws_(String errorMessage) {
    try {
      final String s = exec();
      throw new AssertionError("Expected exception with message `"
          + errorMessage + "` but nothing was thrown; got " + s);
    } catch (Exception e) {
      assertThat(e.getMessage(), is(errorMessage));
      return this;
    }
  }

  /** Runs the pipeline: obtain a RelNode (from {@code relFn} if set,
   * otherwise parse/validate/convert {@code sql}), apply transforms, then
   * render it in the current dialect. */
  String exec() {
    try {
      RelNode rel;
      if (relFn != null) {
        rel = relFn.apply(relBuilder());
      } else {
        final SqlToRelConverter.Config config = this.config.apply(SqlToRelConverter.config()
            .withTrimUnusedFields(false));
        final Planner planner =
            getPlanner(null, parserConfig, schema, config);
        SqlNode parse = planner.parse(sql);
        SqlNode validate = planner.validate(parse);
        rel = planner.rel(validate).rel;
      }
      for (Function<RelNode, RelNode> transform : transforms) {
        rel = transform.apply(rel);
      }
      return toSql(rel, dialect);
    } catch (Exception e) {
      throw TestUtil.rethrow(e);
    }
  }

  public Sql schema(CalciteAssert.SchemaSpec schemaSpec) {
    return new Sql(schemaSpec, sql, dialect, parserConfig, config, relFn,
        transforms);
  }
}
@Test public void testIsNotTrueWithEqualCondition() {
  // "x = y IS NOT TRUE" collapses to a plain inequality on BigQuery.
  final String query = "select \"product_name\" from \"product\" where "
      + "\"product_name\" = 'Hello World' is not true";
  final String bigQueryExpected = "SELECT product_name\n"
      + "FROM foodmart.product\n"
      + "WHERE product_name <> 'Hello World'";
  sql(query).withBigQuery().ok(bigQueryExpected);
}
/** COALESCE over two non-null TIMESTAMP casts is folded to the first
 * operand, rendered as a TIMESTAMP literal (Hive/Spark) or a DATETIME cast
 * (BigQuery); the fractional '.123' is dropped in all three outputs.
 * NOTE(review): "Coalsece" in the method name is a typo for "Coalesce" —
 * left as-is since the name is the externally-visible test id. */
@Test public void testCoalseceWithCast() {
  final String query = "Select coalesce(cast('2099-12-31 00:00:00.123' as TIMESTAMP),\n"
      + "cast('2010-12-31 01:00:00.123' as TIMESTAMP))";
  final String expectedHive = "SELECT TIMESTAMP '2099-12-31 00:00:00'";
  final String expectedSpark = "SELECT TIMESTAMP '2099-12-31 00:00:00'";
  final String bigQueryExpected = "SELECT CAST('2099-12-31 00:00:00' AS DATETIME)";
  sql(query)
      .withHive()
      .ok(expectedHive)
      .withSpark()
      .ok(expectedSpark)
      .withBigQuery()
      .ok(bigQueryExpected);
}
@Test public void testCoalseceWithLiteral() {
  // COALESCE over non-null literals is folded to its first operand in
  // every target dialect.
  final String query = "Select coalesce('abc','xyz')";
  final String folded = "SELECT 'abc'";
  sql(query)
      .withHive().ok(folded)
      .withSpark().ok(folded)
      .withBigQuery().ok(folded);
}
@Test public void testCoalseceWithNull() {
  // A leading NULL operand is eliminated, leaving the non-null literal.
  final String query = "Select coalesce(null, 'abc')";
  final String folded = "SELECT 'abc'";
  sql(query)
      .withHive().ok(folded)
      .withSpark().ok(folded)
      .withBigQuery().ok(folded);
}
@Test public void testLog10Function() {
  // Snowflake has no LOG10; it is rendered as two-argument LOG(10, x).
  final String query = "SELECT LOG10(2) as dd";
  sql(query)
      .withSnowflake()
      .ok("SELECT LOG(10, 2) AS \"DD\"");
}
@Test public void testLog10ForOne() {
  // LOG10(1) is constant-folded to 0 before rendering.
  final String query = "SELECT LOG10(1) as dd";
  sql(query)
      .withSnowflake()
      .ok("SELECT 0 AS \"DD\"");
}
@Test public void testLog10ForColumn() {
  // A column argument cannot be folded, so Snowflake gets LOG(10, col).
  final String query = "SELECT LOG10(\"product_id\") as dd from \"product\"";
  final String expectedSnowFlake = "SELECT LOG(10, \"product_id\") AS \"DD\"\n"
      + "FROM \"foodmart\".\"product\"";
  sql(query).withSnowflake().ok(expectedSnowFlake);
}
/** DIVIDE_INTEGER: Calcite renders its internal "/INT" operator; Snowflake
 * has no integer division, so it is rewritten as FLOOR(a / b). */
@Test public void testDivideIntegerSnowflake() {
  final RelBuilder builder = relBuilder();
  final RexNode intdivideRexNode = builder.call(SqlStdOperatorTable.DIVIDE_INTEGER,
      builder.scan("EMP").field(0), builder.scan("EMP").field(3));
  final RelNode root = builder
      .scan("EMP")
      .project(builder.alias(intdivideRexNode, "a"))
      .build();
  final String expectedSql = "SELECT \"EMPNO\" /INT \"MGR\" AS \"a\"\n"
      + "FROM \"scott\".\"EMP\"";
  final String expectedSF = "SELECT FLOOR(\"EMPNO\" / \"MGR\") AS \"a\"\n"
      + "FROM \"scott\".\"EMP\"";
  assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
  assertThat(toSql(root, DatabaseProduct.SNOWFLAKE.getDialect()), isLinux(expectedSF));
}
  // ROUND with a column as the scale argument. Snowflake requires the scale
  // clamped into [-12, 38] (via CASE) and the result re-scaled with
  // TO_DECIMAL; other dialects pass the column through unchanged.
  @Test public void testRoundFunctionWithColumnPlaceHandling() {
    final String query = "SELECT ROUND(123.41445, \"product_id\") AS \"a\"\n"
        + "FROM \"foodmart\".\"product\"";
    final String expectedBq = "SELECT ROUND(123.41445, product_id) AS a\nFROM foodmart.product";
    final String expected = "SELECT ROUND(123.41445, product_id) a\n"
        + "FROM foodmart.product";
    final String expectedSnowFlake = "SELECT TO_DECIMAL(ROUND(123.41445, "
        + "CASE WHEN \"product_id\" > 38 THEN 38 WHEN \"product_id\" < -12 "
        + "THEN -12 ELSE \"product_id\" END) ,38, 4) AS \"a\"\n"
        + "FROM \"foodmart\".\"product\"";
    final String expectedMssql = "SELECT ROUND(123.41445, [product_id]) AS [a]\n"
        + "FROM [foodmart].[product]";
    sql(query)
        .withBigQuery()
        .ok(expectedBq)
        .withHive()
        .ok(expected)
        .withSpark()
        .ok(expected)
        .withSnowflake()
        .ok(expectedSnowFlake)
        .withMssql()
        .ok(expectedMssql);
  }
  // Single-argument ROUND: MSSQL's ROUND requires a scale, so an explicit 0
  // is supplied.
  @Test public void testRoundFunctionWithOneParameter() {
    final String query = "SELECT ROUND(123.41445) AS \"a\"\n"
        + "FROM \"foodmart\".\"product\"";
    final String expectedMssql = "SELECT ROUND(123.41445, 0) AS [a]\n"
        + "FROM [foodmart].[product]";
    sql(query)
        .withMssql()
        .ok(expectedMssql);
  }
  // TRUNCATE with a column scale: BigQuery uses TRUNC, Snowflake clamps the
  // scale into [-12, 38], and MSSQL emulates truncation via ROUND.
  @Test public void testTruncateFunctionWithColumnPlaceHandling() {
    String query = "select truncate(2.30259, \"employee_id\") from \"employee\"";
    final String expectedBigQuery = "SELECT TRUNC(2.30259, employee_id)\n"
        + "FROM foodmart.employee";
    final String expectedSnowFlake = "SELECT TRUNCATE(2.30259, CASE WHEN \"employee_id\" > 38"
        + " THEN 38 WHEN \"employee_id\" < -12 THEN -12 ELSE \"employee_id\" END)\n"
        + "FROM \"foodmart\".\"employee\"";
    final String expectedMssql = "SELECT ROUND(2.30259, [employee_id])"
        + "\nFROM [foodmart].[employee]";
    sql(query)
        .withBigQuery()
        .ok(expectedBigQuery)
        .withSnowflake()
        .ok(expectedSnowFlake)
        .withMssql()
        .ok(expectedMssql);
  }
  // Single-argument TRUNCATE on MSSQL becomes ROUND with explicit scale 0.
  @Test public void testTruncateFunctionWithOneParameter() {
    String query = "select truncate(2.30259) from \"employee\"";
    final String expectedMssql = "SELECT ROUND(2.30259, 0)"
        + "\nFROM [foodmart].[employee]";
    sql(query)
        .withMssql()
        .ok(expectedMssql);
  }
  // COUNT(*) OVER () with an empty window: Snowflake gets an explicit
  // ORDER BY 0 plus an unbounded ROWS frame; MSSQL keeps the empty OVER ().
  @Test public void testWindowFunctionWithOrderByWithoutcolumn() {
    String query = "Select count(*) over() from \"employee\"";
    final String expectedSnowflake = "SELECT COUNT(*) OVER (ORDER BY 0 ROWS BETWEEN UNBOUNDED "
        + "PRECEDING AND UNBOUNDED FOLLOWING)\n"
        + "FROM \"foodmart\".\"employee\"";
    final String mssql = "SELECT COUNT(*) OVER ()\n"
        + "FROM [foodmart].[employee]";
    sql(query)
        .withSnowflake()
        .ok(expectedSnowflake)
        .withMssql()
        .ok(mssql);
  }
  // When the windowed aggregate has a column argument, Snowflake's generated
  // ORDER BY uses that column instead of the constant 0.
  @Test public void testWindowFunctionWithOrderByWithcolumn() {
    String query = "select count(\"employee_id\") over () as a from \"employee\"";
    final String expectedSnowflake = "SELECT COUNT(\"employee_id\") OVER (ORDER BY \"employee_id\" "
        + "ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) AS \"A\"\n"
        + "FROM \"foodmart\".\"employee\"";
    sql(query)
        .withSnowflake()
        .ok(expectedSnowflake);
  }
  // Snowflake-only variant of the ROUND-with-column-scale test; the scale is
  // clamped into [-12, 38] and the result wrapped in TO_DECIMAL.
  @Test public void testRoundFunction() {
    final String query = "SELECT ROUND(123.41445, \"product_id\") AS \"a\"\n"
        + "FROM \"foodmart\".\"product\"";
    final String expectedSnowFlake = "SELECT TO_DECIMAL(ROUND(123.41445, CASE "
        + "WHEN \"product_id\" > 38 THEN 38 WHEN \"product_id\" < -12 THEN -12 "
        + "ELSE \"product_id\" END) ,38, 4) AS \"a\"\n"
        + "FROM \"foodmart\".\"product\"";
    sql(query)
        .withSnowflake()
        .ok(expectedSnowFlake);
  }
  // RAND_INTEGER(lo, hi): Snowflake uses UNIFORM(lo, hi, RANDOM()); Hive,
  // Spark and BigQuery emulate it with FLOOR(RAND() * (hi - lo + 1)) + lo.
  @Test public void testRandomFunction() {
    String query = "select rand_integer(1,3) from \"employee\"";
    final String expectedSnowFlake = "SELECT UNIFORM(1, 3, RANDOM())\n"
        + "FROM \"foodmart\".\"employee\"";
    final String expectedHive = "SELECT FLOOR(RAND() * (3 - 1 + 1)) + 1\n"
        + "FROM foodmart.employee";
    final String expectedBQ = "SELECT FLOOR(RAND() * (3 - 1 + 1)) + 1\n"
        + "FROM foodmart.employee";
    final String expectedSpark = "SELECT FLOOR(RAND() * (3 - 1 + 1)) + 1\n"
        + "FROM foodmart.employee";
    sql(query)
        .withHive()
        .ok(expectedHive)
        .withSpark()
        .ok(expectedSpark)
        .withBigQuery()
        .ok(expectedBQ)
        .withSnowflake()
        .ok(expectedSnowFlake);
  }
  // FORMAT_DATE with pattern 'E4' (full day name): Snowflake has no direct
  // equivalent, so it expands to a CASE over TO_VARCHAR(date, 'DY').
  @Test public void testCaseExprForE4() {
    final RelBuilder builder = relBuilder().scan("EMP");
    final RexNode condition = builder.call(SqlLibraryOperators.FORMAT_DATE,
        builder.literal("E4"), builder.field("HIREDATE"));
    final RelNode root = relBuilder().scan("EMP").filter(condition).build();
    final String expectedSF = "SELECT *\n"
        + "FROM \"scott\".\"EMP\"\n"
        + "WHERE CASE WHEN TO_VARCHAR(\"HIREDATE\", 'DY') = 'Sun' "
        + "THEN 'Sunday' WHEN TO_VARCHAR(\"HIREDATE\", 'DY') = 'Mon' "
        + "THEN 'Monday' WHEN TO_VARCHAR(\"HIREDATE\", 'DY') = 'Tue' "
        + "THEN 'Tuesday' WHEN TO_VARCHAR(\"HIREDATE\", 'DY') = 'Wed' "
        + "THEN 'Wednesday' WHEN TO_VARCHAR(\"HIREDATE\", 'DY') = 'Thu' "
        + "THEN 'Thursday' WHEN TO_VARCHAR(\"HIREDATE\", 'DY') = 'Fri' "
        + "THEN 'Friday' WHEN TO_VARCHAR(\"HIREDATE\", 'DY') = 'Sat' "
        + "THEN 'Saturday' END";
    assertThat(toSql(root, DatabaseProduct.SNOWFLAKE.getDialect()), isLinux(expectedSF));
  }
  // Pattern 'EEEE' maps to the same full-day-name CASE expression as 'E4'.
  // NOTE(review): expected SQL is identical to testCaseExprForE4 — presumably
  // intentional (both patterns mean "full day name"); confirm.
  @Test public void testCaseExprForEEEE() {
    final RelBuilder builder = relBuilder().scan("EMP");
    final RexNode condition = builder.call(SqlLibraryOperators.FORMAT_DATE,
        builder.literal("EEEE"), builder.field("HIREDATE"));
    final RelNode root = relBuilder().scan("EMP").filter(condition).build();
    final String expectedSF = "SELECT *\n"
        + "FROM \"scott\".\"EMP\"\n"
        + "WHERE CASE WHEN TO_VARCHAR(\"HIREDATE\", 'DY') = 'Sun' "
        + "THEN 'Sunday' WHEN TO_VARCHAR(\"HIREDATE\", 'DY') = 'Mon' "
        + "THEN 'Monday' WHEN TO_VARCHAR(\"HIREDATE\", 'DY') = 'Tue' "
        + "THEN 'Tuesday' WHEN TO_VARCHAR(\"HIREDATE\", 'DY') = 'Wed' "
        + "THEN 'Wednesday' WHEN TO_VARCHAR(\"HIREDATE\", 'DY') = 'Thu' "
        + "THEN 'Thursday' WHEN TO_VARCHAR(\"HIREDATE\", 'DY') = 'Fri' "
        + "THEN 'Friday' WHEN TO_VARCHAR(\"HIREDATE\", 'DY') = 'Sat' "
        + "THEN 'Saturday' END";
    assertThat(toSql(root, DatabaseProduct.SNOWFLAKE.getDialect()), isLinux(expectedSF));
  }
  // Pattern 'E3' (abbreviated day name) maps directly to TO_VARCHAR(.., 'DY').
  @Test public void testCaseExprForE3() {
    final RelBuilder builder = relBuilder().scan("EMP");
    final RexNode condition = builder.call(SqlLibraryOperators.FORMAT_DATE,
        builder.literal("E3"), builder.field("HIREDATE"));
    final RelNode root = relBuilder().scan("EMP").filter(condition).build();
    final String expectedSF = "SELECT *\n"
        + "FROM \"scott\".\"EMP\"\n"
        + "WHERE TO_VARCHAR(\"HIREDATE\", 'DY')";
    assertThat(toSql(root, DatabaseProduct.SNOWFLAKE.getDialect()), isLinux(expectedSF));
  }
  // Pattern 'EEE' behaves the same as 'E3' (abbreviated day name).
  @Test public void testCaseExprForEEE() {
    final RelBuilder builder = relBuilder().scan("EMP");
    final RexNode condition = builder.call(SqlLibraryOperators.FORMAT_DATE,
        builder.literal("EEE"), builder.field("HIREDATE"));
    final RelNode root = relBuilder().scan("EMP").filter(condition).build();
    final String expectedSF = "SELECT *\n"
        + "FROM \"scott\".\"EMP\"\n"
        + "WHERE TO_VARCHAR(\"HIREDATE\", 'DY')";
    assertThat(toSql(root, DatabaseProduct.SNOWFLAKE.getDialect()), isLinux(expectedSF));
  }
  // OCTET_LENGTH on a column passes through unchanged to BigQuery.
  @Test public void octetLength() {
    final RelBuilder builder = relBuilder().scan("EMP");
    final RexNode condition = builder.call(SqlLibraryOperators.OCTET_LENGTH,
        builder.field("ENAME"));
    final RelNode root = relBuilder().scan("EMP").filter(condition).build();
    final String expectedBQ = "SELECT *\n"
        + "FROM scott.EMP\n"
        + "WHERE OCTET_LENGTH(ENAME)";
    assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBQ));
  }
  // OCTET_LENGTH on a string literal (quoted, not treated as a column).
  @Test public void octetLengthWithLiteral() {
    final RelBuilder builder = relBuilder().scan("EMP");
    final RexNode condition = builder.call(SqlLibraryOperators.OCTET_LENGTH,
        builder.literal("ENAME"));
    final RelNode root = relBuilder().scan("EMP").filter(condition).build();
    final String expectedBQ = "SELECT *\n"
        + "FROM scott.EMP\n"
        + "WHERE OCTET_LENGTH('ENAME')";
    assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBQ));
  }
  // INT2SHR(a, shift, mask) becomes a masked right shift: (a & mask) >> shift.
  @Test public void testInt2Shr() {
    final RelBuilder builder = relBuilder().scan("EMP");
    final RexNode condition = builder.call(SqlLibraryOperators.INT2SHR,
        builder.literal(3), builder.literal(1), builder.literal(6));
    final RelNode root = relBuilder().scan("EMP").filter(condition).build();
    final String expectedBQ = "SELECT *\n"
        + "FROM scott.EMP\n"
        + "WHERE (3 & 6 ) >> 1";
    assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBQ));
  }
  // BITWISE_XOR maps to BigQuery's ^ operator.
  @Test public void testInt8Xor() {
    final RelBuilder builder = relBuilder().scan("EMP");
    final RexNode condition = builder.call(SqlLibraryOperators.BITWISE_XOR,
        builder.literal(3), builder.literal(6));
    final RelNode root = relBuilder().scan("EMP").filter(condition).build();
    final String expectedBQ = "SELECT *\n"
        + "FROM scott.EMP\n"
        + "WHERE 3 ^ 6";
    assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBQ));
  }
  // INT2SHL(a, shift, mask) becomes a masked left shift: (a & mask) << shift.
  @Test public void testInt2Shl() {
    final RelBuilder builder = relBuilder().scan("EMP");
    final RexNode condition = builder.call(SqlLibraryOperators.INT2SHL,
        builder.literal(3), builder.literal(1), builder.literal(6));
    final RelNode root = relBuilder().scan("EMP").filter(condition).build();
    final String expectedBQ = "SELECT *\n"
        + "FROM scott.EMP\n"
        + "WHERE (3 & 6 ) << 1";
    assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBQ));
  }
  // BITWISE_AND maps to BigQuery's & operator.
  @Test public void testInt2And() {
    final RelBuilder builder = relBuilder().scan("EMP");
    final RexNode condition = builder.call(SqlLibraryOperators.BITWISE_AND,
        builder.literal(3), builder.literal(6));
    final RelNode root = relBuilder().scan("EMP").filter(condition).build();
    final String expectedBQ = "SELECT *\n"
        + "FROM scott.EMP\n"
        + "WHERE 3 & 6";
    assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBQ));
  }
  // BITWISE_OR maps to BigQuery's | operator.
  @Test public void testInt1Or() {
    final RelBuilder builder = relBuilder().scan("EMP");
    final RexNode condition = builder.call(SqlLibraryOperators.BITWISE_OR,
        builder.literal(3), builder.literal(6));
    final RelNode root = relBuilder().scan("EMP").filter(condition).build();
    final String expectedBQ = "SELECT *\n"
        + "FROM scott.EMP\n"
        + "WHERE 3 | 6";
    assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBQ));
  }
@Test public void testCot() {
final String query = "SELECT COT(0.12)";
final String expectedBQ = "SELECT 1 / TAN(0.12)";
sql(query)
.withBigQuery()
.ok(expectedBQ);
}
  // A TIMESTAMP literal is unparsed for BigQuery as a CAST to DATETIME.
  @Test public void testTimestampLiteral() {
    final String query = "SELECT Timestamp '1993-07-21 10:10:10'";
    final String expectedBQ = "SELECT CAST('1993-07-21 10:10:10' AS DATETIME)";
    sql(query)
        .withBigQuery()
        .ok(expectedBQ);
  }
  // Natural log LN maps to MSSQL's single-argument LOG.
  @Test public void testCaseForLnFunction() {
    final String query = "SELECT LN(\"product_id\") as dd from \"product\"";
    final String expectedMssql = "SELECT LOG([product_id]) AS [DD]"
        + "\nFROM [foodmart].[product]";
    sql(query)
        .withMssql()
        .ok(expectedMssql);
  }
  // CEIL is spelled CEILING on MSSQL.
  @Test public void testCaseForCeilToCeilingMSSQL() {
    final String query = "SELECT CEIL(12345) FROM \"product\"";
    final String expected = "SELECT CEILING(12345)\n"
        + "FROM [foodmart].[product]";
    sql(query)
        .withMssql()
        .ok(expected);
  }
@Test public void testLastDayMSSQL() {
final String query = "SELECT LAST_DAY(DATE '2009-12-20')";
final String expected = "SELECT EOMONTH('2009-12-20')";
sql(query)
.withMssql()
.ok(expected);
}
  // CURRENT_DATE on MSSQL: GETDATE() cast to DATE, aliased back to the
  // original pseudo-column name.
  @Test public void testCurrentDate() {
    String query =
        "select CURRENT_DATE from \"product\" where \"product_id\" < 10";
    final String expected = "SELECT CAST(GETDATE() AS DATE) AS [CURRENT_DATE]\n"
        + "FROM [foodmart].[product]\n"
        + "WHERE [product_id] < 10";
    sql(query).withMssql().ok(expected);
  }
  // CURRENT_TIME on MSSQL: GETDATE() cast to TIME.
  @Test public void testCurrentTime() {
    String query =
        "select CURRENT_TIME from \"product\" where \"product_id\" < 10";
    final String expected = "SELECT CAST(GETDATE() AS TIME) AS [CURRENT_TIME]\n"
        + "FROM [foodmart].[product]\n"
        + "WHERE [product_id] < 10";
    sql(query).withMssql().ok(expected);
  }
  // CURRENT_TIMESTAMP on MSSQL maps to plain GETDATE().
  @Test public void testCurrentTimestamp() {
    String query =
        "select CURRENT_TIMESTAMP from \"product\" where \"product_id\" < 10";
    final String expected = "SELECT GETDATE() AS [CURRENT_TIMESTAMP]\n"
        + "FROM [foodmart].[product]\n"
        + "WHERE [product_id] < 10";
    sql(query).withMssql().ok(expected);
  }
  // DAYOFMONTH: MSSQL has a DAY() function; BigQuery uses EXTRACT(DAY ...).
  @Test public void testDayOfMonth() {
    String query = "select DAYOFMONTH( DATE '2008-08-29')";
    final String expectedMssql = "SELECT DAY('2008-08-29')";
    final String expectedBQ = "SELECT EXTRACT(DAY FROM DATE '2008-08-29')";
    sql(query)
        .withMssql()
        .ok(expectedMssql)
        .withBigQuery()
        .ok(expectedBQ);
  }
  // EXTRACT(DECADE ...): BigQuery emulates it by taking the first three
  // digits of the year via SUBSTR.
  @Test public void testExtractDecade() {
    String query = "SELECT EXTRACT(DECADE FROM DATE '2008-08-29')";
    final String expectedBQ = "SELECT CAST(SUBSTR(CAST("
        + "EXTRACT(YEAR FROM DATE '2008-08-29') AS STRING), 0, 3) AS INTEGER)";
    sql(query)
        .withBigQuery()
        .ok(expectedBQ);
  }
  // EXTRACT(CENTURY ...): BigQuery emulates it as CEIL(year / 100).
  @Test public void testExtractCentury() {
    String query = "SELECT EXTRACT(CENTURY FROM DATE '2008-08-29')";
    final String expectedBQ = "SELECT CAST(CEIL(EXTRACT(YEAR FROM DATE '2008-08-29') / 100) "
        + "AS INTEGER)";
    sql(query)
        .withBigQuery()
        .ok(expectedBQ);
  }
  // DOY (day of year) is spelled DAYOFYEAR on BigQuery.
  @Test public void testExtractDOY() {
    String query = "SELECT EXTRACT(DOY FROM DATE '2008-08-29')";
    final String expectedBQ = "SELECT EXTRACT(DAYOFYEAR FROM DATE '2008-08-29')";
    sql(query)
        .withBigQuery()
        .ok(expectedBQ);
  }
  // DOW (day of week) is spelled DAYOFWEEK on BigQuery.
  @Test public void testExtractDOW() {
    String query = "SELECT EXTRACT(DOW FROM DATE '2008-08-29')";
    final String expectedBQ = "SELECT EXTRACT(DAYOFWEEK FROM DATE '2008-08-29')";
    sql(query)
        .withBigQuery()
        .ok(expectedBQ);
  }
  // EXTRACT(EPOCH ...) maps to BigQuery's UNIX_SECONDS.
  @Test public void testExtractEpoch() {
    String query = "SELECT EXTRACT(EPOCH FROM DATE '2008-08-29')";
    final String expectedBQ = "SELECT UNIX_SECONDS(DATE '2008-08-29')";
    sql(query)
        .withBigQuery()
        .ok(expectedBQ);
  }
  // EXTRACT(MILLENNIUM ...): BigQuery emulates it via the year's first digit.
  @Test public void testExtractMillennium() {
    String query = "SELECT EXTRACT(MILLENNIUM FROM DATE '2008-08-29')";
    final String expectedBQ = "SELECT CAST(SUBSTR(CAST("
        + "EXTRACT(YEAR FROM DATE '2008-08-29') AS STRING), 0, 1) AS INTEGER)";
    sql(query)
        .withBigQuery()
        .ok(expectedBQ);
  }
  // FORMAT_TIMESTAMP('SEC_FROM_MIDNIGHT', ts): BigQuery computes seconds since
  // midnight as a DATE_DIFF against the timestamp truncated to its date, cast
  // to STRING.
  @Test public void testSecFromMidnightFormatTimestamp() {
    final RelBuilder builder = relBuilder();
    final RexNode formatTimestampRexNode = builder.call(SqlLibraryOperators.FORMAT_TIMESTAMP,
        builder.literal("SEC_FROM_MIDNIGHT"), builder.scan("EMP").field(4));
    final RelNode root = builder
        .scan("EMP")
        .project(builder.alias(formatTimestampRexNode, "FD"))
        .build();
    final String expectedSql = "SELECT FORMAT_TIMESTAMP('SEC_FROM_MIDNIGHT', \"HIREDATE\") AS"
        + " \"FD\"\n"
        + "FROM \"scott\".\"EMP\"";
    final String expectedBiqQuery = "SELECT CAST(DATE_DIFF(HIREDATE, CAST(CAST(HIREDATE AS DATE) "
        + "AS DATETIME), SECOND) AS STRING) AS FD\n"
        + "FROM scott.EMP";
    assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
    assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
  }
  // FORMAT_DATE('QUARTER', d) maps to BigQuery's '%Q' format element.
  @Test public void testGetQuarterFromDate() {
    final RelBuilder builder = relBuilder();
    final RexNode formatDateRexNode = builder.call(SqlLibraryOperators.FORMAT_DATE,
        builder.literal("QUARTER"), builder.scan("EMP").field(4));
    final RelNode root = builder
        .scan("EMP")
        .project(builder.alias(formatDateRexNode, "FD"))
        .build();
    final String expectedBiqQuery = "SELECT FORMAT_DATE('%Q', HIREDATE) AS FD\n"
        + "FROM scott.EMP";
    assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
  }
  // EXTRACT(DAY ...) over CURRENT_DATE / CURRENT_TIMESTAMP across dialects:
  // Snowflake and MSSQL use the DAY() function; BigQuery keeps EXTRACT and
  // swaps CURRENT_TIMESTAMP for CURRENT_DATETIME().
  @Test public void testExtractDay() {
    String query = "SELECT EXTRACT(DAY FROM CURRENT_DATE), EXTRACT(DAY FROM CURRENT_TIMESTAMP)";
    final String expectedSFSql = "SELECT DAY(CURRENT_DATE), DAY(CURRENT_TIMESTAMP)";
    final String expectedBQSql = "SELECT EXTRACT(DAY FROM CURRENT_DATE), "
        + "EXTRACT(DAY FROM CURRENT_DATETIME())";
    final String expectedMsSql = "SELECT DAY(CAST(GETDATE() AS DATE)), DAY(GETDATE())";
    sql(query)
        .withSnowflake()
        .ok(expectedSFSql)
        .withBigQuery()
        .ok(expectedBQSql)
        .withMssql()
        .ok(expectedMsSql);
  }
  // Same as testExtractDay but for the MONTH field.
  @Test public void testExtractMonth() {
    String query = "SELECT EXTRACT(MONTH FROM CURRENT_DATE), EXTRACT(MONTH FROM CURRENT_TIMESTAMP)";
    final String expectedSFSql = "SELECT MONTH(CURRENT_DATE), MONTH(CURRENT_TIMESTAMP)";
    final String expectedBQSql = "SELECT EXTRACT(MONTH FROM CURRENT_DATE), "
        + "EXTRACT(MONTH FROM CURRENT_DATETIME())";
    final String expectedMsSql = "SELECT MONTH(CAST(GETDATE() AS DATE)), MONTH(GETDATE())";
    sql(query)
        .withSnowflake()
        .ok(expectedSFSql)
        .withBigQuery()
        .ok(expectedBQSql)
        .withMssql()
        .ok(expectedMsSql);
  }
  // Same as testExtractDay but for the YEAR field.
  @Test public void testExtractYear() {
    String query = "SELECT EXTRACT(YEAR FROM CURRENT_DATE), EXTRACT(YEAR FROM CURRENT_TIMESTAMP)";
    final String expectedSFSql = "SELECT YEAR(CURRENT_DATE), YEAR(CURRENT_TIMESTAMP)";
    final String expectedBQSql = "SELECT EXTRACT(YEAR FROM CURRENT_DATE), "
        + "EXTRACT(YEAR FROM CURRENT_DATETIME())";
    final String expectedMsSql = "SELECT YEAR(CAST(GETDATE() AS DATE)), YEAR(GETDATE())";
    sql(query)
        .withSnowflake()
        .ok(expectedSFSql)
        .withBigQuery()
        .ok(expectedBQSql)
        .withMssql()
        .ok(expectedMsSql);
  }
  // An integer-scaled interval (10 * INTERVAL '00:01:00') is unparsed for
  // BigQuery as TIMESTAMP_ADD with the multiplication folded into the
  // interval expression (10 * 60 SECOND).
  @Test public void testIntervalMultiplyWithInteger() {
    String query = "select \"hire_date\" + 10 * INTERVAL '00:01:00' HOUR "
        + "TO SECOND from \"employee\"";
    final String expectedBQSql = "SELECT TIMESTAMP_ADD(hire_date, INTERVAL 10 * 60 SECOND)\n"
        + "FROM foodmart.employee";
    sql(query)
        .withBigQuery()
        .ok(expectedBQSql);
  }
  // Underscore in the format pattern survives the translation to BigQuery's
  // strftime-style elements ('%Y%m%d_%H%M%S').
  @Test public void testDateUnderscoreSeparator() {
    final RelBuilder builder = relBuilder();
    final RexNode formatTimestampRexNode = builder.call(SqlLibraryOperators.FORMAT_TIMESTAMP,
        builder.literal("YYYYMMDD_HH24MISS"), builder.scan("EMP").field(4));
    final RelNode root = builder
        .scan("EMP")
        .project(builder.alias(formatTimestampRexNode, "FD"))
        .build();
    final String expectedBiqQuery = "SELECT FORMAT_TIMESTAMP('%Y%m%d_%H%M%S', HIREDATE) AS FD\n"
        + "FROM scott.EMP";
    assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
  }
  // PARSE_TIMESTAMP maps to BigQuery's PARSE_DATETIME with the converted
  // format string.
  @Test public void testParseDatetime() {
    final RelBuilder builder = relBuilder();
    final RexNode parseDatetimeRexNode = builder.call(SqlLibraryOperators.PARSE_TIMESTAMP,
        builder.literal("YYYYMMDD_HH24MISS"), builder.scan("EMP").field(4));
    final RelNode root = builder
        .scan("EMP")
        .project(builder.alias(parseDatetimeRexNode, "FD"))
        .build();
    final String expectedBiqQuery = "SELECT PARSE_DATETIME('%Y%m%d_%H%M%S', HIREDATE) AS FD\n"
        + "FROM scott.EMP";
    assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
  }
  // UNIX_SECONDS / UNIX_MICROS / UNIX_MILLIS: BigQuery requires a TIMESTAMP
  // argument, so the datetime column is wrapped in CAST(... AS TIMESTAMP).
  @Test public void testUnixFunctions() {
    final RelBuilder builder = relBuilder();
    final RexNode unixSecondsRexNode = builder.call(SqlLibraryOperators.UNIX_SECONDS,
        builder.scan("EMP").field(4));
    final RexNode unixMicrosRexNode = builder.call(SqlLibraryOperators.UNIX_MICROS,
        builder.scan("EMP").field(4));
    final RexNode unixMillisRexNode = builder.call(SqlLibraryOperators.UNIX_MILLIS,
        builder.scan("EMP").field(4));
    final RelNode root = builder
        .scan("EMP")
        .project(builder.alias(unixSecondsRexNode, "US"),
            builder.alias(unixMicrosRexNode, "UM"),
            builder.alias(unixMillisRexNode, "UMI"))
        .build();
    final String expectedBiqQuery = "SELECT UNIX_SECONDS(CAST(HIREDATE AS TIMESTAMP)) AS US, "
        + "UNIX_MICROS(CAST(HIREDATE AS TIMESTAMP)) AS UM, UNIX_MILLIS(CAST(HIREDATE AS TIMESTAMP)) "
        + "AS UMI\n"
        + "FROM scott.EMP";
    assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
  }
  // TIMESTAMP_SECONDS / TIMESTAMP_MICROS / TIMESTAMP_MILLIS: the reverse
  // direction — the BigQuery result is cast back to DATETIME.
  @Test public void testTimestampFunctions() {
    final RelBuilder builder = relBuilder();
    final RexNode unixSecondsRexNode = builder.call(SqlLibraryOperators.TIMESTAMP_SECONDS,
        builder.scan("EMP").field(4));
    final RexNode unixMicrosRexNode = builder.call(SqlLibraryOperators.TIMESTAMP_MICROS,
        builder.scan("EMP").field(4));
    final RexNode unixMillisRexNode = builder.call(SqlLibraryOperators.TIMESTAMP_MILLIS,
        builder.scan("EMP").field(4));
    final RelNode root = builder
        .scan("EMP")
        .project(builder.alias(unixSecondsRexNode, "TS"),
            builder.alias(unixMicrosRexNode, "TM"),
            builder.alias(unixMillisRexNode, "TMI"))
        .build();
    final String expectedBiqQuery = "SELECT CAST(TIMESTAMP_SECONDS(HIREDATE) AS DATETIME) AS TS, "
        + "CAST(TIMESTAMP_MICROS(HIREDATE) AS DATETIME) AS TM, CAST(TIMESTAMP_MILLIS(HIREDATE) AS "
        + "DATETIME) AS TMI\n"
        + "FROM scott.EMP";
    assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
  }
  // FORMAT_TIMESTAMP('EEEE', ts) maps to BigQuery's '%A' (full weekday name).
  @Test public void testFormatTimestamp() {
    final RelBuilder builder = relBuilder();
    final RexNode formatTimestampRexNode = builder.call(SqlLibraryOperators.FORMAT_TIMESTAMP,
        builder.literal("EEEE"),
        builder.cast(builder.literal("1999-07-01 15:00:00-08:00"), SqlTypeName.TIMESTAMP));
    final RelNode root = builder
        .scan("EMP")
        .project(builder.alias(formatTimestampRexNode, "FT"))
        .build();
    final String expectedBiqQuery =
        "SELECT FORMAT_TIMESTAMP('%A', CAST('1999-07-01 15:00:00-08:00' AS TIMESTAMP)) AS FT\n"
        + "FROM scott.EMP";
    assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
  }
  // GROUPING(col): BigQuery has no GROUPING function, so each call is
  // rewritten as CASE WHEN col IS NULL THEN 1 ELSE 0 END.
  @Test public void testGroupingFunction() {
    String query = "SELECT \"first_name\",\"last_name\", "
        + "grouping(\"first_name\")+ grouping(\"last_name\") "
        + "from \"foodmart\".\"employee\" group by \"first_name\",\"last_name\"";
    final String expectedBQSql = "SELECT first_name, last_name, CASE WHEN first_name IS NULL THEN"
        + " 1 ELSE 0 END + CASE WHEN last_name IS NULL THEN 1 ELSE 0 END\n"
        + "FROM foodmart.employee\n"
        + "GROUP BY first_name, last_name";
    sql(query)
        .withBigQuery()
        .ok(expectedBQSql);
  }
  // Date subtraction maps to BigQuery's DATE_DIFF(a, b, DAY).
  @Test public void testDateMinus() {
    String query = "SELECT \"birth_date\" - \"birth_date\" from \"foodmart\".\"employee\"";
    final String expectedBQSql = "SELECT DATE_DIFF(birth_date, birth_date, DAY)\n"
        + "FROM foodmart.employee";
    sql(query)
        .withBigQuery()
        .ok(expectedBQSql);
  }
  // HASHBUCKET(HASHROW(x)) collapses to BigQuery's FARM_FINGERPRINT.
  // NOTE(review): method name breaks the camelCase convention used elsewhere
  // in this file (cf. testDateMinus); consider renaming to testHashBucket.
  @Test public void testhashbucket() {
    final RelBuilder builder = relBuilder();
    final RexNode formatDateRexNode = builder.call(SqlLibraryOperators.HASHBUCKET,
        builder.call(SqlLibraryOperators.HASHROW, builder.scan("EMP").field(0)));
    final RelNode root = builder
        .scan("EMP")
        .project(builder.alias(formatDateRexNode, "FD"))
        .build();
    final String expectedSql = "SELECT HASHBUCKET(HASHROW(\"EMPNO\")) AS \"FD\"\n"
        + "FROM \"scott\".\"EMP\"";
    final String expectedBiqQuery = "SELECT FARM_FINGERPRINT(EMPNO) AS FD\n"
        + "FROM scott.EMP";
    assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
    assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
  }
  // TRUNC(date, 'DAY') maps to BigQuery's DATE_TRUNC with an unquoted unit.
  // NOTE(review): same camelCase naming remark as testhashbucket above.
  @Test public void testdatetrunc() {
    final RelBuilder builder = relBuilder();
    final RexNode trunc = builder.call(SqlLibraryOperators.TRUNC,
        builder.literal("2008-19-12"), builder.literal("DAY"));
    final RelNode root = builder
        .scan("EMP")
        .project(builder.alias(trunc, "FD"))
        .build();
    final String expectedSql = "SELECT TRUNC('2008-19-12', 'DAY') AS \"FD\"\n"
        + "FROM \"scott\".\"EMP\"";
    final String expectedBiqQuery = "SELECT DATE_TRUNC('2008-19-12', DAY) AS FD\n"
        + "FROM scott.EMP";
    assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
    assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
  }
  // TRUNC(date, unit) family: each unit literal becomes an unquoted
  // DATE_TRUNC unit on BigQuery. This one covers YEAR.
  @Test public void testdatetruncWithYear() {
    final RelBuilder builder = relBuilder();
    final RexNode trunc = builder.call(SqlLibraryOperators.TRUNC,
        builder.literal("2008-19-12"), builder.literal("YEAR"));
    final RelNode root = builder
        .scan("EMP")
        .project(builder.alias(trunc, "FD"))
        .build();
    final String expectedSql = "SELECT TRUNC('2008-19-12', 'YEAR') AS \"FD\"\n"
        + "FROM \"scott\".\"EMP\"";
    final String expectedBiqQuery = "SELECT DATE_TRUNC('2008-19-12', YEAR) AS FD\n"
        + "FROM scott.EMP";
    assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
    assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
  }
  // TRUNC(date, 'QUARTER') -> DATE_TRUNC(..., QUARTER).
  @Test public void testdatetruncWithQuarter() {
    final RelBuilder builder = relBuilder();
    final RexNode trunc = builder.call(SqlLibraryOperators.TRUNC,
        builder.literal("2008-19-12"), builder.literal("QUARTER"));
    final RelNode root = builder
        .scan("EMP")
        .project(builder.alias(trunc, "FD"))
        .build();
    final String expectedSql = "SELECT TRUNC('2008-19-12', 'QUARTER') AS \"FD\"\n"
        + "FROM \"scott\".\"EMP\"";
    final String expectedBiqQuery = "SELECT DATE_TRUNC('2008-19-12', QUARTER) AS FD\n"
        + "FROM scott.EMP";
    assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
    assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
  }
  // TRUNC(date, 'MONTH') -> DATE_TRUNC(..., MONTH).
  @Test public void testdatetruncWithMonth() {
    final RelBuilder builder = relBuilder();
    final RexNode trunc = builder.call(SqlLibraryOperators.TRUNC,
        builder.literal("2008-19-12"), builder.literal("MONTH"));
    final RelNode root = builder
        .scan("EMP")
        .project(builder.alias(trunc, "FD"))
        .build();
    final String expectedSql = "SELECT TRUNC('2008-19-12', 'MONTH') AS \"FD\"\n"
        + "FROM \"scott\".\"EMP\"";
    final String expectedBiqQuery = "SELECT DATE_TRUNC('2008-19-12', MONTH) AS FD\n"
        + "FROM scott.EMP";
    assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
    assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
  }
  // TRUNC(date, 'WEEK') -> DATE_TRUNC(..., WEEK).
  @Test public void testdatetruncWithWeek() {
    final RelBuilder builder = relBuilder();
    final RexNode trunc = builder.call(SqlLibraryOperators.TRUNC,
        builder.literal("2008-19-12"), builder.literal("WEEK"));
    final RelNode root = builder
        .scan("EMP")
        .project(builder.alias(trunc, "FD"))
        .build();
    final String expectedSql = "SELECT TRUNC('2008-19-12', 'WEEK') AS \"FD\"\n"
        + "FROM \"scott\".\"EMP\"";
    final String expectedBiqQuery = "SELECT DATE_TRUNC('2008-19-12', WEEK) AS FD\n"
        + "FROM scott.EMP";
    assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
    assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
  }
  // TRUNC on a TIMESTAMP operand: BigQuery uses DATETIME_TRUNC and casts the
  // literal to DATETIME. This one covers the YEAR unit.
  @Test public void testDateTimeTruncWithYear() {
    final RelBuilder builder = relBuilder();
    final RexNode trunc = builder.call(SqlLibraryOperators.TRUNC,
        builder.cast(builder.literal("2017-02-14 20:38:40"), SqlTypeName.TIMESTAMP),
        builder.literal("YEAR"));
    final RelNode root = builder
        .scan("EMP")
        .project(builder.alias(trunc, "FD"))
        .build();
    final String expectedSql = "SELECT TRUNC(TIMESTAMP '2017-02-14 20:38:40', 'YEAR') AS \"FD\"\n"
        + "FROM \"scott\".\"EMP\"";
    final String expectedBiqQuery = "SELECT DATETIME_TRUNC(CAST('2017-02-14 20:38:40' AS DATETIME),"
        + " YEAR) AS FD\nFROM scott.EMP";
    assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
    assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
  }
  // DATETIME_TRUNC with the MONTH unit.
  @Test public void testDateTimeTruncWithMonth() {
    final RelBuilder builder = relBuilder();
    final RexNode trunc = builder.call(SqlLibraryOperators.TRUNC,
        builder.cast(builder.literal("2017-02-14 20:38:40"), SqlTypeName.TIMESTAMP),
        builder.literal("MONTH"));
    final RelNode root = builder
        .scan("EMP")
        .project(builder.alias(trunc, "FD"))
        .build();
    final String expectedSql = "SELECT TRUNC(TIMESTAMP '2017-02-14 20:38:40', 'MONTH') AS \"FD\"\n"
        + "FROM \"scott\".\"EMP\"";
    final String expectedBiqQuery = "SELECT DATETIME_TRUNC(CAST('2017-02-14 20:38:40' AS DATETIME),"
        + " MONTH) AS FD\nFROM scott.EMP";
    assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
    assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
  }
  // DATETIME_TRUNC with the QUARTER unit.
  @Test public void testDateTimeTruncWithQuarter() {
    final RelBuilder builder = relBuilder();
    final RexNode trunc = builder.call(SqlLibraryOperators.TRUNC,
        builder.cast(builder.literal("2017-02-14 20:38:40"), SqlTypeName.TIMESTAMP),
        builder.literal("QUARTER"));
    final RelNode root = builder
        .scan("EMP")
        .project(builder.alias(trunc, "FD"))
        .build();
    final String expectedSql = "SELECT TRUNC(TIMESTAMP '2017-02-14 20:38:40', 'QUARTER') AS \"FD\""
        + "\nFROM \"scott\".\"EMP\"";
    final String expectedBiqQuery = "SELECT DATETIME_TRUNC(CAST('2017-02-14 20:38:40' AS DATETIME),"
        + " QUARTER) AS FD\nFROM scott.EMP";
    assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
    assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
  }
  // DATETIME_TRUNC with the WEEK unit.
  @Test public void testDateTimeTruncWithWeek() {
    final RelBuilder builder = relBuilder();
    final RexNode trunc = builder.call(SqlLibraryOperators.TRUNC,
        builder.cast(builder.literal("2017-02-14 20:38:40"), SqlTypeName.TIMESTAMP),
        builder.literal("WEEK"));
    final RelNode root = builder
        .scan("EMP")
        .project(builder.alias(trunc, "FD"))
        .build();
    final String expectedSql = "SELECT TRUNC(TIMESTAMP '2017-02-14 20:38:40', 'WEEK') AS \"FD\"\n"
        + "FROM \"scott\".\"EMP\"";
    final String expectedBiqQuery = "SELECT DATETIME_TRUNC(CAST('2017-02-14 20:38:40' AS DATETIME),"
        + " WEEK) AS FD\nFROM scott.EMP";
    assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
    assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
  }
  // DATETIME_TRUNC with the DAY unit.
  @Test public void testDateTimeTruncWithDay() {
    final RelBuilder builder = relBuilder();
    final RexNode trunc = builder.call(SqlLibraryOperators.TRUNC,
        builder.cast(builder.literal("2017-02-14 20:38:40"), SqlTypeName.TIMESTAMP),
        builder.literal("DAY"));
    final RelNode root = builder
        .scan("EMP")
        .project(builder.alias(trunc, "FD"))
        .build();
    final String expectedSql = "SELECT TRUNC(TIMESTAMP '2017-02-14 20:38:40', 'DAY') AS \"FD\"\n"
        + "FROM \"scott\".\"EMP\"";
    final String expectedBiqQuery = "SELECT DATETIME_TRUNC(CAST('2017-02-14 20:38:40' AS DATETIME),"
        + " DAY) AS FD\nFROM scott.EMP";
    assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
    assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
  }
  // DATETIME_TRUNC with sub-day units, starting with HOUR.
  @Test public void testDateTimeTruncWithHour() {
    final RelBuilder builder = relBuilder();
    final RexNode trunc = builder.call(SqlLibraryOperators.TRUNC,
        builder.cast(builder.literal("2017-02-14 20:38:40"), SqlTypeName.TIMESTAMP),
        builder.literal("HOUR"));
    final RelNode root = builder
        .scan("EMP")
        .project(builder.alias(trunc, "FD"))
        .build();
    final String expectedSql = "SELECT TRUNC(TIMESTAMP '2017-02-14 20:38:40', 'HOUR') AS \"FD\"\n"
        + "FROM \"scott\".\"EMP\"";
    final String expectedBiqQuery = "SELECT DATETIME_TRUNC(CAST('2017-02-14 20:38:40' AS DATETIME),"
        + " HOUR) AS FD\nFROM scott.EMP";
    assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
    assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
  }
  // DATETIME_TRUNC with the MINUTE unit.
  @Test public void testDateTimeTruncWithMinute() {
    final RelBuilder builder = relBuilder();
    final RexNode trunc = builder.call(SqlLibraryOperators.TRUNC,
        builder.cast(builder.literal("2017-02-14 20:38:40"), SqlTypeName.TIMESTAMP),
        builder.literal("MINUTE"));
    final RelNode root = builder
        .scan("EMP")
        .project(builder.alias(trunc, "FD"))
        .build();
    final String expectedSql = "SELECT TRUNC(TIMESTAMP '2017-02-14 20:38:40', 'MINUTE') AS \"FD\"\n"
        + "FROM \"scott\".\"EMP\"";
    final String expectedBiqQuery = "SELECT DATETIME_TRUNC(CAST('2017-02-14 20:38:40' AS DATETIME),"
        + " MINUTE) AS FD\nFROM scott.EMP";
    assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
    assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
  }
  // DATETIME_TRUNC with the SECOND unit.
  @Test public void testDateTimeTruncWithSecond() {
    final RelBuilder builder = relBuilder();
    final RexNode trunc = builder.call(SqlLibraryOperators.TRUNC,
        builder.cast(builder.literal("2017-02-14 20:38:40"), SqlTypeName.TIMESTAMP),
        builder.literal("SECOND"));
    final RelNode root = builder
        .scan("EMP")
        .project(builder.alias(trunc, "FD"))
        .build();
    final String expectedSql = "SELECT TRUNC(TIMESTAMP '2017-02-14 20:38:40', 'SECOND') AS \"FD\"\n"
        + "FROM \"scott\".\"EMP\"";
    final String expectedBiqQuery = "SELECT DATETIME_TRUNC(CAST('2017-02-14 20:38:40' AS DATETIME),"
        + " SECOND) AS FD\nFROM scott.EMP";
    assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
    assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
  }
  // DATETIME_TRUNC with the MILLISECOND unit.
  @Test public void testDateTimeTruncWithMilliSecond() {
    final RelBuilder builder = relBuilder();
    final RexNode trunc = builder.call(SqlLibraryOperators.TRUNC,
        builder.cast(builder.literal("2017-02-14 20:38:40"), SqlTypeName.TIMESTAMP),
        builder.literal("MILLISECOND"));
    final RelNode root = builder
        .scan("EMP")
        .project(builder.alias(trunc, "FD"))
        .build();
    final String expectedSql = "SELECT TRUNC(TIMESTAMP '2017-02-14 20:38:40', 'MILLISECOND')"
        + " AS \"FD\"\nFROM \"scott\".\"EMP\"";
    final String expectedBiqQuery = "SELECT DATETIME_TRUNC(CAST('2017-02-14 20:38:40' AS DATETIME),"
        + " MILLISECOND) AS FD\nFROM scott.EMP";
    assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
    assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
  }
  // DATETIME_TRUNC with the MICROSECOND unit.
  @Test public void testDateTimeTruncWithMicroSecond() {
    final RelBuilder builder = relBuilder();
    final RexNode trunc = builder.call(SqlLibraryOperators.TRUNC,
        builder.cast(builder.literal("2017-02-14 20:38:40"), SqlTypeName.TIMESTAMP),
        builder.literal("MICROSECOND"));
    final RelNode root = builder
        .scan("EMP")
        .project(builder.alias(trunc, "FD"))
        .build();
    final String expectedSql = "SELECT TRUNC(TIMESTAMP '2017-02-14 20:38:40', 'MICROSECOND')"
        + " AS \"FD\"\nFROM \"scott\".\"EMP\"";
    final String expectedBiqQuery = "SELECT DATETIME_TRUNC(CAST('2017-02-14 20:38:40' AS DATETIME),"
        + " MICROSECOND) AS FD\nFROM scott.EMP";
    assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
    assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
  }
// Verifies TRUNC on a time-of-day string with HOUR granularity: Calcite keeps
// TRUNC, BigQuery rewrites it to TIME_TRUNC with an unquoted time-unit keyword.
@Test public void testTimeTruncWithHour() {
final RelBuilder builder = relBuilder();
final RexNode trunc = builder.call(SqlLibraryOperators.TRUNC,
builder.literal("20:48:18"), builder.literal("HOUR"));
final RelNode root = builder
.scan("EMP")
.project(builder.alias(trunc, "FD"))
.build();
final String expectedSql = "SELECT TRUNC('20:48:18', 'HOUR') AS \"FD\"\n"
+ "FROM \"scott\".\"EMP\"";
final String expectedBiqQuery = "SELECT TIME_TRUNC('20:48:18', HOUR) AS FD\n"
+ "FROM scott.EMP";
assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
}
// Same as above with MINUTE granularity.
@Test public void testTimeTruncWithMinute() {
final RelBuilder builder = relBuilder();
final RexNode trunc = builder.call(SqlLibraryOperators.TRUNC,
builder.literal("20:48:18"), builder.literal("MINUTE"));
final RelNode root = builder
.scan("EMP")
.project(builder.alias(trunc, "FD"))
.build();
final String expectedSql = "SELECT TRUNC('20:48:18', 'MINUTE') AS \"FD\"\n"
+ "FROM \"scott\".\"EMP\"";
final String expectedBiqQuery = "SELECT TIME_TRUNC('20:48:18', MINUTE) AS FD\n"
+ "FROM scott.EMP";
assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
}
// Same as above with SECOND granularity.
@Test public void testTimeTruncWithSecond() {
final RelBuilder builder = relBuilder();
final RexNode trunc = builder.call(SqlLibraryOperators.TRUNC,
builder.literal("20:48:18"), builder.literal("SECOND"));
final RelNode root = builder
.scan("EMP")
.project(builder.alias(trunc, "FD"))
.build();
final String expectedSql = "SELECT TRUNC('20:48:18', 'SECOND') AS \"FD\"\n"
+ "FROM \"scott\".\"EMP\"";
final String expectedBiqQuery = "SELECT TIME_TRUNC('20:48:18', SECOND) AS FD\n"
+ "FROM scott.EMP";
assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
}
// Same as above with MILLISECOND granularity.
// NOTE(review): method name misspells "Milli" as "Mili"; renaming would be a
// cosmetic, standalone change since test method names have no callers.
@Test public void testTimeTruncWithMiliSecond() {
final RelBuilder builder = relBuilder();
final RexNode trunc = builder.call(SqlLibraryOperators.TRUNC,
builder.literal("20:48:18"), builder.literal("MILLISECOND"));
final RelNode root = builder
.scan("EMP")
.project(builder.alias(trunc, "FD"))
.build();
final String expectedSql = "SELECT TRUNC('20:48:18', 'MILLISECOND') AS \"FD\"\n"
+ "FROM \"scott\".\"EMP\"";
final String expectedBiqQuery = "SELECT TIME_TRUNC('20:48:18', MILLISECOND) AS FD\n"
+ "FROM scott.EMP";
assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
}
// Same as above with MICROSECOND granularity.
@Test public void testTimeTruncWithMicroSecond() {
final RelBuilder builder = relBuilder();
final RexNode trunc = builder.call(SqlLibraryOperators.TRUNC,
builder.literal("20:48:18"), builder.literal("MICROSECOND"));
final RelNode root = builder
.scan("EMP")
.project(builder.alias(trunc, "FD"))
.build();
final String expectedSql = "SELECT TRUNC('20:48:18', 'MICROSECOND') AS \"FD\"\n"
+ "FROM \"scott\".\"EMP\"";
final String expectedBiqQuery = "SELECT TIME_TRUNC('20:48:18', MICROSECOND) AS FD\n"
+ "FROM scott.EMP";
assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
}
// Verifies HASHROW(column): Calcite keeps HASHROW, BigQuery rewrites it to
// FARM_FINGERPRINT.
// NOTE(review): method name violates lowerCamelCase ("testHashrow" expected);
// left unchanged here since it is purely cosmetic.
@Test public void testhashrow() {
final RelBuilder builder = relBuilder();
final RexNode hashrow = builder.call(SqlLibraryOperators.HASHROW,
builder.scan("EMP").field(1));
final RelNode root = builder
.scan("EMP")
.project(builder.alias(hashrow, "FD"))
.build();
final String expectedSql = "SELECT HASHROW(\"ENAME\") AS \"FD\"\n"
+ "FROM \"scott\".\"EMP\"";
final String expectedBiqQuery = "SELECT FARM_FINGERPRINT(ENAME) AS FD\n"
+ "FROM scott.EMP";
assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
}
/** Builds a one-row Values relation whose row type is a single INTEGER column
 * named "ZERO" (holding a zero literal), then projects the two supplied
 * expressions on top of it. Used by the UNION-of-literal-rows tests. */
RelNode createLogicalValueRel(RexNode col1, RexNode col2) {
  final RelBuilder b = relBuilder();
  // Row type: one INTEGER field called "ZERO".
  final RelDataTypeField zeroField =
      new RelDataTypeFieldImpl("ZERO", 0,
          b.getTypeFactory().createSqlType(SqlTypeName.INTEGER));
  final List<RelDataTypeField> fields = new ArrayList<>();
  fields.add(zeroField);
  final RelRecordType rowType = new RelRecordType(fields);
  // Single row containing a zero literal of the matching type.
  b.values(
      ImmutableList.of(
          ImmutableList.of(
              b.getRexBuilder().makeZeroLiteral(
                  b.getTypeFactory().createSqlType(SqlTypeName.INTEGER)))),
      rowType);
  b.project(col1, col2);
  return b.build();
}
// Verifies that two chained UNION ALLs over single-row Values inputs render as
// three SELECTs of literals: Hive uses bare aliases, BigQuery uses "AS" aliases.
@Test public void testMultipleUnionWithLogicalValue() {
final RelBuilder builder = relBuilder();
builder.push(
createLogicalValueRel(builder.alias(builder.literal("ALA"), "col1"),
builder.alias(builder.literal("AmericaAnchorage"), "col2")));
builder.push(
createLogicalValueRel(builder.alias(builder.literal("ALAW"), "col1"),
builder.alias(builder.literal("USAleutian"), "col2")));
builder.union(true);
builder.push(
createLogicalValueRel(builder.alias(builder.literal("AST"), "col1"),
builder.alias(builder.literal("AmericaHalifax"), "col2")));
builder.union(true);
final RelNode root = builder.build();
final String expectedHive = "SELECT 'ALA' col1, 'AmericaAnchorage' col2\n"
+ "UNION ALL\n"
+ "SELECT 'ALAW' col1, 'USAleutian' col2\n"
+ "UNION ALL\n"
+ "SELECT 'AST' col1, 'AmericaHalifax' col2";
final String expectedBigQuery = "SELECT 'ALA' AS col1, 'AmericaAnchorage' AS col2\n"
+ "UNION ALL\n"
+ "SELECT 'ALAW' AS col1, 'USAleutian' AS col2\n"
+ "UNION ALL\n"
+ "SELECT 'AST' AS col1, 'AmericaHalifax' AS col2";
relFn(b -> root)
.withHive2().ok(expectedHive)
.withBigQuery().ok(expectedBigQuery);
}
// Verifies ROWID(): Calcite keeps ROWID(), BigQuery rewrites it to
// GENERATE_UUID().
@Test public void testRowid() {
final RelBuilder builder = relBuilder();
final RexNode rowidRexNode = builder.call(SqlLibraryOperators.ROWID);
final RelNode root = builder
.scan("EMP")
.project(builder.alias(rowidRexNode, "FD"))
.build();
final String expectedSql = "SELECT ROWID() AS \"FD\"\n"
+ "FROM \"scott\".\"EMP\"";
final String expectedBiqQuery = "SELECT GENERATE_UUID() AS FD\n"
+ "FROM scott.EMP";
assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
}
// Verifies that backslashes in a string literal are doubled when rendered for
// BigQuery (its string literals treat backslash as an escape character).
@Test public void testEscapeFunction() {
String query =
"SELECT '\\\\PWFSNFS01EFS\\imagenowcifs\\debitmemo' AS DM_SENDFILE_PATH1";
final String expectedBQSql =
"SELECT '\\\\\\\\PWFSNFS01EFS\\\\imagenowcifs\\\\debitmemo' AS "
+ "DM_SENDFILE_PATH1";
sql(query)
.withBigQuery()
.ok(expectedBQSql);
}
// Verifies TIME_ADD with an INTERVAL_SECONDS operand renders as
// TIME_ADD(..., INTERVAL n SECOND) in BigQuery.
@Test public void testTimeAdd() {
final RelBuilder builder = relBuilder();
final RexNode createRexNode = builder.call(SqlLibraryOperators.TIME_ADD,
builder.literal("00:00:00"),
builder.call(SqlLibraryOperators.INTERVAL_SECONDS, builder.literal(10000)));
final RelNode root = builder
.scan("EMP")
.project(builder.alias(createRexNode, "FD"))
.build();
final String expectedBiqQuery = "SELECT TIME_ADD('00:00:00', INTERVAL 10000 SECOND) AS FD\n"
+ "FROM scott.EMP";
assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
}
// Verifies a standalone INTERVAL_SECONDS call renders as
// "INTERVAL n SECOND" in BigQuery.
@Test public void testIntervalSeconds() {
final RelBuilder builder = relBuilder();
final RexNode createRexNode = builder.call
(SqlLibraryOperators.INTERVAL_SECONDS, builder.literal(10000));
final RelNode root = builder
.scan("EMP")
.project(builder.alias(createRexNode, "FD"))
.build();
final String expectedBiqQuery = "SELECT INTERVAL 10000 SECOND AS FD\n"
+ "FROM scott.EMP";
assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
}
// Verifies non-ASCII characters in string literals are rendered as \\uXXXX
// escapes by the default (Calcite) dialect.
@Test void testUnicodeCharacters() {
final String query = "SELECT 'ð', '°C' FROM \"product\"";
final String expected = "SELECT '\\u00f0', '\\u00b0C'\n"
+ "FROM \"foodmart\".\"product\"";
sql(query).ok(expected);
}
// Verifies TIME + interval (PLUS with a MICROSECOND interval literal of 1000,
// i.e. 1 microsecond) renders as TIME_ADD(..., INTERVAL 1 MICROSECOND) in
// BigQuery.
@Test public void testPlusForTimeAdd() {
final RelBuilder builder = relBuilder();
final RexNode createRexNode = builder.call(SqlStdOperatorTable.PLUS,
builder.cast(builder.literal("12:15:07"), SqlTypeName.TIME),
builder.getRexBuilder().makeIntervalLiteral(new BigDecimal(1000),
new SqlIntervalQualifier(MICROSECOND, null, SqlParserPos.ZERO)));
final RelNode root = builder
.scan("EMP")
.project(builder.alias(createRexNode, "FD"))
.build();
final String expectedBiqQuery = "SELECT TIME_ADD(TIME '12:15:07', INTERVAL 1 MICROSECOND) "
+ "AS FD\n"
+ "FROM scott.EMP";
assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
}
// Mirror of the previous test for MINUS: renders as TIME_SUB in BigQuery.
@Test public void testMinusForTimeSub() {
final RelBuilder builder = relBuilder();
final RexNode createRexNode = builder.call(SqlStdOperatorTable.MINUS,
builder.cast(builder.literal("12:15:07"), SqlTypeName.TIME),
builder.getRexBuilder().makeIntervalLiteral(new BigDecimal(1000),
new SqlIntervalQualifier(MICROSECOND, null, SqlParserPos.ZERO)));
final RelNode root = builder
.scan("EMP")
.project(builder.alias(createRexNode, "FD"))
.build();
final String expectedBiqQuery = "SELECT TIME_SUB(TIME '12:15:07', INTERVAL 1 MICROSECOND) "
+ "AS FD\n"
+ "FROM scott.EMP";
assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
}
// Verifies TIMESTAMP + interval renders as TIMESTAMP_ADD(CAST(... AS DATETIME),
// INTERVAL 1 MICROSECOND) in BigQuery.
@Test public void testPlusForTimestampAdd() {
final RelBuilder builder = relBuilder();
final RexNode createRexNode = builder.call(SqlStdOperatorTable.PLUS,
builder.cast(builder.literal("1999-07-01 15:00:00-08:00"), SqlTypeName.TIMESTAMP),
builder.getRexBuilder().makeIntervalLiteral(new BigDecimal(1000),
new SqlIntervalQualifier(MICROSECOND, null, SqlParserPos.ZERO)));
final RelNode root = builder
.scan("EMP")
.project(builder.alias(createRexNode, "FD"))
.build();
final String expectedBiqQuery =
"SELECT TIMESTAMP_ADD(CAST('1999-07-01 15:00:00-08:00' AS DATETIME), "
+ "INTERVAL 1 MICROSECOND) AS FD\n"
+ "FROM scott.EMP";
assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
}
// Mirror of the previous test for MINUS: renders as TIMESTAMP_SUB.
// NOTE(review): the method name says "Plus" but the operator under test is
// MINUS; "testMinusForTimestampSub" would match the naming of the TIME pair.
@Test public void testPlusForTimestampSub() {
final RelBuilder builder = relBuilder();
final RexNode createRexNode = builder.call(SqlStdOperatorTable.MINUS,
builder.cast(builder.literal("1999-07-01 15:00:00-08:00"), SqlTypeName.TIMESTAMP),
builder.getRexBuilder().makeIntervalLiteral(new BigDecimal(1000),
new SqlIntervalQualifier(MICROSECOND, null, SqlParserPos.ZERO)));
final RelNode root = builder
.scan("EMP")
.project(builder.alias(createRexNode, "FD"))
.build();
final String expectedBiqQuery =
"SELECT TIMESTAMP_SUB(CAST('1999-07-01 15:00:00-08:00' AS DATETIME), "
+ "INTERVAL 1 MICROSECOND) AS FD\n"
+ "FROM scott.EMP";
assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
}
// Verifies DATE + interval (a day-interval literal of 86400000 ms = 1 day):
// BigQuery renders DATE_ADD(..., INTERVAL 1 DAY); Spark keeps infix
// "+ INTERVAL '1' DAY".
@Test public void testPlusForDateAdd() {
final RelBuilder builder = relBuilder();
final RexNode createRexNode = builder.call(SqlStdOperatorTable.PLUS,
builder.cast(builder.literal("1999-07-01"), SqlTypeName.DATE),
builder.getRexBuilder().makeIntervalLiteral(new BigDecimal(86400000),
new SqlIntervalQualifier(DAY, 6, DAY,
-1, SqlParserPos.ZERO)));
final RelNode root = builder
.scan("EMP")
.project(builder.alias(createRexNode, "FD"))
.build();
final String expectedBiqQuery = "SELECT DATE_ADD(DATE '1999-07-01', INTERVAL 1 DAY) AS FD\n"
+ "FROM scott.EMP";
final String expectedSparkQuery = "SELECT DATE '1999-07-01' + INTERVAL '1' DAY FD\n"
+ "FROM scott.EMP";
assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
assertThat(toSql(root, DatabaseProduct.SPARK.getDialect()), isLinux(expectedSparkQuery));
}
// Mirror of the previous test for MINUS: BigQuery renders DATE_SUB; Spark
// keeps infix "- INTERVAL '1' DAY".
@Test public void testPlusForDateSub() {
final RelBuilder builder = relBuilder();
final RexNode createRexNode = builder.call(SqlStdOperatorTable.MINUS,
builder.cast(builder.literal("1999-07-01"), SqlTypeName.DATE),
builder.getRexBuilder().makeIntervalLiteral(new BigDecimal(86400000),
new SqlIntervalQualifier(DAY, 6, DAY,
-1, SqlParserPos.ZERO)));
final RelNode root = builder
.scan("EMP")
.project(builder.alias(createRexNode, "FD"))
.build();
final String expectedBiqQuery = "SELECT DATE_SUB(DATE '1999-07-01', INTERVAL 1 DAY) AS FD\n"
+ "FROM scott.EMP";
final String expectedSparkQuery = "SELECT DATE '1999-07-01' - INTERVAL '1' DAY FD\n"
+ "FROM scott.EMP";
assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
assertThat(toSql(root, DatabaseProduct.SPARK.getDialect()), isLinux(expectedSparkQuery));
}
// Verifies qualification when a column shares its name with its table: the
// BigQuery output keeps the table alias so "test.test" stays unambiguous.
// Uses the FOODMART_TEST schema fixture (sqlTest) rather than JDBC_FOODMART.
@Test public void testWhenTableNameAndColumnNameIsSame() {
String query =
"select \"test\" from \"foodmart\".\"test\"";
final String expectedBQSql =
"SELECT test.test\n"
+ "FROM foodmart.test AS test";
sqlTest(query)
.withBigQuery()
.ok(expectedBQSql);
}
// Verifies FORMAT_TIMESTAMP('TIMEOFDAY', CURRENT_TIMESTAMP): BigQuery maps the
// 'TIMEOFDAY' format element to '%c' and CURRENT_TIMESTAMP to
// CURRENT_DATETIME().
@Test public void testTimeOfDayFunction() {
final RelBuilder builder = relBuilder();
final RexNode formatTimestampRexNode2 = builder.call(SqlLibraryOperators.FORMAT_TIMESTAMP,
builder.literal("TIMEOFDAY"), builder.call(SqlLibraryOperators.CURRENT_TIMESTAMP));
final RelNode root = builder
.scan("EMP")
.project(builder.alias(formatTimestampRexNode2, "FD2"))
.build();
final String expectedSql = "SELECT FORMAT_TIMESTAMP('TIMEOFDAY', CURRENT_TIMESTAMP) AS "
+ "\"FD2\"\n"
+ "FROM \"scott\".\"EMP\"";
final String expectedBiqQuery = "SELECT FORMAT_TIMESTAMP('%c', CURRENT_DATETIME()) AS FD2\n"
+ "FROM scott.EMP";
assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
}
/** Verifies that a Cartesian product with an equality predicate in the WHERE
 * clause is rewritten to an INNER JOIN, with the non-join predicate kept in
 * WHERE. */
@Test void testConversionOfFilterWithCrossJoinToFilterWithInnerJoin() {
  String query =
      "select *\n"
      + " from \"foodmart\".\"employee\" as \"e\", \"foodmart\".\"department\" as \"d\"\n"
      + " where \"e\".\"department_id\" = \"d\".\"department_id\" "
      + "and \"e\".\"employee_id\" > 2";
  String expect = "SELECT *\n"
      + "FROM foodmart.employee\n"
      + "INNER JOIN foodmart.department ON employee.department_id = department.department_id\n"
      + "WHERE employee.employee_id > 2";
  checkFilterExtractInnerJoin(query, expect);
}

/** Same rewrite when the filter consists solely of the join condition, so the
 * WHERE clause disappears entirely. */
@Test void testConversionOfFilterWithCrossJoinToFilterWithInnerJoinWithOneConditionInFilter() {
  String query =
      "select *\n"
      + " from \"foodmart\".\"employee\" as \"e\", \"foodmart\".\"department\" as \"d\"\n"
      + " where \"e\".\"department_id\" = \"d\".\"department_id\"";
  String expect = "SELECT *\n"
      + "FROM foodmart.employee\n"
      + "INNER JOIN foodmart.department ON employee.department_id = department.department_id";
  checkFilterExtractInnerJoin(query, expect);
}

/** Three-way cross join: join predicates are distributed onto the two INNER
 * JOINs; the remaining single-table predicates stay in WHERE. */
@Test void testConversionOfFilterWithThreeCrossJoinToFilterWithInnerJoin() {
  String query = "select *\n"
      + " from \"foodmart\".\"employee\" as \"e\", \"foodmart\".\"department\" as \"d\", \n"
      + " \"foodmart\".\"reserve_employee\" as \"re\"\n"
      + " where \"e\".\"department_id\" = \"d\".\"department_id\" and \"e\".\"employee_id\" > 2\n"
      + " and \"re\".\"employee_id\" > \"e\".\"employee_id\"\n"
      + " and \"e\".\"department_id\" > 5";
  String expect = "SELECT *\n"
      + "FROM foodmart.employee\n"
      + "INNER JOIN foodmart.department ON employee.department_id = department.department_id\n"
      + "INNER JOIN foodmart.reserve_employee "
      + "ON employee.employee_id < reserve_employee.employee_id\n"
      + "WHERE employee.employee_id > 2 AND employee.department_id > 5";
  checkFilterExtractInnerJoin(query, expect);
}

/** Disjunctive (OR) conditions cannot be pushed into a join condition, so the
 * joins degrade to "ON TRUE" and the whole predicate remains in WHERE. */
@Test void testConversionOfFilterWithCompositeConditionWithThreeCrossJoinToFilterWithInnerJoin() {
  String query = "select *\n"
      + " from \"foodmart\".\"employee\" as \"e\", \"foodmart\".\"department\" as \"d\", \n"
      + " \"foodmart\".\"reserve_employee\" as \"re\"\n"
      + " where (\"e\".\"department_id\" = \"d\".\"department_id\"\n"
      + " or \"re\".\"employee_id\" = \"e\".\"employee_id\")\n"
      + " and \"re\".\"employee_id\" = \"d\".\"department_id\"\n";
  String expect = "SELECT *\n"
      + "FROM foodmart.employee\n"
      + "INNER JOIN foodmart.department ON TRUE\n"
      + "INNER JOIN foodmart.reserve_employee ON TRUE\n"
      + "WHERE (employee.department_id = department.department_id "
      + "OR reserve_employee.employee_id = employee.employee_id) "
      + "AND reserve_employee.employee_id = department.department_id";
  checkFilterExtractInnerJoin(query, expect);
}

//WHERE t1.c1 = t2.c1 AND t2.c2 = t3.c2 AND (t1.c3 = t3.c3 OR t1.c4 = t2.c4)
/** Parenthesized OR sub-condition mixed with equality conjuncts: here too the
 * joins degrade to "ON TRUE" and the full predicate stays in WHERE. */
@Test void testFilterWithParenthesizedConditionsWithThreeCrossJoinToFilterWithInnerJoin() {
  String query = "select *\n"
      + " from \"foodmart\".\"employee\" as \"e\", \"foodmart\".\"department\" as \"d\", \n"
      + " \"foodmart\".\"reserve_employee\" as \"re\"\n"
      + " where \"e\".\"department_id\" = \"d\".\"department_id\"\n"
      + " and \"re\".\"employee_id\" = \"d\".\"department_id\"\n"
      + " and (\"re\".\"department_id\" < \"d\".\"department_id\"\n"
      + " or \"d\".\"department_id\" = \"re\".\"department_id\")\n"
  ;
  String expect = "SELECT *\n"
      + "FROM foodmart.employee\n"
      + "INNER JOIN foodmart.department ON TRUE\n"
      + "INNER JOIN foodmart.reserve_employee ON TRUE\n"
      + "WHERE employee.department_id = department.department_id "
      + "AND reserve_employee.employee_id = department.department_id "
      + "AND (reserve_employee.department_id < department.department_id "
      + "OR department.department_id = reserve_employee.department_id)";
  checkFilterExtractInnerJoin(query, expect);
}

/** Shared driver for the FilterExtractInnerJoinRule tests above: runs the rule
 * through a HepPlanner and checks the optimized BigQuery SQL. Extracted from
 * five byte-identical copies of this boilerplate. */
private void checkFilterExtractInnerJoin(String query, String expect) {
  HepProgramBuilder builder = new HepProgramBuilder();
  builder.addRuleClass(FilterExtractInnerJoinRule.class);
  HepPlanner hepPlanner = new HepPlanner(builder.build());
  RuleSet rules = RuleSets.ofList(CoreRules.FILTER_EXTRACT_INNER_JOIN_RULE);
  sql(query).withBigQuery().optimize(rules, hepPlanner).ok(expect);
}
// Verifies CAST(CURRENT_TIMESTAMP AS TIMESTAMP_WITH_LOCAL_TIME_ZONE) in
// BigQuery: the call becomes CURRENT_DATETIME() while the target type name is
// emitted verbatim.
@Test void translateCastOfTimestampWithLocalTimeToTimestampInBq() {
final RelBuilder relBuilder = relBuilder();
final RexNode castTimestampTimeZoneCall =
relBuilder.cast(relBuilder.call(SqlStdOperatorTable.CURRENT_TIMESTAMP),
SqlTypeName.TIMESTAMP_WITH_LOCAL_TIME_ZONE);
final RelNode root = relBuilder
.values(new String[] {"c"}, 1)
.project(castTimestampTimeZoneCall)
.build();
final String expectedBigQuery =
"SELECT CAST(CURRENT_DATETIME() AS TIMESTAMP_WITH_LOCAL_TIME_ZONE) AS `$f0`";
assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBigQuery));
}
// Verifies PARSE_DATE / PARSE_TIME format-string translation: BigQuery maps
// 'YYYYMMDD' -> '%Y%m%d' and 'HH24MISS' -> '%H%M%S'; Calcite and Spark keep
// the original format strings.
@Test public void testParseDateTimeFormat() {
final RelBuilder builder = relBuilder();
final RexNode parseDateNode = builder.call(SqlLibraryOperators.PARSE_DATE,
builder.literal("YYYYMMDD"), builder.literal("99991231"));
final RexNode parseTimeNode = builder.call(SqlLibraryOperators.PARSE_TIME,
builder.literal("HH24MISS"), builder.literal("122333"));
final RelNode root = builder.scan("EMP").
project(builder.alias(parseDateNode, "date1"),
builder.alias(parseTimeNode, "time1"))
.build();
final String expectedSql = "SELECT PARSE_DATE('YYYYMMDD', '99991231') AS \"date1\", "
+ "PARSE_TIME('HH24MISS', '122333') AS \"time1\"\n"
+ "FROM \"scott\".\"EMP\"";
final String expectedBiqQuery = "SELECT PARSE_DATE('%Y%m%d', '99991231') AS date1, "
+ "PARSE_TIME('%H%M%S', '122333') AS time1\n"
+ "FROM scott.EMP";
final String expectedSparkQuery = "SELECT PARSE_DATE('YYYYMMDD', '99991231') date1, "
+ "PARSE_TIME('HH24MISS', '122333') time1\n"
+ "FROM scott.EMP";
assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
assertThat(toSql(root, DatabaseProduct.SPARK.getDialect()), isLinux(expectedSparkQuery));
}
// Verifies POSITION renders with its special "IN" syntax in both Calcite and
// Spark dialects.
@Test public void testPositionOperator() {
final RelBuilder builder = relBuilder();
final RexNode parseTrimNode = builder.call(SqlStdOperatorTable.POSITION,
builder.literal("a"),
builder.literal("Name"));
final RelNode root = builder.scan("EMP").
project(builder.alias(parseTrimNode, "t"))
.build();
final String expectedSql = "SELECT POSITION('a' IN 'Name') AS \"t\"\n"
+ "FROM \"scott\".\"EMP\"";
final String expectedSparkQuery = "SELECT POSITION('a' IN 'Name') t\nFROM scott.EMP";
assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
assertThat(toSql(root, DatabaseProduct.SPARK.getDialect()), isLinux(expectedSparkQuery));
}
// Verifies that an ad-hoc ERROR function renders unchanged in Calcite and is
// rewritten to RAISE_ERROR for Spark.
// NOTE(review): the operand checker is OperandTypes.STRING_STRING (two
// strings) yet the call passes a single literal — presumably RelBuilder.call
// does not enforce the operand checker here; confirm before tightening.
@Test public void testBigQueryErrorOperator() {
final RelBuilder builder = relBuilder();
final SqlFunction errorOperator =
new SqlFunction("ERROR",
SqlKind.OTHER_FUNCTION,
ReturnTypes.VARCHAR_2000,
null,
OperandTypes.STRING_STRING,
SqlFunctionCategory.SYSTEM);
final RexNode parseTrimNode = builder.call(errorOperator,
builder.literal("Error Message!"));
final RelNode root = builder.scan("EMP").
project(builder.alias(parseTrimNode, "t"))
.build();
final String expectedSql = "SELECT ERROR('Error Message!') AS \"t\"\n"
+ "FROM \"scott\".\"EMP\"";
final String expectedSparkQuery = "SELECT RAISE_ERROR('Error Message!') t\n"
+ "FROM scott.EMP";
assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
assertThat(toSql(root, DatabaseProduct.SPARK.getDialect()), isLinux(expectedSparkQuery));
}
// Verifies the library TRUE() function: Calcite renders the call form TRUE(),
// BigQuery renders the bare keyword TRUE.
@Test public void testTrue() {
final RelBuilder builder = relBuilder();
final RexNode trueRexNode = builder.call(TRUE);
final RelNode root = builder.scan("EMP")
.project(builder.alias(trueRexNode, "dm"))
.build();
final String expectedSql = "SELECT TRUE() AS \"dm\"\n"
+ "FROM \"scott\".\"EMP\"";
final String expectedBiqQuery = "SELECT TRUE AS dm\n"
+ "FROM scott.EMP";
assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
}
// Mirror of the previous test for FALSE().
@Test public void testFalse() {
final RelBuilder builder = relBuilder();
final RexNode falseRexNode = builder.call(FALSE);
final RelNode root = builder.scan("EMP")
.project(builder.alias(falseRexNode, "dm"))
.build();
final String expectedSql = "SELECT FALSE() AS \"dm\"\n"
+ "FROM \"scott\".\"EMP\"";
final String expectedBiqQuery = "SELECT FALSE AS dm\n"
+ "FROM scott.EMP";
assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
}
}
|
core/src/test/java/org/apache/calcite/rel/rel2sql/RelToSqlConverterTest.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to you under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.calcite.rel.rel2sql;
import org.apache.calcite.config.NullCollation;
import org.apache.calcite.plan.RelOptPlanner;
import org.apache.calcite.plan.RelOptRule;
import org.apache.calcite.plan.RelTraitDef;
import org.apache.calcite.plan.hep.HepPlanner;
import org.apache.calcite.plan.hep.HepProgramBuilder;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.rel.core.JoinRelType;
import org.apache.calcite.rel.logical.LogicalAggregate;
import org.apache.calcite.rel.logical.LogicalFilter;
import org.apache.calcite.rel.rules.AggregateJoinTransposeRule;
import org.apache.calcite.rel.rules.AggregateProjectMergeRule;
import org.apache.calcite.rel.rules.CoreRules;
import org.apache.calcite.rel.rules.FilterExtractInnerJoinRule;
import org.apache.calcite.rel.rules.FilterJoinRule;
import org.apache.calcite.rel.rules.ProjectToWindowRule;
import org.apache.calcite.rel.rules.PruneEmptyRules;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rel.type.RelDataTypeFactory;
import org.apache.calcite.rel.type.RelDataTypeField;
import org.apache.calcite.rel.type.RelDataTypeFieldImpl;
import org.apache.calcite.rel.type.RelDataTypeSystem;
import org.apache.calcite.rel.type.RelDataTypeSystemImpl;
import org.apache.calcite.rel.type.RelRecordType;
import org.apache.calcite.rex.RexBuilder;
import org.apache.calcite.rex.RexLiteral;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.rex.RexSubQuery;
import org.apache.calcite.runtime.FlatLists;
import org.apache.calcite.runtime.Hook;
import org.apache.calcite.schema.SchemaPlus;
import org.apache.calcite.sql.SqlCall;
import org.apache.calcite.sql.SqlDialect;
import org.apache.calcite.sql.SqlDialect.Context;
import org.apache.calcite.sql.SqlDialect.DatabaseProduct;
import org.apache.calcite.sql.SqlFunction;
import org.apache.calcite.sql.SqlFunctionCategory;
import org.apache.calcite.sql.SqlIntervalQualifier;
import org.apache.calcite.sql.SqlKind;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.SqlSelect;
import org.apache.calcite.sql.SqlWriter;
import org.apache.calcite.sql.SqlWriterConfig;
import org.apache.calcite.sql.dialect.CalciteSqlDialect;
import org.apache.calcite.sql.dialect.HiveSqlDialect;
import org.apache.calcite.sql.dialect.JethroDataSqlDialect;
import org.apache.calcite.sql.dialect.MssqlSqlDialect;
import org.apache.calcite.sql.dialect.MysqlSqlDialect;
import org.apache.calcite.sql.dialect.OracleSqlDialect;
import org.apache.calcite.sql.dialect.PostgresqlSqlDialect;
import org.apache.calcite.sql.dialect.SparkSqlDialect;
import org.apache.calcite.sql.fun.SqlLibraryOperators;
import org.apache.calcite.sql.fun.SqlStdOperatorTable;
import org.apache.calcite.sql.parser.SqlParser;
import org.apache.calcite.sql.parser.SqlParserPos;
import org.apache.calcite.sql.type.OperandTypes;
import org.apache.calcite.sql.type.ReturnTypes;
import org.apache.calcite.sql.type.SqlTypeFactoryImpl;
import org.apache.calcite.sql.type.SqlTypeName;
import org.apache.calcite.sql.util.SqlShuttle;
import org.apache.calcite.sql.validate.SqlConformance;
import org.apache.calcite.sql2rel.SqlToRelConverter;
import org.apache.calcite.test.CalciteAssert;
import org.apache.calcite.test.MockSqlOperatorTable;
import org.apache.calcite.test.RelBuilderTest;
import org.apache.calcite.tools.FrameworkConfig;
import org.apache.calcite.tools.Frameworks;
import org.apache.calcite.tools.Planner;
import org.apache.calcite.tools.Program;
import org.apache.calcite.tools.Programs;
import org.apache.calcite.tools.RelBuilder;
import org.apache.calcite.tools.RuleSet;
import org.apache.calcite.tools.RuleSets;
import org.apache.calcite.util.TestUtil;
import org.apache.calcite.util.TimestampString;
import org.apache.calcite.util.Util;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import java.util.function.UnaryOperator;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import static org.apache.calcite.avatica.util.TimeUnit.DAY;
import static org.apache.calcite.avatica.util.TimeUnit.MICROSECOND;
import static org.apache.calcite.sql.fun.SqlLibrary.BIG_QUERY;
import static org.apache.calcite.sql.fun.SqlLibraryOperators.FALSE;
import static org.apache.calcite.sql.fun.SqlLibraryOperators.TRUE;
import static org.apache.calcite.test.Matchers.isLinux;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.notNullValue;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
/**
* Tests for {@link RelToSqlConverter}.
*/
class RelToSqlConverterTest {
/** Initiates a test case with a given SQL query, parsed and rendered against
 * the JDBC_FOODMART schema with the default Calcite dialect. */
private Sql sql(String sql) {
return new Sql(CalciteAssert.SchemaSpec.JDBC_FOODMART, sql,
CalciteSqlDialect.DEFAULT, SqlParser.Config.DEFAULT,
UnaryOperator.identity(), null, ImmutableList.of());
}
/** Like {@link #sql} but against the FOODMART_TEST schema; used by tests that
 * need its fixtures (e.g. a table and column sharing the same name). */
private Sql sqlTest(String sql) {
return new Sql(CalciteAssert.SchemaSpec.FOODMART_TEST, sql,
CalciteSqlDialect.DEFAULT, SqlParser.Config.DEFAULT,
UnaryOperator.identity(), null, ImmutableList.of());
}
/** Initiates a test case with a given {@link RelNode} supplier. */
private Sql relFn(Function<RelBuilder, RelNode> relFn) {
return sql("?").relFn(relFn);
}
/** Creates a {@link Planner} over the given schema with the mock operator
 * table (standard operators plus the RAMP test functions) installed.
 *
 * @param traitDefs trait definitions for the planner, may be null
 * @param parserConfig SQL parser configuration
 * @param schema default schema for name resolution
 * @param sqlToRelConf SQL-to-rel conversion configuration
 * @param programs optimization programs to register
 */
private static Planner getPlanner(List<RelTraitDef> traitDefs,
SqlParser.Config parserConfig, SchemaPlus schema,
SqlToRelConverter.Config sqlToRelConf, Program... programs) {
final MockSqlOperatorTable operatorTable =
new MockSqlOperatorTable(SqlStdOperatorTable.instance());
MockSqlOperatorTable.addRamp(operatorTable);
final FrameworkConfig config = Frameworks.newConfigBuilder()
.parserConfig(parserConfig)
.defaultSchema(schema)
.traitDefs(traitDefs)
.sqlToRelConverterConfig(sqlToRelConf)
.programs(programs)
.operatorTable(operatorTable)
.build();
return Frameworks.getPlanner(config);
}
/** Builds a JethroData dialect from a hand-rolled context (version 1.0,
 * double-quote identifiers, NULLs collate high, empty Jethro capability
 * info) since no default context exists for it. */
private static JethroDataSqlDialect jethroDataSqlDialect() {
Context dummyContext = SqlDialect.EMPTY_CONTEXT
.withDatabaseProduct(SqlDialect.DatabaseProduct.JETHRO)
.withDatabaseMajorVersion(1)
.withDatabaseMinorVersion(0)
.withDatabaseVersion("1.0")
.withIdentifierQuoteString("\"")
.withNullCollation(NullCollation.HIGH)
.withJethroInfo(JethroDataSqlDialect.JethroInfo.EMPTY);
return new JethroDataSqlDialect(dummyContext);
}
/** Returns a MySQL dialect whose NULL collation is overridden to the given
 * value (the default MySQL context otherwise applies). */
private static MysqlSqlDialect mySqlDialect(NullCollation nullCollation) {
return new MysqlSqlDialect(MysqlSqlDialect.DEFAULT_CONTEXT
.withNullCollation(nullCollation));
}
/** Returns a collection of common dialects, and the database products they
 * represent. MySQL appears twice: once with its default context and once with
 * NULL collation overridden to HIGH. The {@code DatabaseProduct} qualification
 * is now uniform (previously a mix of {@code SqlDialect.DatabaseProduct.X} and
 * bare {@code DatabaseProduct.X}); {@code DatabaseProduct} is imported at the
 * top of the file. */
private static Map<SqlDialect, DatabaseProduct> dialects() {
  return ImmutableMap.<SqlDialect, DatabaseProduct>builder()
      .put(DatabaseProduct.BIG_QUERY.getDialect(), DatabaseProduct.BIG_QUERY)
      .put(DatabaseProduct.CALCITE.getDialect(), DatabaseProduct.CALCITE)
      .put(DatabaseProduct.DB2.getDialect(), DatabaseProduct.DB2)
      .put(DatabaseProduct.HIVE.getDialect(), DatabaseProduct.HIVE)
      .put(jethroDataSqlDialect(), DatabaseProduct.JETHRO)
      .put(DatabaseProduct.MSSQL.getDialect(), DatabaseProduct.MSSQL)
      .put(DatabaseProduct.MYSQL.getDialect(), DatabaseProduct.MYSQL)
      .put(mySqlDialect(NullCollation.HIGH), DatabaseProduct.MYSQL)
      .put(DatabaseProduct.ORACLE.getDialect(), DatabaseProduct.ORACLE)
      .put(DatabaseProduct.POSTGRESQL.getDialect(), DatabaseProduct.POSTGRESQL)
      .put(DatabaseProduct.PRESTO.getDialect(), DatabaseProduct.PRESTO)
      .build();
}
  /** Creates a RelBuilder, using the standard configuration from
   * {@link RelBuilderTest}. */
  private static RelBuilder relBuilder() {
    return RelBuilder.create(RelBuilderTest.config().build());
  }
  /** Converts a relational expression to SQL in the default Calcite
   * dialect. */
  private String toSql(RelNode root) {
    return toSql(root, SqlDialect.DatabaseProduct.CALCITE.getDialect());
  }
/** Converts a relational expression to SQL in a given dialect. */
private static String toSql(RelNode root, SqlDialect dialect) {
return toSql(root, dialect, c ->
c.withAlwaysUseParentheses(false)
.withSelectListItemsOnSeparateLines(false)
.withUpdateSetListNewline(false)
.withIndentation(0));
}
/** Converts a relational expression to SQL in a given dialect
* and with a particular writer configuration. */
private static String toSql(RelNode root, SqlDialect dialect,
UnaryOperator<SqlWriterConfig> transform) {
final RelToSqlConverter converter = new RelToSqlConverter(dialect);
final SqlNode sqlNode = converter.visitRoot(root).asStatement();
return sqlNode.toSqlString(c -> transform.apply(c.withDialect(dialect)))
.getSql();
}
@Test public void testSimpleSelectWithOrderByAliasAsc() {
final String query = "select sku+1 as a from \"product\" order by a";
final String bigQueryExpected = "SELECT SKU + 1 AS A\nFROM foodmart.product\n"
+ "ORDER BY A IS NULL, A";
final String hiveExpected = "SELECT SKU + 1 A\nFROM foodmart.product\n"
+ "ORDER BY A IS NULL, A";
sql(query)
.withBigQuery()
.ok(bigQueryExpected)
.withHive()
.ok(hiveExpected);
}
@Test public void testSimpleSelectWithOrderByAliasDesc() {
final String query = "select sku+1 as a from \"product\" order by a desc";
final String bigQueryExpected = "SELECT SKU + 1 AS A\nFROM foodmart.product\n"
+ "ORDER BY A IS NULL DESC, A DESC";
final String hiveExpected = "SELECT SKU + 1 A\nFROM foodmart.product\n"
+ "ORDER BY A IS NULL DESC, A DESC";
sql(query)
.withBigQuery()
.ok(bigQueryExpected)
.withHive()
.ok(hiveExpected);
}
  /** Tests that "SELECT *" survives the round trip unchanged. */
  @Test void testSimpleSelectStarFromProductTable() {
    String query = "select * from \"product\"";
    sql(query).ok("SELECT *\nFROM \"foodmart\".\"product\"");
  }
  /** Tests that an aggregate with a FILTER (WHERE ...) clause is emitted as
   * FILTER in the default dialect; the filter condition gains "IS TRUE". */
  @Test void testAggregateFilterWhereToSqlFromProductTable() {
    String query = "select\n"
        + "  sum(\"shelf_width\") filter (where \"net_weight\" > 0),\n"
        + "  sum(\"shelf_width\")\n"
        + "from \"foodmart\".\"product\"\n"
        + "where \"product_id\" > 0\n"
        + "group by \"product_id\"";
    final String expected = "SELECT"
        + " SUM(\"shelf_width\") FILTER (WHERE \"net_weight\" > 0 IS TRUE),"
        + " SUM(\"shelf_width\")\n"
        + "FROM \"foodmart\".\"product\"\n"
        + "WHERE \"product_id\" > 0\n"
        + "GROUP BY \"product_id\"";
    sql(query).ok(expected);
  }
  /** Tests that for BigQuery, which lacks FILTER, an aggregate filter is
   * rewritten as SUM(CASE WHEN ... THEN ... ELSE NULL END). */
  @Test void testAggregateFilterWhereToBigQuerySqlFromProductTable() {
    String query = "select\n"
        + "  sum(\"shelf_width\") filter (where \"net_weight\" > 0),\n"
        + "  sum(\"shelf_width\")\n"
        + "from \"foodmart\".\"product\"\n"
        + "where \"product_id\" > 0\n"
        + "group by \"product_id\"";
    final String expected = "SELECT SUM(CASE WHEN net_weight > 0 IS TRUE"
        + " THEN shelf_width ELSE NULL END), "
        + "SUM(shelf_width)\n"
        + "FROM foodmart.product\n"
        + "WHERE product_id > 0\n"
        + "GROUP BY product_id";
    sql(query).withBigQuery().ok(expected);
  }
  /** Tests that a PIVOT query is expanded into filtered aggregates grouped by
   * the remaining column. */
  @Test void testPivotToSqlFromProductTable() {
    String query = "select * from (\n"
        + "  select \"shelf_width\", \"net_weight\", \"product_id\"\n"
        + "  from \"foodmart\".\"product\")\n"
        + "  pivot (sum(\"shelf_width\") as w, count(*) as c\n"
        + "    for (\"product_id\") in (10, 20))";
    final String expected = "SELECT \"net_weight\","
        + " SUM(\"shelf_width\") FILTER (WHERE \"product_id\" = 10) AS \"10_W\","
        + " COUNT(*) FILTER (WHERE \"product_id\" = 10) AS \"10_C\","
        + " SUM(\"shelf_width\") FILTER (WHERE \"product_id\" = 20) AS \"20_W\","
        + " COUNT(*) FILTER (WHERE \"product_id\" = 20) AS \"20_C\"\n"
        + "FROM \"foodmart\".\"product\"\n"
        + "GROUP BY \"net_weight\"";
    // BigQuery does not support FILTER, so we generate CASE around the
    // arguments to the aggregate functions.
    final String expectedBigQuery = "SELECT net_weight,"
        + " SUM(CASE WHEN product_id = 10 "
        + "THEN shelf_width ELSE NULL END) AS `10_W`,"
        + " COUNT(CASE WHEN product_id = 10 THEN 1 ELSE NULL END) AS `10_C`,"
        + " SUM(CASE WHEN product_id = 20 "
        + "THEN shelf_width ELSE NULL END) AS `20_W`,"
        + " COUNT(CASE WHEN product_id = 20 THEN 1 ELSE NULL END) AS `20_C`\n"
        + "FROM foodmart.product\n"
        + "GROUP BY net_weight";
    sql(query).ok(expected)
        .withBigQuery().ok(expectedBigQuery);
  }
  /** Tests a plain two-column projection. */
  @Test void testSimpleSelectQueryFromProductTable() {
    String query = "select \"product_id\", \"product_class_id\" from \"product\"";
    final String expected = "SELECT \"product_id\", \"product_class_id\"\n"
        + "FROM \"foodmart\".\"product\"";
    sql(query).ok(expected);
  }
  /** Tests that a simple "&lt;" predicate is emitted in the WHERE clause. */
  @Test void testSelectQueryWithWhereClauseOfLessThan() {
    String query = "select \"product_id\", \"shelf_width\"\n"
        + "from \"product\" where \"product_id\" < 10";
    final String expected = "SELECT \"product_id\", \"shelf_width\"\n"
        + "FROM \"foodmart\".\"product\"\n"
        + "WHERE \"product_id\" < 10";
    sql(query).ok(expected);
  }
  /** Tests that "&lt;&gt; ... OR ... IS NULL" is preserved rather than
   * simplified away. */
  @Test void testSelectWhereNotEqualsOrNull() {
    String query = "select \"product_id\", \"shelf_width\"\n"
        + "from \"product\"\n"
        + "where \"net_weight\" <> 10 or \"net_weight\" is null";
    final String expected = "SELECT \"product_id\", \"shelf_width\"\n"
        + "FROM \"foodmart\".\"product\"\n"
        + "WHERE \"net_weight\" <> 10 OR \"net_weight\" IS NULL";
    sql(query).ok(expected);
  }
  /** Tests that nested AND/OR predicates keep their parenthesization. */
  @Test void testSelectQueryWithWhereClauseOfBasicOperators() {
    String query = "select * from \"product\" "
        + "where (\"product_id\" = 10 OR \"product_id\" <= 5) "
        + "AND (80 >= \"shelf_width\" OR \"shelf_width\" > 30)";
    final String expected = "SELECT *\n"
        + "FROM \"foodmart\".\"product\"\n"
        + "WHERE (\"product_id\" = 10 OR \"product_id\" <= 5) "
        + "AND (80 >= \"shelf_width\" OR \"shelf_width\" > 30)";
    sql(query).ok(expected);
  }
  /** Tests COUNT(*) with a two-column GROUP BY. */
  @Test void testSelectQueryWithGroupBy() {
    String query = "select count(*) from \"product\" group by \"product_class_id\", \"product_id\"";
    final String expected = "SELECT COUNT(*)\n"
        + "FROM \"foodmart\".\"product\"\n"
        + "GROUP BY \"product_class_id\", \"product_id\"";
    sql(query).ok(expected);
  }
@Test void testSelectQueryWithHiveCube() {
String query = "select \"product_class_id\", \"product_id\", count(*) "
+ "from \"product\" group by cube(\"product_class_id\", \"product_id\")";
String expected = "SELECT product_class_id, product_id, COUNT(*)\n"
+ "FROM foodmart.product\n"
+ "GROUP BY product_class_id, product_id WITH CUBE";
sql(query).withHive().ok(expected);
SqlDialect sqlDialect = sql(query).withHive().dialect;
assertTrue(sqlDialect.supportsGroupByWithCube());
}
@Test void testSelectQueryWithHiveRollup() {
String query = "select \"product_class_id\", \"product_id\", count(*) "
+ "from \"product\" group by rollup(\"product_class_id\", \"product_id\")";
String expected = "SELECT product_class_id, product_id, COUNT(*)\n"
+ "FROM foodmart.product\n"
+ "GROUP BY product_class_id, product_id WITH ROLLUP";
sql(query).withHive().ok(expected);
SqlDialect sqlDialect = sql(query).withHive().dialect;
assertTrue(sqlDialect.supportsGroupByWithRollup());
}
  /** Tests that "GROUP BY ()" with only aggregates produces the same SQL as
   * no GROUP BY at all, across Calcite, MySQL and Presto dialects. */
  @Test void testSelectQueryWithGroupByEmpty() {
    final String sql0 = "select count(*) from \"product\" group by ()";
    final String sql1 = "select count(*) from \"product\"";
    final String expected = "SELECT COUNT(*)\n"
        + "FROM \"foodmart\".\"product\"";
    final String expectedMySql = "SELECT COUNT(*)\n"
        + "FROM `foodmart`.`product`";
    final String expectedPresto = "SELECT COUNT(*)\n"
        + "FROM \"foodmart\".\"product\"";
    sql(sql0)
        .ok(expected)
        .withMysql()
        .ok(expectedMySql)
        .withPresto()
        .ok(expectedPresto);
    sql(sql1)
        .ok(expected)
        .withMysql()
        .ok(expectedMySql)
        .withPresto()
        .ok(expectedPresto);
  }
  /** Tests that "GROUP BY ()" is retained when the select list has a
   * non-aggregate expression. */
  @Test void testSelectQueryWithGroupByEmpty2() {
    final String query = "select 42 as c from \"product\" group by ()";
    final String expected = "SELECT 42 AS \"C\"\n"
        + "FROM \"foodmart\".\"product\"\n"
        + "GROUP BY ()";
    final String expectedMySql = "SELECT 42 AS `C`\n"
        + "FROM `foodmart`.`product`\n"
        + "GROUP BY ()";
    final String expectedPresto = "SELECT 42 AS \"C\"\n"
        + "FROM \"foodmart\".\"product\"\n"
        + "GROUP BY ()";
    sql(query)
        .ok(expected)
        .withMysql()
        .ok(expectedMySql)
        .withPresto()
        .ok(expectedPresto);
  }
  /** Test case for
   * <a href="https://issues.apache.org/jira/browse/CALCITE-3097">[CALCITE-3097]
   * GROUPING SETS breaks on sets of size > 1 due to precedence issues</a>,
   * in particular, that we maintain proper precedence around nested lists. */
  @Test void testGroupByGroupingSets() {
    final String query = "select \"product_class_id\", \"brand_name\"\n"
        + "from \"product\"\n"
        + "group by GROUPING SETS ((\"product_class_id\", \"brand_name\"),"
        + " (\"product_class_id\"))\n"
        + "order by 2, 1";
    // The two-column set keeps its parentheses; the singleton set does not.
    final String expected = "SELECT \"product_class_id\", \"brand_name\"\n"
        + "FROM \"foodmart\".\"product\"\n"
        + "GROUP BY GROUPING SETS((\"product_class_id\", \"brand_name\"),"
        + " \"product_class_id\")\n"
        + "ORDER BY \"brand_name\", \"product_class_id\"";
    sql(query)
        .withPostgresql()
        .ok(expected);
  }
  /** Tests GROUP BY ROLLUP of two columns. The SQL for MySQL has
   * "GROUP BY ... ROLLUP" but no "ORDER BY"; MySQL 8 supports ROLLUP plus
   * ORDER BY with NULLS LAST. */
  @Test void testSelectQueryWithGroupByRollup() {
    final String query = "select \"product_class_id\", \"brand_name\"\n"
        + "from \"product\"\n"
        + "group by rollup(\"product_class_id\", \"brand_name\")\n"
        + "order by 1, 2";
    final String expected = "SELECT \"product_class_id\", \"brand_name\"\n"
        + "FROM \"foodmart\".\"product\"\n"
        + "GROUP BY ROLLUP(\"product_class_id\", \"brand_name\")\n"
        + "ORDER BY \"product_class_id\", \"brand_name\"";
    final String expectedMySql = "SELECT `product_class_id`, `brand_name`\n"
        + "FROM `foodmart`.`product`\n"
        + "GROUP BY `product_class_id`, `brand_name` WITH ROLLUP";
    final String expectedMySql8 = "SELECT `product_class_id`, `brand_name`\n"
        + "FROM `foodmart`.`product`\n"
        + "GROUP BY ROLLUP(`product_class_id`, `brand_name`)\n"
        + "ORDER BY `product_class_id` NULLS LAST, `brand_name` NULLS LAST";
    final String expectedHive = "SELECT product_class_id, brand_name\n"
        + "FROM foodmart.product\n"
        + "GROUP BY product_class_id, brand_name WITH ROLLUP";
    sql(query)
        .ok(expected)
        .withMysql()
        .ok(expectedMySql)
        .withMysql8()
        .ok(expectedMySql8)
        .withHive()
        .ok(expectedHive);
  }
  /** As {@link #testSelectQueryWithGroupByRollup()},
   * but ORDER BY columns reversed. For MySQL/Hive the GROUP BY key order is
   * rearranged to match the ORDER BY, since WITH ROLLUP implies ordering. */
  @Test void testSelectQueryWithGroupByRollup2() {
    final String query = "select \"product_class_id\", \"brand_name\"\n"
        + "from \"product\"\n"
        + "group by rollup(\"product_class_id\", \"brand_name\")\n"
        + "order by 2, 1";
    final String expected = "SELECT \"product_class_id\", \"brand_name\"\n"
        + "FROM \"foodmart\".\"product\"\n"
        + "GROUP BY ROLLUP(\"product_class_id\", \"brand_name\")\n"
        + "ORDER BY \"brand_name\", \"product_class_id\"";
    final String expectedMySql = "SELECT `product_class_id`, `brand_name`\n"
        + "FROM `foodmart`.`product`\n"
        + "GROUP BY `brand_name`, `product_class_id` WITH ROLLUP";
    final String expectedHive = "SELECT product_class_id, brand_name\n"
        + "FROM foodmart.product\n"
        + "GROUP BY brand_name, product_class_id WITH ROLLUP";
    sql(query)
        .ok(expected)
        .withMysql()
        .ok(expectedMySql)
        .withHive()
        .ok(expectedHive);
  }
@Test public void testSimpleSelectWithGroupByAlias() {
final String query = "select 'literal' as \"a\", sku + 1 as b from"
+ " \"product\" group by 'literal', sku + 1";
final String bigQueryExpected = "SELECT 'literal' AS a, SKU + 1 AS B\n"
+ "FROM foodmart.product\n"
+ "GROUP BY 1, B";
sql(query)
.withBigQuery()
.ok(bigQueryExpected);
}
@Test public void testSimpleSelectWithGroupByAliasAndAggregate() {
final String query = "select 'literal' as \"a\", sku + 1 as \"b\", sum(\"product_id\") from"
+ " \"product\" group by sku + 1, 'literal'";
final String bigQueryExpected = "SELECT 'literal' AS a, SKU + 1 AS b, SUM(product_id)\n"
+ "FROM foodmart.product\n"
+ "GROUP BY b, 1";
sql(query)
.withBigQuery()
.ok(bigQueryExpected);
}
@Test public void testDuplicateLiteralInSelectForGroupBy() {
final String query = "select '1' as \"a\", sku + 1 as b, '1' as \"d\" from"
+ " \"product\" group by '1', sku + 1";
final String expectedSql = "SELECT '1' a, SKU + 1 B, '1' d\n"
+ "FROM foodmart.product\n"
+ "GROUP BY '1', SKU + 1";
final String bigQueryExpected = "SELECT '1' AS a, SKU + 1 AS B, '1' AS d\n"
+ "FROM foodmart.product\n"
+ "GROUP BY 1, B";
sql(query)
.withHive()
.ok(expectedSql)
.withSpark()
.ok(expectedSql)
.withBigQuery()
.ok(bigQueryExpected);
}
  /** Tests a query with GROUP BY and a sub-query which is also with GROUP BY.
   * If we flatten sub-queries, the number of rows going into AVG becomes
   * incorrect. */
  @Test void testSelectQueryWithGroupBySubQuery1() {
    final String query = "select \"product_class_id\", avg(\"product_id\")\n"
        + "from (select \"product_class_id\", \"product_id\", avg(\"product_class_id\")\n"
        + "from \"product\"\n"
        + "group by \"product_class_id\", \"product_id\") as t\n"
        + "group by \"product_class_id\"";
    // Note that the inner AVG, being unused, is pruned from the sub-query.
    final String expected = "SELECT \"product_class_id\", AVG(\"product_id\")\n"
        + "FROM (SELECT \"product_class_id\", \"product_id\"\n"
        + "FROM \"foodmart\".\"product\"\n"
        + "GROUP BY \"product_class_id\", \"product_id\") AS \"t1\"\n"
        + "GROUP BY \"product_class_id\"";
    sql(query).ok(expected);
  }
  /** Tests query without GROUP BY but an aggregate function
   * and a sub-query which is with GROUP BY. Also checks that the equivalent
   * SELECT DISTINCT sub-query produces the same SQL. */
  @Test void testSelectQueryWithGroupBySubQuery2() {
    final String query = "select sum(\"product_id\")\n"
        + "from (select \"product_class_id\", \"product_id\"\n"
        + "from \"product\"\n"
        + "group by \"product_class_id\", \"product_id\") as t";
    final String expected = "SELECT SUM(\"product_id\")\n"
        + "FROM (SELECT \"product_id\"\n"
        + "FROM \"foodmart\".\"product\"\n"
        + "GROUP BY \"product_class_id\", \"product_id\") AS \"t1\"";
    final String expectedMysql = "SELECT SUM(`product_id`)\n"
        + "FROM (SELECT `product_id`\n"
        + "FROM `foodmart`.`product`\n"
        + "GROUP BY `product_class_id`, `product_id`) AS `t1`";
    sql(query)
        .ok(expected)
        .withMysql()
        .ok(expectedMysql);
    // Equivalent sub-query that uses SELECT DISTINCT
    final String query2 = "select sum(\"product_id\")\n"
        + "from (select distinct \"product_class_id\", \"product_id\"\n"
        + " from \"product\") as t";
    sql(query2)
        .ok(expected)
        .withMysql()
        .ok(expectedMysql);
  }
  /** CUBE of one column is equivalent to ROLLUP, and Calcite recognizes
   * this. MySQL and Hive additionally rewrite ORDER BY with "IS NULL" keys so
   * that NULLs sort last. */
  @Test void testSelectQueryWithSingletonCube() {
    final String query = "select \"product_class_id\", count(*) as c\n"
        + "from \"product\"\n"
        + "group by cube(\"product_class_id\")\n"
        + "order by 1, 2";
    final String expected = "SELECT \"product_class_id\", COUNT(*) AS \"C\"\n"
        + "FROM \"foodmart\".\"product\"\n"
        + "GROUP BY ROLLUP(\"product_class_id\")\n"
        + "ORDER BY \"product_class_id\", \"C\"";
    final String expectedMySql = "SELECT `product_class_id`, COUNT(*) AS `C`\n"
        + "FROM `foodmart`.`product`\n"
        + "GROUP BY `product_class_id` WITH ROLLUP\n"
        + "ORDER BY `product_class_id` IS NULL, `product_class_id`,"
        + " `C` IS NULL, `C`";
    final String expectedHive = "SELECT product_class_id, COUNT(*) C\n"
        + "FROM foodmart.product\n"
        + "GROUP BY product_class_id WITH ROLLUP\n"
        + "ORDER BY product_class_id IS NULL, product_class_id,"
        + " C IS NULL, C";
    // Presto orders by the aggregate expression itself rather than its alias.
    final String expectedPresto = "SELECT \"product_class_id\", COUNT(*) AS \"C\"\n"
        + "FROM \"foodmart\".\"product\"\n"
        + "GROUP BY ROLLUP(\"product_class_id\")\n"
        + "ORDER BY \"product_class_id\" IS NULL, \"product_class_id\", "
        + "COUNT(*) IS NULL, COUNT(*)";
    sql(query)
        .ok(expected)
        .withMysql()
        .ok(expectedMySql)
        .withPresto()
        .ok(expectedPresto)
        .withHive()
        .ok(expectedHive);
  }
  /** As {@link #testSelectQueryWithSingletonCube()}, but no ORDER BY
   * clause. */
  @Test void testSelectQueryWithSingletonCubeNoOrderBy() {
    final String query = "select \"product_class_id\", count(*) as c\n"
        + "from \"product\"\n"
        + "group by cube(\"product_class_id\")";
    final String expected = "SELECT \"product_class_id\", COUNT(*) AS \"C\"\n"
        + "FROM \"foodmart\".\"product\"\n"
        + "GROUP BY ROLLUP(\"product_class_id\")";
    final String expectedMySql = "SELECT `product_class_id`, COUNT(*) AS `C`\n"
        + "FROM `foodmart`.`product`\n"
        + "GROUP BY `product_class_id` WITH ROLLUP";
    final String expectedPresto = "SELECT \"product_class_id\", COUNT(*) AS \"C\"\n"
        + "FROM \"foodmart\".\"product\"\n"
        + "GROUP BY ROLLUP(\"product_class_id\")";
    final String expectedHive = "SELECT product_class_id, COUNT(*) C\n"
        + "FROM foodmart.product\n"
        + "GROUP BY product_class_id WITH ROLLUP";
    sql(query)
        .ok(expected)
        .withMysql()
        .ok(expectedMySql)
        .withPresto()
        .ok(expectedPresto)
        .withHive()
        .ok(expectedHive);
  }
  /** Cannot rewrite if ORDER BY contains a column not in GROUP BY (in this
   * case COUNT(*)). */
  @Test void testSelectQueryWithRollupOrderByCount() {
    final String query = "select \"product_class_id\", \"brand_name\",\n"
        + " count(*) as c\n"
        + "from \"product\"\n"
        + "group by rollup(\"product_class_id\", \"brand_name\")\n"
        + "order by 1, 2, 3";
    final String expected = "SELECT \"product_class_id\", \"brand_name\","
        + " COUNT(*) AS \"C\"\n"
        + "FROM \"foodmart\".\"product\"\n"
        + "GROUP BY ROLLUP(\"product_class_id\", \"brand_name\")\n"
        + "ORDER BY \"product_class_id\", \"brand_name\", \"C\"";
    final String expectedMySql = "SELECT `product_class_id`, `brand_name`,"
        + " COUNT(*) AS `C`\n"
        + "FROM `foodmart`.`product`\n"
        + "GROUP BY `product_class_id`, `brand_name` WITH ROLLUP\n"
        + "ORDER BY `product_class_id` IS NULL, `product_class_id`,"
        + " `brand_name` IS NULL, `brand_name`,"
        + " `C` IS NULL, `C`";
    final String expectedHive = "SELECT product_class_id, brand_name,"
        + " COUNT(*) C\n"
        + "FROM foodmart.product\n"
        + "GROUP BY product_class_id, brand_name WITH ROLLUP\n"
        + "ORDER BY product_class_id IS NULL, product_class_id,"
        + " brand_name IS NULL, brand_name,"
        + " C IS NULL, C";
    sql(query)
        .ok(expected)
        .withMysql()
        .ok(expectedMySql)
        .withHive()
        .ok(expectedHive);
  }
  /** As {@link #testSelectQueryWithSingletonCube()}, but with LIMIT. */
  @Test void testSelectQueryWithCubeLimit() {
    final String query = "select \"product_class_id\", count(*) as c\n"
        + "from \"product\"\n"
        + "group by cube(\"product_class_id\")\n"
        + "limit 5";
    final String expected = "SELECT \"product_class_id\", COUNT(*) AS \"C\"\n"
        + "FROM \"foodmart\".\"product\"\n"
        + "GROUP BY ROLLUP(\"product_class_id\")\n"
        + "FETCH NEXT 5 ROWS ONLY";
    // If a MySQL 5 query has GROUP BY ... ROLLUP, you cannot add ORDER BY,
    // but you can add LIMIT.
    final String expectedMySql = "SELECT `product_class_id`, COUNT(*) AS `C`\n"
        + "FROM `foodmart`.`product`\n"
        + "GROUP BY `product_class_id` WITH ROLLUP\n"
        + "LIMIT 5";
    final String expectedPresto = "SELECT \"product_class_id\", COUNT(*) AS \"C\"\n"
        + "FROM \"foodmart\".\"product\"\n"
        + "GROUP BY ROLLUP(\"product_class_id\")\n"
        + "LIMIT 5";
    final String expectedHive = "SELECT product_class_id, COUNT(*) C\n"
        + "FROM foodmart.product\n"
        + "GROUP BY product_class_id WITH ROLLUP\n"
        + "LIMIT 5";
    sql(query)
        .ok(expected)
        .withMysql()
        .ok(expectedMySql)
        .withPresto()
        .ok(expectedPresto)
        .withHive()
        .ok(expectedHive);
  }
  /** Tests MIN with GROUP BY where the key is not projected. */
  @Test void testSelectQueryWithMinAggregateFunction() {
    String query = "select min(\"net_weight\") from \"product\" group by \"product_class_id\" ";
    final String expected = "SELECT MIN(\"net_weight\")\n"
        + "FROM \"foodmart\".\"product\"\n"
        + "GROUP BY \"product_class_id\"";
    sql(query).ok(expected);
  }
  /** Tests MIN with GROUP BY where the key is also projected. */
  @Test void testSelectQueryWithMinAggregateFunction1() {
    String query = "select \"product_class_id\", min(\"net_weight\") from"
        + " \"product\" group by \"product_class_id\"";
    final String expected = "SELECT \"product_class_id\", MIN(\"net_weight\")\n"
        + "FROM \"foodmart\".\"product\"\n"
        + "GROUP BY \"product_class_id\"";
    sql(query).ok(expected);
  }
  /** Tests SUM with GROUP BY where the key is not projected. */
  @Test void testSelectQueryWithSumAggregateFunction() {
    String query =
        "select sum(\"net_weight\") from \"product\" group by \"product_class_id\" ";
    final String expected = "SELECT SUM(\"net_weight\")\n"
        + "FROM \"foodmart\".\"product\"\n"
        + "GROUP BY \"product_class_id\"";
    sql(query).ok(expected);
  }
  /** Tests several aggregate functions in one select list. */
  @Test void testSelectQueryWithMultipleAggregateFunction() {
    String query = "select sum(\"net_weight\"), min(\"low_fat\"), count(*)"
        + " from \"product\" group by \"product_class_id\" ";
    final String expected = "SELECT SUM(\"net_weight\"), MIN(\"low_fat\"),"
        + " COUNT(*)\n"
        + "FROM \"foodmart\".\"product\"\n"
        + "GROUP BY \"product_class_id\"";
    sql(query).ok(expected);
  }
  /** As {@link #testSelectQueryWithMultipleAggregateFunction()}, but also
   * projecting the GROUP BY key. */
  @Test void testSelectQueryWithMultipleAggregateFunction1() {
    String query = "select \"product_class_id\","
        + " sum(\"net_weight\"), min(\"low_fat\"), count(*)"
        + " from \"product\" group by \"product_class_id\" ";
    final String expected = "SELECT \"product_class_id\","
        + " SUM(\"net_weight\"), MIN(\"low_fat\"), COUNT(*)\n"
        + "FROM \"foodmart\".\"product\"\n"
        + "GROUP BY \"product_class_id\"";
    sql(query).ok(expected);
  }
  /** Tests COUNT(*) with both GROUP BY keys projected. */
  @Test void testSelectQueryWithGroupByAndProjectList() {
    String query = "select \"product_class_id\", \"product_id\", count(*) "
        + "from \"product\" group by \"product_class_id\", \"product_id\" ";
    final String expected = "SELECT \"product_class_id\", \"product_id\","
        + " COUNT(*)\n"
        + "FROM \"foodmart\".\"product\"\n"
        + "GROUP BY \"product_class_id\", \"product_id\"";
    sql(query).ok(expected);
  }
  // NOTE(review): the two BigQuery alias-replacement tests below are
  // commented out — presumably pending support for replacing GROUP BY
  // expressions with their select-list aliases; confirm before re-enabling.
  /*@Test public void testGroupByAliasReplacementWithGroupByExpression() {
String query = "select \"product_class_id\" + \"product_id\" as product_id, "
+ "\"product_id\" + 2 as prod_id, count(1) as num_records"
+ " from \"product\""
+ " group by \"product_class_id\" + \"product_id\", \"product_id\" + 2";
final String expected = "SELECT product_class_id + product_id AS PRODUCT_ID,"
+ " product_id + 2 AS PROD_ID,"
+ " COUNT(*) AS NUM_RECORDS\n"
+ "FROM foodmart.product\n"
+ "GROUP BY product_class_id + product_id, PROD_ID";
sql(query).withBigQuery().ok(expected);
}
@Test public void testGroupByAliasReplacementWithGroupByExpression2() {
String query = "select "
+ "(case when \"product_id\" = 1 then \"product_id\" else 1234 end)"
+ " as product_id, count(1) as num_records from \"product\""
+ " group by (case when \"product_id\" = 1 then \"product_id\" else 1234 end)";
final String expected = "SELECT "
+ "CASE WHEN product_id = 1 THEN product_id ELSE 1234 END AS PRODUCT_ID,"
+ " COUNT(*) AS NUM_RECORDS\n"
+ "FROM foodmart.product\n"
+ "GROUP BY CASE WHEN product_id = 1 THEN product_id ELSE 1234 END";
sql(query).withBigQuery().ok(expected);
}*/
  /** Tests that a small negative decimal literal is rendered in scientific
   * notation. */
  @Test void testCastDecimal1() {
    final String query = "select -0.0000000123\n"
        + " from \"expense_fact\"";
    final String expected = "SELECT -1.23E-8\n"
        + "FROM \"foodmart\".\"expense_fact\"";
    sql(query).ok(expected);
  }
  /** Test case for
   * <a href="https://issues.apache.org/jira/browse/CALCITE-2713">[CALCITE-2713]
   * JDBC adapter may generate casts on PostgreSQL for VARCHAR type exceeding
   * max length</a>.
   *
   * <p>The over-long precision is clamped to each dialect's modified
   * type-system maximum (256 for PostgreSQL, 512 for Oracle here). */
  @Test void testCastLongVarchar1() {
    final String query = "select cast(\"store_id\" as VARCHAR(10485761))\n"
        + " from \"expense_fact\"";
    final String expectedPostgreSQL = "SELECT CAST(\"store_id\" AS VARCHAR(256))\n"
        + "FROM \"foodmart\".\"expense_fact\"";
    sql(query)
        .withPostgresqlModifiedTypeSystem()
        .ok(expectedPostgreSQL);
    final String expectedOracle = "SELECT CAST(\"store_id\" AS VARCHAR(512))\n"
        + "FROM \"foodmart\".\"expense_fact\"";
    sql(query)
        .withOracleModifiedTypeSystem()
        .ok(expectedOracle);
  }
  /** Test case for
   * <a href="https://issues.apache.org/jira/browse/CALCITE-2713">[CALCITE-2713]
   * JDBC adapter may generate casts on PostgreSQL for VARCHAR type exceeding
   * max length</a>.
   *
   * <p>A precision within the limit (175) is left unchanged in both
   * dialects. */
  @Test void testCastLongVarchar2() {
    final String query = "select cast(\"store_id\" as VARCHAR(175))\n"
        + " from \"expense_fact\"";
    final String expectedPostgreSQL = "SELECT CAST(\"store_id\" AS VARCHAR(175))\n"
        + "FROM \"foodmart\".\"expense_fact\"";
    sql(query)
        .withPostgresqlModifiedTypeSystem()
        .ok(expectedPostgreSQL);
    final String expectedOracle = "SELECT CAST(\"store_id\" AS VARCHAR(175))\n"
        + "FROM \"foodmart\".\"expense_fact\"";
    sql(query)
        .withOracleModifiedTypeSystem()
        .ok(expectedOracle);
  }
  /** Test case for
   * <a href="https://issues.apache.org/jira/browse/CALCITE-1174">[CALCITE-1174]
   * When generating SQL, translate SUM0(x) to COALESCE(SUM(x), 0)</a>. */
  @Test void testSum0BecomesCoalesce() {
    // Build the aggregate via RelBuilder since SUM0 has no SQL syntax.
    final Function<RelBuilder, RelNode> fn = b -> b.scan("EMP")
        .aggregate(b.groupKey(),
            b.aggregateCall(SqlStdOperatorTable.SUM0, b.field(3))
                .as("s"))
        .build();
    final String expectedMysql = "SELECT COALESCE(SUM(`MGR`), 0) AS `s`\n"
        + "FROM `scott`.`EMP`";
    final String expectedPostgresql = "SELECT COALESCE(SUM(\"MGR\"), 0) AS \"s\"\n"
        + "FROM \"scott\".\"EMP\"";
    relFn(fn)
        .withPostgresql()
        .ok(expectedPostgresql)
        .withMysql()
        .ok(expectedMysql);
  }
  /** As {@link #testSum0BecomesCoalesce()} but for windowed aggregates.
   * The windowed AVG expands to a guarded SUM/COUNT quotient. */
  @Test void testWindowedSum0BecomesCoalesce() {
    final String query = "select\n"
        + " AVG(\"net_weight\") OVER (order by \"product_id\" rows 3 preceding)\n"
        + "from \"foodmart\".\"product\"";
    final String expectedPostgresql = "SELECT CASE WHEN (COUNT(\"net_weight\")"
        + " OVER (ORDER BY \"product_id\" ROWS BETWEEN 3 PRECEDING AND CURRENT ROW)) > 0 "
        + "THEN COALESCE(SUM(\"net_weight\")"
        + " OVER (ORDER BY \"product_id\" ROWS BETWEEN 3 PRECEDING AND CURRENT ROW), 0)"
        + " ELSE NULL END / (COUNT(\"net_weight\")"
        + " OVER (ORDER BY \"product_id\" ROWS BETWEEN 3 PRECEDING AND CURRENT ROW))\n"
        + "FROM \"foodmart\".\"product\"";
    sql(query)
        .withPostgresql()
        .ok(expectedPostgresql);
  }
  /** Test case for
   * <a href="https://issues.apache.org/jira/browse/CALCITE-2722">[CALCITE-2722]
   * SqlImplementor createLeftCall method throws StackOverflowError</a>.
   *
   * <p>Builds a filter with ~10,000 OR'ed terms; the test only checks that
   * conversion and printing complete without overflowing the stack. */
  @Test void testStack() {
    final Function<RelBuilder, RelNode> relFn = b -> b
        .scan("EMP")
        .filter(
            b.or(
                IntStream.range(1, 10000)
                    .mapToObj(i -> b.equals(b.field("EMPNO"), b.literal(i)))
                    .collect(Collectors.toList())))
        .build();
    final SqlDialect dialect = SqlDialect.DatabaseProduct.CALCITE.getDialect();
    final RelNode root = relFn.apply(relBuilder());
    final RelToSqlConverter converter = new RelToSqlConverter(dialect);
    final SqlNode sqlNode = converter.visitRoot(root).asStatement();
    final String sqlString = sqlNode.accept(new SqlShuttle())
        .toSqlString(dialect).getSql();
    assertThat(sqlString, notNullValue());
  }
  /** Tests that an anti-join is rendered as NOT EXISTS with a correlated
   * sub-query. */
  @Test void testAntiJoin() {
    final RelBuilder builder = relBuilder();
    final RelNode root = builder
        .scan("DEPT")
        .scan("EMP")
        .join(
            JoinRelType.ANTI, builder.equals(
                builder.field(2, 1, "DEPTNO"),
                builder.field(2, 0, "DEPTNO")))
        .project(builder.field("DEPTNO"))
        .build();
    final String expectedSql = "SELECT \"DEPTNO\"\n"
        + "FROM \"scott\".\"DEPT\"\n"
        + "WHERE NOT EXISTS (SELECT 1\n"
        + "FROM \"scott\".\"EMP\"\n"
        + "WHERE \"DEPT\".\"DEPTNO\" = \"EMP\".\"DEPTNO\")";
    assertThat(toSql(root), isLinux(expectedSql));
  }
  /** Tests that a semi-join is rendered as EXISTS with a correlated
   * sub-query. */
  @Test void testSemiJoin() {
    final RelBuilder builder = relBuilder();
    final RelNode root = builder
        .scan("DEPT")
        .scan("EMP")
        .join(
            JoinRelType.SEMI, builder.equals(
                builder.field(2, 1, "DEPTNO"),
                builder.field(2, 0, "DEPTNO")))
        .project(builder.field("DEPTNO"))
        .build();
    final String expectedSql = "SELECT \"DEPTNO\"\n"
        + "FROM \"scott\".\"DEPT\"\n"
        + "WHERE EXISTS (SELECT 1\n"
        + "FROM \"scott\".\"EMP\"\n"
        + "WHERE \"DEPT\".\"DEPTNO\" = \"EMP\".\"DEPTNO\")";
    assertThat(toSql(root), isLinux(expectedSql));
  }
  /** As {@link #testSemiJoin()}, but with a filter below the semi-join; the
   * filtered scan becomes a nested sub-select inside EXISTS. */
  @Test void testSemiJoinFilter() {
    final RelBuilder builder = relBuilder();
    final RelNode root = builder
        .scan("DEPT")
        .scan("EMP")
        .filter(
            builder.call(SqlStdOperatorTable.GREATER_THAN,
                builder.field(builder.peek().getRowType().getField("EMPNO", false, false).getIndex()),
                builder.literal((short) 10)))
        .join(
            JoinRelType.SEMI, builder.equals(
                builder.field(2, 1, "DEPTNO"),
                builder.field(2, 0, "DEPTNO")))
        .project(builder.field("DEPTNO"))
        .build();
    final String expectedSql = "SELECT \"DEPTNO\"\n"
        + "FROM \"scott\".\"DEPT\"\n"
        + "WHERE EXISTS (SELECT 1\n"
        + "FROM (SELECT *\n"
        + "FROM \"scott\".\"EMP\"\n"
        + "WHERE \"EMPNO\" > 10) AS \"t\"\n"
        + "WHERE \"DEPT\".\"DEPTNO\" = \"t\".\"DEPTNO\")";
    assertThat(toSql(root), isLinux(expectedSql));
  }
  /** As {@link #testSemiJoin()}, but with a projection below the semi-join;
   * the projected scan becomes a nested sub-select inside EXISTS. */
  @Test void testSemiJoinProject() {
    final RelBuilder builder = relBuilder();
    final RelNode root = builder
        .scan("DEPT")
        .scan("EMP")
        .project(
            builder.field(builder.peek().getRowType().getField("EMPNO", false, false).getIndex()),
            builder.field(builder.peek().getRowType().getField("DEPTNO", false, false).getIndex()))
        .join(
            JoinRelType.SEMI, builder.equals(
                builder.field(2, 1, "DEPTNO"),
                builder.field(2, 0, "DEPTNO")))
        .project(builder.field("DEPTNO"))
        .build();
    final String expectedSql = "SELECT \"DEPTNO\"\n"
        + "FROM \"scott\".\"DEPT\"\n"
        + "WHERE EXISTS (SELECT \"EMPNO\", \"DEPTNO\"\n"
        + "FROM \"scott\".\"EMP\") AS \"t\"\n"
        + "WHERE \"DEPT\".\"DEPTNO\" = \"t\".\"DEPTNO\")";
    assertThat(toSql(root), isLinux(expectedSql));
  }
  /** Tests a semi-join whose right input is itself an inner join; the join
   * appears inside the EXISTS sub-query. */
  @Test void testSemiNestedJoin() {
    final RelBuilder builder = relBuilder();
    final RelNode base = builder
        .scan("EMP")
        .scan("EMP")
        .join(
            JoinRelType.INNER, builder.equals(
                builder.field(2, 0, "EMPNO"),
                builder.field(2, 1, "EMPNO")))
        .build();
    final RelNode root = builder
        .scan("DEPT")
        .push(base)
        .join(
            JoinRelType.SEMI, builder.equals(
                builder.field(2, 1, "DEPTNO"),
                builder.field(2, 0, "DEPTNO")))
        .project(builder.field("DEPTNO"))
        .build();
    final String expectedSql = "SELECT \"DEPTNO\"\n"
        + "FROM \"scott\".\"DEPT\"\n"
        + "WHERE EXISTS (SELECT 1\n"
        + "FROM \"scott\".\"EMP\"\n"
        + "INNER JOIN \"scott\".\"EMP\" AS \"EMP0\" ON \"EMP\".\"EMPNO\" = \"EMP0\".\"EMPNO\"\n"
        + "WHERE \"DEPT\".\"DEPTNO\" = \"EMP\".\"DEPTNO\")";
    assertThat(toSql(root), isLinux(expectedSql));
  }
  /** Test case for
   * <a href="https://issues.apache.org/jira/browse/CALCITE-2792">[CALCITE-2792]
   * Stackoverflow while evaluating filter with large number of OR conditions</a>.
   *
   * <p>Currently disabled; when enabled it expects the OR chains to collapse
   * into IN lists. */
  @Disabled
  @Test void testBalancedBinaryCall() {
    final Function<RelBuilder, RelNode> relFn = b -> b
        .scan("EMP")
        .filter(
            b.and(
                b.or(IntStream.range(0, 4)
                    .mapToObj(i -> b.equals(b.field("EMPNO"), b.literal(i)))
                    .collect(Collectors.toList())),
                b.or(IntStream.range(5, 8)
                    .mapToObj(i -> b.equals(b.field("DEPTNO"), b.literal(i)))
                    .collect(Collectors.toList()))))
        .build();
    final String expected = "SELECT *\n"
        + "FROM \"scott\".\"EMP\"\n"
        + "WHERE \"EMPNO\" IN (0, 1, 2, 3) AND \"DEPTNO\" IN (5, 6, 7)";
    relFn(relFn).ok(expected);
  }
  /** Test case for
   * <a href="https://issues.apache.org/jira/browse/CALCITE-1946">[CALCITE-1946]
   * JDBC adapter should generate sub-SELECT if dialect does not support nested
   * aggregate functions</a>.
   *
   * <p>Oracle gets a single SELECT with SUM(SUM(...)); MySQL, Vertica,
   * PostgreSQL, BigQuery, Hive and Spark get a sub-select in the FROM
   * clause. */
  @Test void testNestedAggregates() {
    // PostgreSQL, MySQL, Vertica do not support nested aggregate functions, so
    // for these, the JDBC adapter generates a SELECT in the FROM clause.
    // Oracle can do it in a single SELECT.
    final String query = "select\n"
        + " SUM(\"net_weight1\") as \"net_weight_converted\"\n"
        + "  from ("
        + "   select\n"
        + "  SUM(\"net_weight\") as \"net_weight1\"\n"
        + "  from \"foodmart\".\"product\"\n"
        + " group by \"product_id\")";
    final String expectedOracle = "SELECT SUM(SUM(\"net_weight\")) \"net_weight_converted\"\n"
        + "FROM \"foodmart\".\"product\"\n"
        + "GROUP BY \"product_id\"";
    final String expectedMySQL = "SELECT SUM(`net_weight1`) AS `net_weight_converted`\n"
        + "FROM (SELECT SUM(`net_weight`) AS `net_weight1`\n"
        + "FROM `foodmart`.`product`\n"
        + "GROUP BY `product_id`) AS `t1`";
    final String expectedPostgresql = "SELECT SUM(\"net_weight1\") AS \"net_weight_converted\"\n"
        + "FROM (SELECT SUM(\"net_weight\") AS \"net_weight1\"\n"
        + "FROM \"foodmart\".\"product\"\n"
        + "GROUP BY \"product_id\") AS \"t1\"";
    final String expectedVertica = expectedPostgresql;
    final String expectedBigQuery = "SELECT SUM(net_weight1) AS net_weight_converted\n"
        + "FROM (SELECT SUM(net_weight) AS net_weight1\n"
        + "FROM foodmart.product\n"
        + "GROUP BY product_id) AS t1";
    final String expectedHive = "SELECT SUM(net_weight1) net_weight_converted\n"
        + "FROM (SELECT SUM(net_weight) net_weight1\n"
        + "FROM foodmart.product\n"
        + "GROUP BY product_id) t1";
    final String expectedSpark = expectedHive;
    sql(query)
        .withOracle()
        .ok(expectedOracle)
        .withMysql()
        .ok(expectedMySQL)
        .withVertica()
        .ok(expectedVertica)
        .withPostgresql()
        .ok(expectedPostgresql)
        .withBigQuery()
        .ok(expectedBigQuery)
        .withHive()
        .ok(expectedHive)
        .withSpark()
        .ok(expectedSpark);
  }
  /** A window function inside an aggregate call: the default dialect keeps it
   * inline, while Hive 2, Spark and BigQuery evaluate the window in a derived
   * table and aggregate its alias. */
  @Test public void testAnalyticalFunctionInAggregate() {
    final String query = "select\n"
        + "MAX(\"rnk\") AS \"rnk1\""
        + " from ("
        + " select\n"
        + " rank() over (order by \"hire_date\") AS \"rnk\""
        + " from \"foodmart\".\"employee\"\n)";
    final String expectedSql = "SELECT MAX(RANK() OVER (ORDER BY \"hire_date\")) AS \"rnk1\"\n"
        + "FROM \"foodmart\".\"employee\"";
    final String expectedHive = "SELECT MAX(rnk) rnk1\n"
        + "FROM (SELECT RANK() OVER (ORDER BY hire_date NULLS LAST) rnk\n"
        + "FROM foodmart.employee) t";
    final String expectedSpark = "SELECT MAX(rnk) rnk1\n"
        + "FROM (SELECT RANK() OVER (ORDER BY hire_date NULLS LAST) rnk\n"
        + "FROM foodmart.employee) t";
    // BigQuery emulates NULLS LAST by sorting on "hire_date IS NULL" first.
    final String expectedBigQuery = "SELECT MAX(rnk) AS rnk1\n"
        + "FROM (SELECT RANK() OVER (ORDER BY hire_date IS NULL, hire_date) AS rnk\n"
        + "FROM foodmart.employee) AS t";
    sql(query)
        .ok(expectedSql)
        .withHive2()
        .ok(expectedHive)
        .withSpark()
        .ok(expectedSpark)
        .withBigQuery()
        .ok(expectedBigQuery);
  }
  /** As {@link #testAnalyticalFunctionInAggregate()}, but the window function
   * appears inside a CASE expression that feeds the aggregate. */
  @Test public void testAnalyticalFunctionInAggregate1() {
    final String query = "select\n"
        + "MAX(\"rnk\") AS \"rnk1\""
        + " from ("
        + " select\n"
        + " case when rank() over (order by \"hire_date\") = 1"
        + " then 100"
        + " else 200"
        + " end as \"rnk\""
        + " from \"foodmart\".\"employee\"\n)";
    final String expectedSql = "SELECT MAX(CASE WHEN (RANK() OVER (ORDER BY \"hire_date\")) = 1 "
        + "THEN 100 ELSE 200 END) AS \"rnk1\"\n"
        + "FROM \"foodmart\".\"employee\"";
    final String expectedHive = "SELECT MAX(rnk) rnk1\n"
        + "FROM (SELECT CASE WHEN (RANK() OVER (ORDER BY hire_date NULLS LAST)) = 1"
        + " THEN 100 ELSE 200 END rnk\n"
        + "FROM foodmart.employee) t";
    final String expectedSpark = "SELECT MAX(rnk) rnk1\n"
        + "FROM (SELECT CASE WHEN (RANK() OVER (ORDER BY hire_date NULLS LAST)) = 1 "
        + "THEN 100 ELSE 200 END rnk\n"
        + "FROM foodmart.employee) t";
    // BigQuery emulates NULLS LAST by sorting on "hire_date IS NULL" first.
    final String expectedBigQuery = "SELECT MAX(rnk) AS rnk1\n"
        + "FROM (SELECT CASE WHEN (RANK() OVER (ORDER BY hire_date IS NULL, hire_date)) = 1 "
        + "THEN 100 ELSE 200 END AS rnk\n"
        + "FROM foodmart.employee) AS t";
    sql(query)
        .ok(expectedSql)
        .withHive2()
        .ok(expectedHive)
        .withSpark()
        .ok(expectedSpark)
        .withBigQuery()
        .ok(expectedBigQuery);
  }
  /** GROUP BY on an expression that contains a window function: most dialects
   * repeat the full CASE/window expression in both SELECT and GROUP BY, while
   * BigQuery groups on the alias of a derived table. */
  @Test public void testAnalyticalFunctionInGroupByWhereAnalyticalFunctionIsInputOfOtherFunction() {
    final String query = "select\n"
        + "\"rnk\""
        + " from ("
        + " select\n"
        + " CASE WHEN \"salary\"=20 THEN MAX(\"salary\") OVER(PARTITION BY \"position_id\") END AS \"rnk\""
        + " from \"foodmart\".\"employee\"\n) group by \"rnk\"";
    final String expectedSql = "SELECT CASE WHEN CAST(\"salary\" AS DECIMAL(14, 4)) = 20 THEN"
        + " MAX(\"salary\") OVER (PARTITION BY \"position_id\" RANGE BETWEEN UNBOUNDED "
        + "PRECEDING AND UNBOUNDED FOLLOWING) ELSE NULL END AS \"rnk\"\n"
        + "FROM \"foodmart\".\"employee\"\n"
        + "GROUP BY CASE WHEN CAST(\"salary\" AS DECIMAL(14, 4)) = 20 THEN MAX"
        + "(\"salary\") OVER (PARTITION BY \"position_id\" RANGE BETWEEN UNBOUNDED "
        + "PRECEDING AND UNBOUNDED FOLLOWING) ELSE NULL END";
    final String expectedHive = "SELECT CASE WHEN CAST(salary AS DECIMAL(14, 4)) = 20 THEN MAX"
        + "(salary) OVER (PARTITION BY position_id RANGE BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED "
        + "FOLLOWING) ELSE NULL END rnk\n"
        + "FROM foodmart.employee\n"
        + "GROUP BY CASE WHEN CAST(salary AS DECIMAL(14, 4)) = 20 THEN MAX(salary) OVER "
        + "(PARTITION BY position_id RANGE BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) "
        + "ELSE NULL END";
    final String expectedSpark = expectedHive;
    // BigQuery casts the comparison operand to NUMERIC and groups on the alias.
    final String expectedBigQuery = "SELECT rnk\n"
        + "FROM (SELECT CASE WHEN CAST(salary AS NUMERIC) = 20 THEN MAX(salary) OVER "
        + "(PARTITION BY position_id RANGE BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) "
        + "ELSE NULL END AS rnk\n"
        + "FROM foodmart.employee) AS t\n"
        + "GROUP BY rnk";
    // MSSQL expresses the unbounded frame with ORDER BY ... ROWS instead of RANGE.
    final String mssql = "SELECT CASE WHEN CAST([salary] AS DECIMAL(14, 4)) = 20 THEN MAX("
        + "[salary]) OVER (PARTITION BY [position_id] ORDER BY [salary] ROWS BETWEEN UNBOUNDED "
        + "PRECEDING AND UNBOUNDED FOLLOWING) ELSE NULL END AS [rnk]\n"
        + "FROM [foodmart].[employee]\n"
        + "GROUP BY CASE WHEN CAST([salary] AS DECIMAL(14, 4)) = 20 THEN MAX([salary]) OVER "
        + "(PARTITION BY [position_id] ORDER BY [salary] ROWS BETWEEN UNBOUNDED PRECEDING AND "
        + "UNBOUNDED FOLLOWING) ELSE NULL END";
    sql(query)
        .ok(expectedSql)
        .withHive()
        .ok(expectedHive)
        .withSpark()
        .ok(expectedSpark)
        .withBigQuery()
        .ok(expectedBigQuery)
        .withMssql()
        .ok(mssql);
  }
  /** GROUP BY on a CASE over ROW_NUMBER(): most dialects repeat the expression
   * in GROUP BY; BigQuery groups on the alias via a derived table. */
  @Test public void testAnalyticalFunctionInGroupByWhereAnalyticalFunctionIsInput() {
    final String query = "select\n"
        + "\"rnk\""
        + " from ("
        + " select\n"
        + " case when row_number() over (PARTITION by \"hire_date\") = 1 THEN 100 else 200 END AS \"rnk\""
        + " from \"foodmart\".\"employee\"\n) group by \"rnk\"";
    final String expectedSql = "SELECT CASE WHEN (ROW_NUMBER() OVER (PARTITION BY \"hire_date\"))"
        + " = 1 THEN 100 ELSE 200 END AS \"rnk\"\n"
        + "FROM \"foodmart\".\"employee\"\n"
        + "GROUP BY CASE WHEN"
        + " (ROW_NUMBER() OVER (PARTITION BY \"hire_date\")) = 1 THEN 100 ELSE 200 END";
    final String expectedHive = "SELECT CASE WHEN (ROW_NUMBER() OVER (PARTITION BY hire_date)) = "
        + "1 THEN 100 ELSE 200 END rnk\n"
        + "FROM foodmart.employee\n"
        + "GROUP BY CASE WHEN (ROW_NUMBER() "
        + "OVER (PARTITION BY hire_date)) = 1 THEN 100 ELSE 200 END";
    final String expectedSpark = expectedHive;
    final String expectedBigQuery = "SELECT rnk\n"
        + "FROM (SELECT CASE WHEN (ROW_NUMBER() OVER "
        + "(PARTITION BY hire_date)) = 1 THEN 100 ELSE 200 END AS rnk\n"
        + "FROM foodmart.employee) AS t\n"
        + "GROUP BY rnk";
    final String mssql = "SELECT CASE WHEN (ROW_NUMBER() OVER (PARTITION BY [hire_date])) = 1 "
        + "THEN 100 ELSE 200 END AS [rnk]\n"
        + "FROM [foodmart].[employee]\nGROUP BY CASE WHEN "
        + "(ROW_NUMBER() OVER (PARTITION BY [hire_date])) = 1 THEN 100 ELSE 200 END";
    sql(query)
        .ok(expectedSql)
        .withHive()
        .ok(expectedHive)
        .withSpark()
        .ok(expectedSpark)
        .withBigQuery()
        .ok(expectedBigQuery)
        .withMssql()
        .ok(mssql);
  }
  /** Test case for
   * <a href="https://issues.apache.org/jira/browse/CALCITE-2628">[CALCITE-2628]
   * JDBC adapter throws NullPointerException while generating GROUP BY query
   * for MySQL</a>.
   *
   * <p>MySQL does not support nested aggregates, so {@link RelToSqlConverter}
   * performs some extra checks, looking for aggregates in the input
   * sub-query, and these would fail with {@code NullPointerException}
   * and {@code ClassCastException} in some cases. */
  @Test void testNestedAggregatesMySqlTable() {
    final Function<RelBuilder, RelNode> relFn = b -> b
        .scan("EMP")
        .aggregate(b.groupKey(),
            // field 3 of EMP is MGR, per the expected COUNT(`MGR`) below
            b.count(false, "c", b.field(3)))
        .build();
    final String expectedSql = "SELECT COUNT(`MGR`) AS `c`\n"
        + "FROM `scott`.`EMP`";
    relFn(relFn).withMysql().ok(expectedSql);
  }
  /** As {@link #testNestedAggregatesMySqlTable()}, but input is a sub-query,
   * not a table. */
  @Test void testNestedAggregatesMySqlStar() {
    final Function<RelBuilder, RelNode> relFn = b -> b
        .scan("EMP")
        .filter(b.equals(b.field("DEPTNO"), b.literal(10)))
        .aggregate(b.groupKey(),
            b.count(false, "c", b.field(3)))
        .build();
    // The filter is pushed into the same statement as a WHERE clause.
    final String expectedSql = "SELECT COUNT(`MGR`) AS `c`\n"
        + "FROM `scott`.`EMP`\n"
        + "WHERE `DEPTNO` = 10";
    relFn(relFn).withMysql().ok(expectedSql);
  }
  /** Converts a table-function scan of UNNEST over an array literal into
   * BigQuery syntax. */
  @Test public void testTableFunctionScanWithUnnest() {
    final RelBuilder builder = relBuilder();
    String[] array = {"abc", "bcd", "fdc"};
    RelNode root = builder.functionScan(SqlStdOperatorTable.UNNEST, 0,
        builder.makeArrayLiteral(Arrays.asList(array))).project(builder.field(0)).build();
    final SqlDialect dialect = DatabaseProduct.BIG_QUERY.getDialect();
    final String expectedSql = "SELECT *\nFROM UNNEST(ARRAY['abc', 'bcd', 'fdc'])\nAS EXPR$0";
    assertThat(toSql(root, dialect), isLinux(expectedSql));
  }
  /** Test case for
   * <a href="https://issues.apache.org/jira/browse/CALCITE-3207">[CALCITE-3207]
   * Fail to convert Join RelNode with like condition to sql statement </a>.
   */
  @Test void testJoinWithLikeConditionRel2Sql() {
    final Function<RelBuilder, RelNode> relFn = b -> b
        .scan("EMP")
        .scan("DEPT")
        .join(JoinRelType.LEFT,
            b.and(
                b.call(SqlStdOperatorTable.EQUALS,
                    b.field(2, 0, "DEPTNO"),
                    b.field(2, 1, "DEPTNO")),
                // the non-equi LIKE predicate must survive in the ON clause
                b.call(SqlStdOperatorTable.LIKE,
                    b.field(2, 1, "DNAME"),
                    b.literal("ACCOUNTING"))))
        .build();
    final String expectedSql = "SELECT *\n"
        + "FROM \"scott\".\"EMP\"\n"
        + "LEFT JOIN \"scott\".\"DEPT\" "
        + "ON \"EMP\".\"DEPTNO\" = \"DEPT\".\"DEPTNO\" "
        + "AND \"DEPT\".\"DNAME\" LIKE 'ACCOUNTING'";
    relFn(relFn).ok(expectedSql);
  }
@Test void testSelectQueryWithGroupByAndProjectList1() {
String query = "select count(*) from \"product\"\n"
+ "group by \"product_class_id\", \"product_id\"";
final String expected = "SELECT COUNT(*)\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "GROUP BY \"product_class_id\", \"product_id\"";
sql(query).ok(expected);
}
@Test void testSelectQueryWithGroupByHaving() {
String query = "select count(*) from \"product\" group by \"product_class_id\","
+ " \"product_id\" having \"product_id\" > 10";
final String expected = "SELECT COUNT(*)\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "GROUP BY \"product_class_id\", \"product_id\"\n"
+ "HAVING \"product_id\" > 10";
sql(query).ok(expected);
}
  /** Test case for
   * <a href="https://issues.apache.org/jira/browse/CALCITE-1665">[CALCITE-1665]
   * Aggregates and having cannot be combined</a>. */
  @Test void testSelectQueryWithGroupByHaving2() {
    String query = " select \"product\".\"product_id\",\n"
        + " min(\"sales_fact_1997\".\"store_id\")\n"
        + " from \"product\"\n"
        + " inner join \"sales_fact_1997\"\n"
        + " on \"product\".\"product_id\" = \"sales_fact_1997\".\"product_id\"\n"
        + " group by \"product\".\"product_id\"\n"
        + " having count(*) > 1";
    // HAVING references COUNT(*), which is not in the SELECT list.
    String expected = "SELECT \"product\".\"product_id\", "
        + "MIN(\"sales_fact_1997\".\"store_id\")\n"
        + "FROM \"foodmart\".\"product\"\n"
        + "INNER JOIN \"foodmart\".\"sales_fact_1997\" "
        + "ON \"product\".\"product_id\" = \"sales_fact_1997\".\"product_id\"\n"
        + "GROUP BY \"product\".\"product_id\"\n"
        + "HAVING COUNT(*) > 1";
    sql(query).ok(expected);
  }
  /** Test case for
   * <a href="https://issues.apache.org/jira/browse/CALCITE-1665">[CALCITE-1665]
   * Aggregates and having cannot be combined</a>. */
  @Test void testSelectQueryWithGroupByHaving3() {
    String query = " select * from (select \"product\".\"product_id\",\n"
        + " min(\"sales_fact_1997\".\"store_id\")\n"
        + " from \"product\"\n"
        + " inner join \"sales_fact_1997\"\n"
        + " on \"product\".\"product_id\" = \"sales_fact_1997\".\"product_id\"\n"
        + " group by \"product\".\"product_id\"\n"
        + " having count(*) > 1) where \"product_id\" > 100";
    // The outer filter becomes a WHERE on the derived-table alias "t2".
    String expected = "SELECT *\n"
        + "FROM (SELECT \"product\".\"product_id\","
        + " MIN(\"sales_fact_1997\".\"store_id\")\n"
        + "FROM \"foodmart\".\"product\"\n"
        + "INNER JOIN \"foodmart\".\"sales_fact_1997\" ON \"product\".\"product_id\" = "
        + "\"sales_fact_1997\".\"product_id\"\n"
        + "GROUP BY \"product\".\"product_id\"\n"
        + "HAVING COUNT(*) > 1) AS \"t2\"\n"
        + "WHERE \"t2\".\"product_id\" > 100";
    sql(query).ok(expected);
  }
  /** Test case for
   * <a href="https://issues.apache.org/jira/browse/CALCITE-3811">[CALCITE-3811]
   * JDBC adapter generates SQL with invalid field names if Filter's row type
   * is different from its input</a>. */
  @Test void testHavingAlias() {
    final RelBuilder builder = relBuilder();
    builder.scan("EMP")
        .project(builder.alias(builder.field("DEPTNO"), "D"))
        .aggregate(builder.groupKey(builder.field("D")),
            builder.countStar("emps.count"))
        .filter(
            builder.call(SqlStdOperatorTable.LESS_THAN,
                builder.field("emps.count"), builder.literal(2)));
    final LogicalFilter filter = (LogicalFilter) builder.build();
    assertThat(filter.getRowType().getFieldNames().toString(),
        is("[D, emps.count]"));
    // Create a LogicalAggregate similar to the input of filter, but with different
    // field names.
    final LogicalAggregate newAggregate =
        (LogicalAggregate) builder.scan("EMP")
            .project(builder.alias(builder.field("DEPTNO"), "D2"))
            .aggregate(builder.groupKey(builder.field("D2")),
                builder.countStar("emps.count"))
            .build();
    assertThat(newAggregate.getRowType().getFieldNames().toString(),
        is("[D2, emps.count]"));
    // Change filter's input. Its row type does not change.
    filter.replaceInput(0, newAggregate);
    assertThat(filter.getRowType().getFieldNames().toString(),
        is("[D, emps.count]"));
    final RelNode root =
        builder.push(filter)
            .project(builder.alias(builder.field("D"), "emps.deptno"))
            .build();
    // MySQL and BigQuery must use the input's field names (D2), not the
    // filter's stale row-type names (D); both wrap the HAVING in a sub-query.
    final String expectedMysql = "SELECT `D2` AS `emps.deptno`\n"
        + "FROM (SELECT `DEPTNO` AS `D2`, COUNT(*) AS `emps.count`\n"
        + "FROM `scott`.`EMP`\n"
        + "GROUP BY `D2`\n"
        + "HAVING `emps.count` < 2) AS `t1`";
    final String expectedPostgresql = "SELECT \"DEPTNO\" AS \"emps.deptno\"\n"
        + "FROM \"scott\".\"EMP\"\n"
        + "GROUP BY \"DEPTNO\"\n"
        + "HAVING COUNT(*) < 2";
    final String expectedBigQuery = "SELECT D2 AS `emps.deptno`\n"
        + "FROM (SELECT DEPTNO AS D2, COUNT(*) AS `emps.count`\n"
        + "FROM scott.EMP\n"
        + "GROUP BY D2\n"
        + "HAVING `emps.count` < 2) AS t1";
    relFn(b -> root)
        .withMysql().ok(expectedMysql)
        .withPostgresql().ok(expectedPostgresql)
        .withBigQuery().ok(expectedBigQuery);
  }
  /** Test case for
   * <a href="https://issues.apache.org/jira/browse/CALCITE-3896">[CALCITE-3896]
   * JDBC adapter, when generating SQL, changes target of ambiguous HAVING
   * clause with a Project on Filter on Aggregate</a>.
   *
   * <p>The alias is ambiguous in dialects such as MySQL and BigQuery that
   * have {@link SqlConformance#isHavingAlias()} = true. When the HAVING clause
   * tries to reference a column, it sees the alias instead. */
  // Alias "GROSS_WEIGHT" differs from column "gross_weight" only by case.
  @Test void testHavingAliasSameAsColumnIgnoringCase() {
    checkHavingAliasSameAsColumn(true);
  }
  /** As {@link #testHavingAliasSameAsColumnIgnoringCase()}, but the alias
   * "gross_weight" is identical to the column name. */
  @Test void testHavingAliasSameAsColumn() {
    checkHavingAliasSameAsColumn(false);
  }
  /** Checks dialect-specific handling of a HAVING clause when a select-item
   * alias collides with the underlying column name.
   *
   * @param upperAlias whether the alias is "GROSS_WEIGHT" (differs from the
   * column only by case) rather than "gross_weight" (identical)
   */
  private void checkHavingAliasSameAsColumn(boolean upperAlias) {
    final String alias = upperAlias ? "GROSS_WEIGHT" : "gross_weight";
    final String query = "select \"product_id\" + 1,\n"
        + " sum(\"gross_weight\") as \"" + alias + "\"\n"
        + "from \"product\"\n"
        + "group by \"product_id\"\n"
        + "having sum(\"product\".\"gross_weight\") < 200";
    // PostgreSQL has isHavingAlias=false, case-sensitive=true
    final String expectedPostgresql = "SELECT \"product_id\" + 1,"
        + " SUM(\"gross_weight\") AS \"" + alias + "\"\n"
        + "FROM \"foodmart\".\"product\"\n"
        + "GROUP BY \"product_id\"\n"
        + "HAVING SUM(\"gross_weight\") < 200";
    // MySQL has isHavingAlias=true, case-sensitive=true
    final String expectedMysql = "SELECT `product_id` + 1, `" + alias + "`\n"
        + "FROM (SELECT `product_id`, SUM(`gross_weight`) AS `" + alias + "`\n"
        + "FROM `foodmart`.`product`\n"
        + "GROUP BY `product_id`\n"
        + "HAVING `" + alias + "` < 200) AS `t1`";
    // BigQuery has isHavingAlias=true, case-sensitive=false
    final String expectedBigQuery = upperAlias
        ? "SELECT product_id + 1, GROSS_WEIGHT\n"
        + "FROM (SELECT product_id, SUM(gross_weight) AS GROSS_WEIGHT\n"
        + "FROM foodmart.product\n"
        + "GROUP BY product_id\n"
        + "HAVING GROSS_WEIGHT < 200) AS t1"
        // Before [CALCITE-3896] was fixed, we got
        // "HAVING SUM(gross_weight) < 200) AS t1"
        // which on BigQuery gives you an error about aggregating aggregates
        : "SELECT product_id + 1, gross_weight\n"
        + "FROM (SELECT product_id, SUM(gross_weight) AS gross_weight\n"
        + "FROM foodmart.product\n"
        + "GROUP BY product_id\n"
        + "HAVING gross_weight < 200) AS t1";
    sql(query)
        .withPostgresql().ok(expectedPostgresql)
        .withMysql().ok(expectedMysql)
        .withBigQuery().ok(expectedBigQuery);
  }
  /** HAVING at two levels: the inner filter on alias "agw" is folded into the
   * inner query's HAVING (as AVG of the underlying column), while the outer
   * HAVING aggregates the derived-table alias. */
  @Test void testHaving4() {
    final String query = "select \"product_id\"\n"
        + "from (\n"
        + "  select \"product_id\", avg(\"gross_weight\") as agw\n"
        + "  from \"product\"\n"
        + "  where \"net_weight\" < 100\n"
        + "  group by \"product_id\")\n"
        + "where agw > 50\n"
        + "group by \"product_id\"\n"
        + "having avg(agw) > 60\n";
    final String expected = "SELECT \"product_id\"\n"
        + "FROM (SELECT \"product_id\", AVG(\"gross_weight\") AS \"AGW\"\n"
        + "FROM \"foodmart\".\"product\"\n"
        + "WHERE \"net_weight\" < 100\n"
        + "GROUP BY \"product_id\"\n"
        + "HAVING AVG(\"gross_weight\") > 50) AS \"t2\"\n"
        + "GROUP BY \"product_id\"\n"
        + "HAVING AVG(\"AGW\") > 60";
    sql(query).ok(expected);
  }
@Test void testSelectQueryWithOrderByClause() {
String query = "select \"product_id\" from \"product\"\n"
+ "order by \"net_weight\"";
final String expected = "SELECT \"product_id\", \"net_weight\"\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "ORDER BY \"net_weight\"";
sql(query).ok(expected);
}
@Test void testSelectQueryWithOrderByClause1() {
String query =
"select \"product_id\", \"net_weight\" from \"product\" order by \"net_weight\"";
final String expected = "SELECT \"product_id\", \"net_weight\"\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "ORDER BY \"net_weight\"";
sql(query).ok(expected);
}
@Test void testSelectQueryWithTwoOrderByClause() {
String query = "select \"product_id\" from \"product\"\n"
+ "order by \"net_weight\", \"gross_weight\"";
final String expected = "SELECT \"product_id\", \"net_weight\","
+ " \"gross_weight\"\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "ORDER BY \"net_weight\", \"gross_weight\"";
sql(query).ok(expected);
}
@Test void testSelectQueryWithAscDescOrderByClause() {
String query = "select \"product_id\" from \"product\" "
+ "order by \"net_weight\" asc, \"gross_weight\" desc, \"low_fat\"";
final String expected = "SELECT"
+ " \"product_id\", \"net_weight\", \"gross_weight\", \"low_fat\"\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "ORDER BY \"net_weight\", \"gross_weight\" DESC, \"low_fat\"";
sql(query).ok(expected);
}
/** Test case for
* <a href="https://issues.apache.org/jira/browse/CALCITE-3440">[CALCITE-3440]
* RelToSqlConverter does not properly alias ambiguous ORDER BY</a>. */
@Test void testOrderByColumnWithSameNameAsAlias() {
String query = "select \"product_id\" as \"p\",\n"
+ " \"net_weight\" as \"product_id\"\n"
+ "from \"product\"\n"
+ "order by 1";
final String expected = "SELECT \"product_id\" AS \"p\","
+ " \"net_weight\" AS \"product_id\"\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "ORDER BY \"p\"";
sql(query).ok(expected);
}
  /** As {@link #testOrderByColumnWithSameNameAsAlias()}, but the real column
   * gets the disambiguated alias "product_id0" and the sort uses it. */
  @Test void testOrderByColumnWithSameNameAsAlias2() {
    // We use ordinal "2" because the column name "product_id" is obscured
    // by alias "product_id".
    String query = "select \"net_weight\" as \"product_id\",\n"
        + "  \"product_id\" as \"product_id\"\n"
        + "from \"product\"\n"
        + "order by \"product\".\"product_id\"";
    final String expected = "SELECT \"net_weight\" AS \"product_id\","
        + " \"product_id\" AS \"product_id0\"\n"
        + "FROM \"foodmart\".\"product\"\n"
        + "ORDER BY \"product_id0\"";
    // MySQL emulates NULLS LAST by first sorting on "IS NULL".
    final String expectedMysql = "SELECT `net_weight` AS `product_id`,"
        + " `product_id` AS `product_id0`\n"
        + "FROM `foodmart`.`product`\n"
        + "ORDER BY `product_id0` IS NULL, `product_id0`";
    sql(query).ok(expected)
        .withMysql().ok(expectedMysql);
  }
@Test void testHiveSelectCharset() {
String query = "select \"hire_date\", cast(\"hire_date\" as varchar(10)) "
+ "from \"foodmart\".\"reserve_employee\"";
final String expected = "SELECT hire_date, CAST(hire_date AS VARCHAR(10))\n"
+ "FROM foodmart.reserve_employee";
sql(query).withHive().ok(expected);
}
/** Test case for
* <a href="https://issues.apache.org/jira/browse/CALCITE-3282">[CALCITE-3282]
* HiveSqlDialect unparse Interger type as Int in order
* to be compatible with Hive1.x</a>. */
@Test void testHiveCastAsInt() {
String query = "select cast( cast(\"employee_id\" as varchar) as int) "
+ "from \"foodmart\".\"reserve_employee\" ";
final String expected = "SELECT CAST(CAST(employee_id AS VARCHAR) AS INT)\n"
+ "FROM foodmart.reserve_employee";
sql(query).withHive().ok(expected);
}
  /** Checks BigQuery's type mapping for CAST: all integer widths map to INT64,
   * float/double to FLOAT64, char/varchar to STRING, binary/varbinary to
   * BYTES, timestamp to DATETIME, decimal to NUMERIC and boolean to BOOL. */
  @Test void testBigQueryCast() {
    String query = "select cast(cast(\"employee_id\" as varchar) as bigint), "
        + "cast(cast(\"employee_id\" as varchar) as smallint), "
        + "cast(cast(\"employee_id\" as varchar) as tinyint), "
        + "cast(cast(\"employee_id\" as varchar) as integer), "
        + "cast(cast(\"employee_id\" as varchar) as float), "
        + "cast(cast(\"employee_id\" as varchar) as char), "
        + "cast(cast(\"employee_id\" as varchar) as binary), "
        + "cast(cast(\"employee_id\" as varchar) as varbinary), "
        + "cast(cast(\"employee_id\" as varchar) as timestamp), "
        + "cast(cast(\"employee_id\" as varchar) as double), "
        + "cast(cast(\"employee_id\" as varchar) as decimal), "
        + "cast(cast(\"employee_id\" as varchar) as date), "
        + "cast(cast(\"employee_id\" as varchar) as time), "
        + "cast(cast(\"employee_id\" as varchar) as boolean) "
        + "from \"foodmart\".\"reserve_employee\" ";
    final String expected = "SELECT CAST(CAST(employee_id AS STRING) AS INT64), "
        + "CAST(CAST(employee_id AS STRING) AS INT64), "
        + "CAST(CAST(employee_id AS STRING) AS INT64), "
        + "CAST(CAST(employee_id AS STRING) AS INT64), "
        + "CAST(CAST(employee_id AS STRING) AS FLOAT64), "
        + "CAST(CAST(employee_id AS STRING) AS STRING), "
        + "CAST(CAST(employee_id AS STRING) AS BYTES), "
        + "CAST(CAST(employee_id AS STRING) AS BYTES), "
        + "CAST(CAST(employee_id AS STRING) AS DATETIME), "
        + "CAST(CAST(employee_id AS STRING) AS FLOAT64), "
        + "CAST(CAST(employee_id AS STRING) AS NUMERIC), "
        + "CAST(CAST(employee_id AS STRING) AS DATE), "
        + "CAST(CAST(employee_id AS STRING) AS TIME), "
        + "CAST(CAST(employee_id AS STRING) AS BOOL)\n"
        + "FROM foodmart.reserve_employee";
    sql(query).withBigQuery().ok(expected);
  }
  /** Test case for
   * <a href="https://issues.apache.org/jira/browse/CALCITE-3220">[CALCITE-3220]
   * HiveSqlDialect should transform the SQL-standard TRIM function to TRIM,
   * LTRIM or RTRIM</a>,
   * <a href="https://issues.apache.org/jira/browse/CALCITE-3663">[CALCITE-3663]
   * Support for TRIM function in BigQuery dialect</a>, and
   * <a href="https://issues.apache.org/jira/browse/CALCITE-3771">[CALCITE-3771]
   * Support of TRIM function for SPARK dialect and improvement in HIVE
   * Dialect</a>. */
  @Test void testHiveSparkAndBqTrim() {
    final String query = "SELECT TRIM(' str ')\n"
        + "from \"foodmart\".\"reserve_employee\"";
    // Hive and BigQuery keep the single-argument form.
    final String expected = "SELECT TRIM(' str ')\n"
        + "FROM foodmart.reserve_employee";
    // Spark spells out the SQL-standard BOTH form.
    final String expectedSpark = "SELECT TRIM(BOTH ' ' FROM ' str ')\nFROM foodmart"
        + ".reserve_employee";
    sql(query)
        .withHive()
        .ok(expected)
        .withSpark()
        .ok(expectedSpark)
        .withBigQuery()
        .ok(expected);
  }
  /** Explicit {@code TRIM(BOTH ' ' FROM ...)} with the default space
   * character simplifies to single-argument TRIM on Hive and BigQuery. */
  @Test void testHiveSparkAndBqTrimWithBoth() {
    final String query = "SELECT TRIM(both ' ' from ' str ')\n"
        + "from \"foodmart\".\"reserve_employee\"";
    final String expected = "SELECT TRIM(' str ')\n"
        + "FROM foodmart.reserve_employee";
    final String expectedSpark = "SELECT TRIM(BOTH ' ' FROM ' str ')\n"
        + "FROM foodmart.reserve_employee";
    sql(query)
        .withHive()
        .ok(expected)
        .withSpark()
        .ok(expectedSpark)
        .withBigQuery()
        .ok(expected);
  }
  /** {@code TRIM(LEADING ' ' ...)} becomes LTRIM on Hive and BigQuery;
   * Spark keeps the SQL-standard form. */
  @Test void testHiveSparkAndBqTrimWithLeading() {
    final String query = "SELECT TRIM(LEADING ' ' from ' str ')\n"
        + "from \"foodmart\".\"reserve_employee\"";
    final String expected = "SELECT LTRIM(' str ')\n"
        + "FROM foodmart.reserve_employee";
    final String expectedSpark = "SELECT TRIM(LEADING ' ' FROM ' str ')\nFROM foodmart"
        + ".reserve_employee";
    sql(query)
        .withHive()
        .ok(expected)
        .withSpark()
        .ok(expectedSpark)
        .withBigQuery()
        .ok(expected);
  }
  /** {@code TRIM(TRAILING ' ' ...)} becomes RTRIM on Hive and BigQuery;
   * Spark keeps the SQL-standard form. */
  @Test void testHiveSparkAndBqTrimWithTailing() {
    final String query = "SELECT TRIM(TRAILING ' ' from ' str ')\n"
        + "from \"foodmart\".\"reserve_employee\"";
    final String expected = "SELECT RTRIM(' str ')\n"
        + "FROM foodmart.reserve_employee";
    final String expectedSpark = "SELECT TRIM(TRAILING ' ' FROM ' str ')\nFROM foodmart"
        + ".reserve_employee";
    sql(query)
        .withHive()
        .ok(expected)
        .withSpark()
        .ok(expectedSpark)
        .withBigQuery()
        .ok(expected);
  }
/** Test case for
* <a href="https://issues.apache.org/jira/browse/CALCITE-3663">[CALCITE-3663]
* Support for TRIM function in BigQuery dialect</a>. */
@Test void testBqTrimWithLeadingChar() {
final String query = "SELECT TRIM(LEADING 'a' from 'abcd')\n"
+ "from \"foodmart\".\"reserve_employee\"";
final String expected = "SELECT LTRIM('abcd', 'a')\n"
+ "FROM foodmart.reserve_employee";
final String expectedHS = "SELECT REGEXP_REPLACE('abcd', '^(a)*', '')\n"
+ "FROM foodmart.reserve_employee";
sql(query)
.withBigQuery()
.ok(expected);
}
  /** Test case for
   * <a href="https://issues.apache.org/jira/browse/CALCITE-3771">[CALCITE-3771]
   * Support of TRIM function for SPARK dialect and improvement in HIVE Dialect</a>. */
  @Test void testHiveAndSparkTrimWithLeadingChar() {
    final String query = "SELECT TRIM(LEADING 'a' from 'abcd')\n"
        + "from \"foodmart\".\"reserve_employee\"";
    // Hive rewrites to REGEXP_REPLACE anchored at the start of the string.
    final String expected = "SELECT REGEXP_REPLACE('abcd', '^(a)*', '')\n"
        + "FROM foodmart.reserve_employee";
    final String expectedSpark = "SELECT TRIM(LEADING 'a' FROM 'abcd')\nFROM foodmart"
        + ".reserve_employee";
    sql(query)
        .withHive()
        .ok(expected)
        .withSpark()
        .ok(expectedSpark);
  }
@Test void testBqTrimWithBothChar() {
final String query = "SELECT TRIM(both 'a' from 'abcda')\n"
+ "from \"foodmart\".\"reserve_employee\"";
final String expected = "SELECT TRIM('abcda', 'a')\n"
+ "FROM foodmart.reserve_employee";
sql(query)
.withBigQuery()
.ok(expected);
}
  /** {@code TRIM(BOTH 'a' ...)} on Hive becomes REGEXP_REPLACE anchored at
   * both ends; Spark keeps the SQL-standard form. */
  @Test void testHiveAndSparkTrimWithBothChar() {
    final String query = "SELECT TRIM(both 'a' from 'abcda')\n"
        + "from \"foodmart\".\"reserve_employee\"";
    final String expected = "SELECT REGEXP_REPLACE('abcda', '^(a)*|(a)*$', '')\n"
        + "FROM foodmart.reserve_employee";
    final String expectedSpark = "SELECT TRIM(BOTH 'a' FROM 'abcda')\n"
        + "FROM foodmart.reserve_employee";
    sql(query)
        .withHive()
        .ok(expected)
        .withSpark()
        .ok(expectedSpark);
  }
  /** {@code TRIM(TRAILING 'a' ...)} becomes two-argument RTRIM on BigQuery. */
  // NOTE(review): the method name mentions Hive but only BigQuery is asserted
  // here — verify whether a Hive expectation was intended.
  @Test void testHiveBqTrimWithTailingChar() {
    final String query = "SELECT TRIM(TRAILING 'a' from 'abcd')\n"
        + "from \"foodmart\".\"reserve_employee\"";
    final String expected = "SELECT RTRIM('abcd', 'a')\n"
        + "FROM foodmart.reserve_employee";
    sql(query)
        .withBigQuery()
        .ok(expected);
  }
  /** Single-argument TRIM on a column: Hive and BigQuery keep it, Snowflake
   * keeps it with quoted identifiers, Spark spells out the BOTH form. */
  @Test public void testTrim() {
    final String query = "SELECT TRIM(\"full_name\")\n"
        + "from \"foodmart\".\"reserve_employee\"";
    final String expected = "SELECT TRIM(full_name)\n"
        + "FROM foodmart.reserve_employee";
    final String expectedSnowFlake = "SELECT TRIM(\"full_name\")\n"
        + "FROM \"foodmart\".\"reserve_employee\"";
    final String expectedSpark = "SELECT TRIM(BOTH ' ' FROM full_name)\nFROM foodmart"
        + ".reserve_employee";
    sql(query)
        .withHive()
        .ok(expected)
        .withSpark()
        .ok(expectedSpark)
        .withBigQuery()
        .ok(expected)
        .withSnowflake()
        .ok(expectedSnowFlake);
  }
  /** {@code TRIM(BOTH ' ' FROM col)}: Hive/BigQuery/Snowflake collapse to
   * single-argument TRIM; Spark keeps the standard form; MSSQL uses its
   * {@code TRIM(chars FROM col)} syntax. */
  @Test public void testTrimWithBoth() {
    final String query = "SELECT TRIM(both ' ' from \"full_name\")\n"
        + "from \"foodmart\".\"reserve_employee\"";
    final String expected = "SELECT TRIM(full_name)\n"
        + "FROM foodmart.reserve_employee";
    final String expectedSpark = "SELECT TRIM(BOTH ' ' FROM full_name)\n"
        + "FROM foodmart.reserve_employee";
    final String expectedSnowFlake = "SELECT TRIM(\"full_name\")\n"
        + "FROM \"foodmart\".\"reserve_employee\"";
    final String expectedMsSql = "SELECT TRIM(' ' FROM [full_name])\n"
        + "FROM [foodmart].[reserve_employee]";
    sql(query)
        .withHive()
        .ok(expected)
        .withSpark()
        .ok(expectedSpark)
        .withBigQuery()
        .ok(expected)
        .withSnowflake()
        .ok(expectedSnowFlake)
        .withMssql()
        .ok(expectedMsSql);
  }
  /** {@code TRIM(LEADING ' ' ...)} becomes LTRIM on Hive, BigQuery,
   * Snowflake and MSSQL; Spark keeps the SQL-standard form. */
  @Test public void testTrimWithLeadingSpace() {
    final String query = "SELECT TRIM(LEADING ' ' from ' str ')\n"
        + "from \"foodmart\".\"reserve_employee\"";
    final String expected = "SELECT LTRIM(' str ')\n"
        + "FROM foodmart.reserve_employee";
    final String expectedSpark = "SELECT TRIM(LEADING ' ' FROM ' str ')\nFROM foodmart"
        + ".reserve_employee";
    final String expectedSnowFlake = "SELECT LTRIM(' str ')\n"
        + "FROM \"foodmart\".\"reserve_employee\"";
    final String expectedMsSql = "SELECT LTRIM(' str ')\n"
        + "FROM [foodmart].[reserve_employee]";
    sql(query)
        .withHive()
        .ok(expected)
        .withSpark()
        .ok(expectedSpark)
        .withBigQuery()
        .ok(expected)
        .withSnowflake()
        .ok(expectedSnowFlake)
        .withMssql()
        .ok(expectedMsSql);
  }
  /** {@code TRIM(TRAILING ' ' ...)} becomes RTRIM on Hive, BigQuery,
   * Snowflake and MSSQL; Spark keeps the SQL-standard form. */
  @Test public void testTrimWithTailingSpace() {
    final String query = "SELECT TRIM(TRAILING ' ' from ' str ')\n"
        + "from \"foodmart\".\"reserve_employee\"";
    final String expected = "SELECT RTRIM(' str ')\n"
        + "FROM foodmart.reserve_employee";
    final String expectedSpark = "SELECT TRIM(TRAILING ' ' FROM ' str ')"
        + "\nFROM foodmart.reserve_employee";
    final String expectedSnowFlake = "SELECT RTRIM(' str ')\n"
        + "FROM \"foodmart\".\"reserve_employee\"";
    final String expectedMsSql = "SELECT RTRIM(' str ')\n"
        + "FROM [foodmart].[reserve_employee]";
    sql(query)
        .withHive()
        .ok(expected)
        .withSpark()
        .ok(expectedSpark)
        .withBigQuery()
        .ok(expected)
        .withSnowflake()
        .ok(expectedSnowFlake)
        .withMssql()
        .ok(expectedMsSql);
  }
  /** Leading trim of a non-space character from a column: BigQuery and
   * Snowflake use two-argument LTRIM, Hive falls back to REGEXP_REPLACE,
   * Spark keeps the SQL-standard form. */
  @Test public void testTrimWithLeadingCharacter() {
    final String query = "SELECT TRIM(LEADING 'A' from \"first_name\")\n"
        + "from \"foodmart\".\"reserve_employee\"";
    final String expected = "SELECT LTRIM(first_name, 'A')\n"
        + "FROM foodmart.reserve_employee";
    final String expectedSpark = "SELECT TRIM(LEADING 'A' FROM first_name)\nFROM foodmart"
        + ".reserve_employee";
    final String expectedHS = "SELECT REGEXP_REPLACE(first_name, '^(A)*', '')\n"
        + "FROM foodmart.reserve_employee";
    final String expectedSnowFlake = "SELECT LTRIM(\"first_name\", 'A')\n"
        + "FROM \"foodmart\".\"reserve_employee\"";
    sql(query)
        .withHive()
        .ok(expectedHS)
        .withSpark()
        .ok(expectedSpark)
        .withBigQuery()
        .ok(expected)
        .withSnowflake()
        .ok(expectedSnowFlake);
  }
  /** Trailing trim of a non-space character: BigQuery and Snowflake use
   * two-argument RTRIM, Hive uses REGEXP_REPLACE anchored at the end,
   * Spark keeps the SQL-standard form. */
  @Test public void testTrimWithTrailingCharacter() {
    final String query = "SELECT TRIM(TRAILING 'A' from 'AABCAADCAA')\n"
        + "from \"foodmart\".\"reserve_employee\"";
    final String expected = "SELECT RTRIM('AABCAADCAA', 'A')\n"
        + "FROM foodmart.reserve_employee";
    final String expectedSpark = "SELECT TRIM(TRAILING 'A' FROM 'AABCAADCAA')\nFROM foodmart"
        + ".reserve_employee";
    final String expectedHS = "SELECT REGEXP_REPLACE('AABCAADCAA', '(A)*$', '')\n"
        + "FROM foodmart.reserve_employee";
    final String expectedSnowFlake = "SELECT RTRIM('AABCAADCAA', 'A')\n"
        + "FROM \"foodmart\".\"reserve_employee\"";
    sql(query)
        .withHive()
        .ok(expectedHS)
        .withSpark()
        .ok(expectedSpark)
        .withBigQuery()
        .ok(expected)
        .withSnowflake()
        .ok(expectedSnowFlake);
  }
  /** Both-sided trim of a non-space character: BigQuery and Snowflake use
   * two-argument TRIM, Hive uses REGEXP_REPLACE anchored at both ends,
   * Spark keeps the SQL-standard form. */
  @Test public void testTrimWithBothCharacter() {
    final String query = "SELECT TRIM(BOTH 'A' from 'AABCAADCAA')\n"
        + "from \"foodmart\".\"reserve_employee\"";
    final String expected = "SELECT TRIM('AABCAADCAA', 'A')\n"
        + "FROM foodmart.reserve_employee";
    final String expectedSpark = "SELECT TRIM(BOTH 'A' FROM 'AABCAADCAA')\nFROM foodmart"
        + ".reserve_employee";
    final String expectedHS = "SELECT REGEXP_REPLACE('AABCAADCAA', '^(A)*|(A)*$', '')\n"
        + "FROM foodmart.reserve_employee";
    final String expectedSnowFlake = "SELECT TRIM('AABCAADCAA', 'A')\n"
        + "FROM \"foodmart\".\"reserve_employee\"";
    sql(query)
        .withHive()
        .ok(expectedHS)
        .withSpark()
        .ok(expectedSpark)
        .withBigQuery()
        .ok(expected)
        .withSnowflake()
        .ok(expectedSnowFlake);
  }
  /** As {@code testTrimWithLeadingCharacter}, but the trim set contains regex
   * metacharacters ({@code $@*}); the Hive {@code REGEXP_REPLACE} rewrite must
   * escape them, while LTRIM-based dialects pass them through verbatim. */
  @Test public void testTrimWithLeadingSpecialCharacter() {
    final String query = "SELECT TRIM(LEADING 'A$@*' from 'A$@*AABCA$@*AADCAA$@*')\n"
        + "from \"foodmart\".\"reserve_employee\"";
    final String expected = "SELECT LTRIM('A$@*AABCA$@*AADCAA$@*', 'A$@*')\n"
        + "FROM foodmart.reserve_employee";
    final String expectedHS =
        "SELECT REGEXP_REPLACE('A$@*AABCA$@*AADCAA$@*', '^(A\\$\\@\\*)*', '')\n"
            + "FROM foodmart.reserve_employee";
    final String expectedSpark = "SELECT TRIM(LEADING 'A$@*' FROM 'A$@*AABCA$@*AADCAA$@*')\nFROM"
        + " foodmart.reserve_employee";
    final String expectedSnowFlake = "SELECT LTRIM('A$@*AABCA$@*AADCAA$@*', 'A$@*')\n"
        + "FROM \"foodmart\".\"reserve_employee\"";
    sql(query)
        .withHive()
        .ok(expectedHS)
        .withSpark()
        .ok(expectedSpark)
        .withBigQuery()
        .ok(expected)
        .withSnowflake()
        .ok(expectedSnowFlake);
  }
  /** Trailing trim with regex metacharacters: Hive escapes them in the
   * anchored-suffix {@code REGEXP_REPLACE}; BigQuery/Snowflake use
   * {@code RTRIM}; Spark keeps standard {@code TRIM} syntax. */
  @Test public void testTrimWithTrailingSpecialCharacter() {
    final String query = "SELECT TRIM(TRAILING '$A@*' from '$A@*AABC$@*AADCAA$A@*')\n"
        + "from \"foodmart\".\"reserve_employee\"";
    final String expected = "SELECT RTRIM('$A@*AABC$@*AADCAA$A@*', '$A@*')\n"
        + "FROM foodmart.reserve_employee";
    final String expectedHS =
        "SELECT REGEXP_REPLACE('$A@*AABC$@*AADCAA$A@*', '(\\$A\\@\\*)*$', '')\n"
            + "FROM foodmart.reserve_employee";
    final String expectedSpark = "SELECT TRIM(TRAILING '$A@*' FROM '$A@*AABC$@*AADCAA$A@*')\n"
        + "FROM foodmart.reserve_employee";
    final String expectedSnowFlake = "SELECT RTRIM('$A@*AABC$@*AADCAA$A@*', '$A@*')\n"
        + "FROM \"foodmart\".\"reserve_employee\"";
    sql(query)
        .withHive()
        .ok(expectedHS)
        .withSpark()
        .ok(expectedSpark)
        .withBigQuery()
        .ok(expected)
        .withSnowflake()
        .ok(expectedSnowFlake);
  }
  /** Both-ends trim with regex metacharacters: Hive builds an escaped
   * prefix-or-suffix alternation in {@code REGEXP_REPLACE};
   * BigQuery/Snowflake use two-argument {@code TRIM}; Spark keeps standard
   * {@code TRIM} syntax. */
  @Test public void testTrimWithBothSpecialCharacter() {
    final String query = "SELECT TRIM(BOTH '$@*A' from '$@*AABC$@*AADCAA$@*A')\n"
        + "from \"foodmart\".\"reserve_employee\"";
    final String expected = "SELECT TRIM('$@*AABC$@*AADCAA$@*A', '$@*A')\n"
        + "FROM foodmart.reserve_employee";
    final String expectedHS =
        "SELECT REGEXP_REPLACE('$@*AABC$@*AADCAA$@*A',"
            + " '^(\\$\\@\\*A)*|(\\$\\@\\*A)*$', '')\n"
            + "FROM foodmart.reserve_employee";
    final String expectedSpark = "SELECT TRIM(BOTH '$@*A' FROM '$@*AABC$@*AADCAA$@*A')\nFROM "
        + "foodmart.reserve_employee";
    final String expectedSnowFlake = "SELECT TRIM('$@*AABC$@*AADCAA$@*A', '$@*A')\n"
        + "FROM \"foodmart\".\"reserve_employee\"";
    sql(query)
        .withHive()
        .ok(expectedHS)
        .withSpark()
        .ok(expectedSpark)
        .withBigQuery()
        .ok(expected)
        .withSnowflake()
        .ok(expectedSnowFlake);
  }
  /** Verifies {@code TRIM} applied to a function argument (a SUBSTRING call):
   * each dialect rewrites the inner SUBSTRING to its local form
   * ({@code SUBSTR} for BigQuery/Snowflake, {@code SUBSTRING} for
   * Hive/Spark); Spark additionally expands TRIM to
   * {@code TRIM(BOTH ' ' FROM ...)}. */
  @Test public void testTrimWithFunction() {
    final String query = "SELECT TRIM(substring(\"full_name\" from 2 for 3))\n"
        + "from \"foodmart\".\"reserve_employee\"";
    final String expected = "SELECT TRIM(SUBSTR(full_name, 2, 3))\n"
        + "FROM foodmart.reserve_employee";
    final String expectedHS =
        "SELECT TRIM(SUBSTRING(full_name, 2, 3))\n"
            + "FROM foodmart.reserve_employee";
    final String expectedSpark = "SELECT TRIM(BOTH ' ' FROM SUBSTRING(full_name, 2, 3))\n"
        + "FROM foodmart.reserve_employee";
    final String expectedSnowFlake = "SELECT TRIM(SUBSTR(\"full_name\", 2, 3))\n"
        + "FROM \"foodmart\".\"reserve_employee\"";
    sql(query)
        .withHive()
        .ok(expectedHS)
        .withSpark()
        .ok(expectedSpark)
        .withBigQuery()
        .ok(expected)
        .withSnowflake()
        .ok(expectedSnowFlake);
  }
  /** Hive rewrites trailing-character TRIM to {@code REGEXP_REPLACE}; Spark
   * keeps the standard syntax.
   *
   * <p>NOTE(review): "Tailing" in the method name looks like a typo for
   * "Trailing"; left unchanged to avoid renaming a test. */
  @Test void testHiveAndSparkTrimWithTailingChar() {
    final String query = "SELECT TRIM(TRAILING 'a' from 'abcd')\n"
        + "from \"foodmart\".\"reserve_employee\"";
    final String expected = "SELECT REGEXP_REPLACE('abcd', '(a)*$', '')\n"
        + "FROM foodmart.reserve_employee";
    final String expectedSpark = "SELECT TRIM(TRAILING 'a' FROM 'abcd')\n"
        + "FROM foodmart.reserve_employee";
    sql(query)
        .withHive()
        .ok(expected)
        .withSpark()
        .ok(expectedSpark);
  }
  /** BigQuery converts both-ends TRIM with a custom trim set to two-argument
   * {@code TRIM}. */
  @Test void testBqTrimWithBothSpecialCharacter() {
    final String query = "SELECT TRIM(BOTH '$@*A' from '$@*AABC$@*AADCAA$@*A')\n"
        + "from \"foodmart\".\"reserve_employee\"";
    final String expected = "SELECT TRIM('$@*AABC$@*AADCAA$@*A', '$@*A')\n"
        + "FROM foodmart.reserve_employee";
    sql(query)
        .withBigQuery()
        .ok(expected);
  }
  /** Hive rewrites both-ends TRIM with regex metacharacters to an escaped
   * {@code REGEXP_REPLACE}; Spark keeps the standard syntax. */
  @Test void testHiveAndSparkTrimWithBothSpecialCharacter() {
    final String query = "SELECT TRIM(BOTH '$@*A' from '$@*AABC$@*AADCAA$@*A')\n"
        + "from \"foodmart\".\"reserve_employee\"";
    final String expected = "SELECT REGEXP_REPLACE('$@*AABC$@*AADCAA$@*A',"
        + " '^(\\$\\@\\*A)*|(\\$\\@\\*A)*$', '')\n"
        + "FROM foodmart.reserve_employee";
    final String expectedSpark = "SELECT TRIM(BOTH '$@*A' FROM '$@*AABC$@*AADCAA$@*A')\n"
        + "FROM foodmart.reserve_employee";
    sql(query)
        .withHive()
        .ok(expected)
        .withSpark()
        .ok(expectedSpark);
  }
  /** Test case for
   * <a href="https://issues.apache.org/jira/browse/CALCITE-2715">[CALCITE-2715]
   * MS SQL Server does not support character set as part of data type</a>.
   *
   * <p>The CAST target must unparse as plain {@code VARCHAR(10)}, with no
   * {@code CHARACTER SET} clause. */
  @Test void testMssqlCharacterSet() {
    String query = "select \"hire_date\", cast(\"hire_date\" as varchar(10))\n"
        + "from \"foodmart\".\"reserve_employee\"";
    final String expected = "SELECT [hire_date], CAST([hire_date] AS VARCHAR(10))\n"
        + "FROM [foodmart].[reserve_employee]";
    sql(query).withMssql().ok(expected);
  }
  /**
   * Tests that IN can be un-parsed.
   *
   * <p>This cannot be tested using "sql", because Calcite's SQL parser
   * replaces INs with ORs or sub-queries.
   */
  @Test void testUnparseIn1() {
    // A single-element IN list degenerates to a plain equality predicate.
    final Function<RelBuilder, RelNode> relFn = b ->
        b.scan("EMP")
            .filter(b.in(b.field("DEPTNO"), b.literal(21)))
            .build();
    final String expectedSql = "SELECT *\n"
        + "FROM \"scott\".\"EMP\"\n"
        + "WHERE \"DEPTNO\" = 21";
    relFn(relFn).ok(expectedSql);
  }
  /** A multi-element IN list unparses as {@code IN (...)}. */
  @Test void testUnparseIn2() {
    final Function<RelBuilder, RelNode> relFn = b -> b
        .scan("EMP")
        .filter(b.in(b.field("DEPTNO"), b.literal(20), b.literal(21)))
        .build();
    final String expectedSql = "SELECT *\n"
        + "FROM \"scott\".\"EMP\"\n"
        + "WHERE \"DEPTNO\" IN (20, 21)";
    relFn(relFn).ok(expectedSql);
  }
  /** IN over a ROW constructor with one element degenerates to a ROW
   * equality. */
  @Test void testUnparseInStruct1() {
    final Function<RelBuilder, RelNode> relFn = b ->
        b.scan("EMP")
            .filter(
                b.in(
                    b.call(SqlStdOperatorTable.ROW,
                        b.field("DEPTNO"), b.field("JOB")),
                    b.call(SqlStdOperatorTable.ROW, b.literal(1),
                        b.literal("PRESIDENT"))))
            .build();
    final String expectedSql = "SELECT *\n"
        + "FROM \"scott\".\"EMP\"\n"
        + "WHERE ROW(\"DEPTNO\", \"JOB\") = ROW(1, 'PRESIDENT')";
    relFn(relFn).ok(expectedSql);
  }
  /** IN over a ROW constructor with several elements unparses as
   * {@code IN (ROW(...), ROW(...))}. */
  @Test void testUnparseInStruct2() {
    final Function<RelBuilder, RelNode> relFn = b ->
        b.scan("EMP")
            .filter(
                b.in(
                    b.call(SqlStdOperatorTable.ROW,
                        b.field("DEPTNO"), b.field("JOB")),
                    b.call(SqlStdOperatorTable.ROW, b.literal(1),
                        b.literal("PRESIDENT")),
                    b.call(SqlStdOperatorTable.ROW, b.literal(2),
                        b.literal("PRESIDENT"))))
            .build();
    final String expectedSql = "SELECT *\n"
        + "FROM \"scott\".\"EMP\"\n"
        + "WHERE ROW(\"DEPTNO\", \"JOB\") IN (ROW(1, 'PRESIDENT'), ROW(2, 'PRESIDENT'))";
    relFn(relFn).ok(expectedSql);
  }
  /** Verifies that a SINGLE_VALUE aggregate over a scalar sub-query unparses
   * as a parenthesized scalar SELECT in both BigQuery (unquoted identifiers)
   * and Snowflake (double-quoted identifiers). */
  @Test public void testScalarQueryWithBigQuery() {
    final RelBuilder builder = relBuilder();
    // Scalar sub-query: SELECT DEPTNO FROM DEPT WHERE DEPTNO = 40
    final RelNode scalarQueryRel = builder.
        scan("DEPT")
        .filter(builder.equals(builder.field("DEPTNO"), builder.literal(40)))
        .project(builder.field(0))
        .build();
    // Outer query groups EMP by EMPNO and carries the scalar sub-query as an
    // aggregate call alongside a COUNT.
    final RelNode root = builder
        .scan("EMP")
        .aggregate(builder.groupKey("EMPNO"),
            builder.aggregateCall(SqlStdOperatorTable.SINGLE_VALUE,
                RexSubQuery.scalar(scalarQueryRel)).as("SC_DEPTNO"),
            builder.count(builder.literal(1)).as("pid"))
        .build();
    final String expectedBigQuery = "SELECT EMPNO, (((SELECT DEPTNO\n"
        + "FROM scott.DEPT\n"
        + "WHERE DEPTNO = 40))) AS SC_DEPTNO, COUNT(1) AS pid\n"
        + "FROM scott.EMP\n"
        + "GROUP BY EMPNO";
    final String expectedSnowflake = "SELECT \"EMPNO\", (((SELECT \"DEPTNO\"\n"
        + "FROM \"scott\".\"DEPT\"\n"
        + "WHERE \"DEPTNO\" = 40))) AS \"SC_DEPTNO\", COUNT(1) AS \"pid\"\n"
        + "FROM \"scott\".\"EMP\"\n"
        + "GROUP BY \"EMPNO\"";
    assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()),
        isLinux(expectedBigQuery));
    assertThat(toSql(root, DatabaseProduct.SNOWFLAKE.getDialect()),
        isLinux(expectedSnowflake));
  }
@Test void testSelectQueryWithLimitClause() {
String query = "select \"product_id\" from \"product\" limit 100 offset 10";
final String expected = "SELECT product_id\n"
+ "FROM foodmart.product\n"
+ "LIMIT 100\nOFFSET 10";
sql(query).withHive().ok(expected);
}
@Test void testPositionFunctionForHive() {
final String query = "select position('A' IN 'ABC') from \"product\"";
final String expected = "SELECT INSTR('ABC', 'A')\n"
+ "FROM foodmart.product";
sql(query).withHive().ok(expected);
}
@Test void testPositionFunctionForBigQuery() {
final String query = "select position('A' IN 'ABC') from \"product\"";
final String expected = "SELECT STRPOS('ABC', 'A')\n"
+ "FROM foodmart.product";
sql(query).withBigQuery().ok(expected);
}
  /** A backslash inside a POSITION argument must be doubled in BigQuery
   * output, since BigQuery strings treat backslash as an escape. */
  @Test void testPositionFunctionWithSlashForBigQuery() {
    final String query = "select position('\\,' IN 'ABC') from \"product\"";
    final String expected = "SELECT STRPOS('ABC', '\\\\,')\n"
        + "FROM foodmart.product";
    sql(query).withBigQuery().ok(expected);
  }
  /** Tests that we escape single-quotes in character literals using back-slash
   * in BigQuery. The norm is to escape single-quotes with single-quotes. */
  @Test void testCharLiteralForBigQuery() {
    final String query = "select 'that''s all folks!' from \"product\"";
    final String expectedPostgresql = "SELECT 'that''s all folks!'\n"
        + "FROM \"foodmart\".\"product\"";
    final String expectedBigQuery = "SELECT 'that\\'s all folks!'\n"
        + "FROM foodmart.product";
    sql(query)
        .withPostgresql().ok(expectedPostgresql)
        .withBigQuery().ok(expectedBigQuery);
  }
  /** Verifies per-dialect identifier quoting: BigQuery back-quotes only
   * identifiers that need it ({@code fo$ur}, reserved {@code ignore}); MySQL
   * back-quotes everything; PostgreSQL double-quotes everything; Oracle is
   * PostgreSQL minus the {@code AS} keyword. */
  @Test void testIdentifier() {
    // Note that IGNORE is reserved in BigQuery but not in standard SQL
    final String query = "select *\n"
        + "from (\n"
        + "  select 1 as \"one\", 2 as \"tWo\", 3 as \"THREE\",\n"
        + "    4 as \"fo$ur\", 5 as \"ignore\"\n"
        + "  from \"foodmart\".\"days\") as \"my$table\"\n"
        + "where \"one\" < \"tWo\" and \"THREE\" < \"fo$ur\"";
    final String expectedBigQuery = "SELECT *\n"
        + "FROM (SELECT 1 AS one, 2 AS tWo, 3 AS THREE,"
        + " 4 AS `fo$ur`, 5 AS `ignore`\n"
        + "FROM foodmart.days) AS t\n"
        + "WHERE one < tWo AND THREE < `fo$ur`";
    final String expectedMysql = "SELECT *\n"
        + "FROM (SELECT 1 AS `one`, 2 AS `tWo`, 3 AS `THREE`,"
        + " 4 AS `fo$ur`, 5 AS `ignore`\n"
        + "FROM `foodmart`.`days`) AS `t`\n"
        + "WHERE `one` < `tWo` AND `THREE` < `fo$ur`";
    final String expectedPostgresql = "SELECT *\n"
        + "FROM (SELECT 1 AS \"one\", 2 AS \"tWo\", 3 AS \"THREE\","
        + " 4 AS \"fo$ur\", 5 AS \"ignore\"\n"
        + "FROM \"foodmart\".\"days\") AS \"t\"\n"
        + "WHERE \"one\" < \"tWo\" AND \"THREE\" < \"fo$ur\"";
    // Oracle omits the AS keyword for aliases but otherwise matches Postgres.
    final String expectedOracle = expectedPostgresql.replace(" AS ", " ");
    sql(query)
        .withBigQuery().ok(expectedBigQuery)
        .withMysql().ok(expectedMysql)
        .withOracle().ok(expectedOracle)
        .withPostgresql().ok(expectedPostgresql);
  }
@Test void testModFunctionForHive() {
final String query = "select mod(11,3) from \"product\"";
final String expected = "SELECT 11 % 3\n"
+ "FROM foodmart.product";
sql(query).withHive().ok(expected);
}
  /** BigQuery requires an explicit set quantifier: plain {@code UNION}
   * becomes {@code UNION DISTINCT}. */
  @Test void testUnionOperatorForBigQuery() {
    final String query = "select mod(11,3) from \"product\"\n"
        + "UNION select 1 from \"product\"";
    final String expected = "SELECT MOD(11, 3)\n"
        + "FROM foodmart.product\n"
        + "UNION DISTINCT\n"
        + "SELECT 1\n"
        + "FROM foodmart.product";
    sql(query).withBigQuery().ok(expected);
  }
  /** {@code UNION ALL} is supported by BigQuery as-is. */
  @Test void testUnionAllOperatorForBigQuery() {
    final String query = "select mod(11,3) from \"product\"\n"
        + "UNION ALL select 1 from \"product\"";
    final String expected = "SELECT MOD(11, 3)\n"
        + "FROM foodmart.product\n"
        + "UNION ALL\n"
        + "SELECT 1\n"
        + "FROM foodmart.product";
    sql(query).withBigQuery().ok(expected);
  }
  /** Plain {@code INTERSECT} becomes {@code INTERSECT DISTINCT} in
   * BigQuery. */
  @Test void testIntersectOperatorForBigQuery() {
    final String query = "select mod(11,3) from \"product\"\n"
        + "INTERSECT select 1 from \"product\"";
    final String expected = "SELECT MOD(11, 3)\n"
        + "FROM foodmart.product\n"
        + "INTERSECT DISTINCT\n"
        + "SELECT 1\n"
        + "FROM foodmart.product";
    sql(query).withBigQuery().ok(expected);
  }
  /** ORDER BY over an INTERSECT sub-query wraps the set operation in a
   * sub-select; BigQuery's null-collation is emulated with
   * {@code IS NULL} in the sort keys. */
  @Test public void testIntersectOrderBy() {
    final String query = "select * from (select \"product_id\" from \"product\"\n"
        + "INTERSECT select \"product_id\" from \"product\") t order by t.\"product_id\"";
    final String expectedBigQuery = "SELECT *\n"
        + "FROM (SELECT product_id\n"
        + "FROM foodmart.product\n"
        + "INTERSECT DISTINCT\n"
        + "SELECT product_id\n"
        + "FROM foodmart.product) AS t1\n"
        + "ORDER BY product_id IS NULL, product_id";
    sql(query).withBigQuery().ok(expectedBigQuery);
  }
  /** A WHERE clause over an INTERSECT sub-query keeps the wrapping
   * sub-select. */
  @Test public void testIntersectWithWhere() {
    final String query = "select * from (select \"product_id\" from \"product\"\n"
        + "INTERSECT select \"product_id\" from \"product\") t where t.\"product_id\"<=14";
    final String expectedBigQuery = "SELECT *\n"
        + "FROM (SELECT product_id\n"
        + "FROM foodmart.product\n"
        + "INTERSECT DISTINCT\n"
        + "SELECT product_id\n"
        + "FROM foodmart.product) AS t1\n"
        + "WHERE product_id <= 14";
    sql(query).withBigQuery().ok(expectedBigQuery);
  }
  /** GROUP BY over a distinct INTERSECT is redundant, so the planner drops
   * it and the INTERSECT unparses bare. */
  @Test public void testIntersectWithGroupBy() {
    final String query = "select * from (select \"product_id\" from \"product\"\n"
        + "INTERSECT select \"product_id\" from \"product\") t group by \"product_id\"";
    final String expectedBigQuery = "SELECT product_id\n"
        + "FROM foodmart.product\n"
        + "INTERSECT DISTINCT\n"
        + "SELECT product_id\n"
        + "FROM foodmart.product";
    sql(query).withBigQuery().ok(expectedBigQuery);
  }
  /** Plain {@code EXCEPT} becomes {@code EXCEPT DISTINCT} in BigQuery. */
  @Test public void testExceptOperatorForBigQuery() {
    final String query = "select mod(11,3) from \"product\"\n"
        + "EXCEPT select 1 from \"product\"";
    final String expected = "SELECT MOD(11, 3)\n"
        + "FROM foodmart.product\n"
        + "EXCEPT DISTINCT\n"
        + "SELECT 1\n"
        + "FROM foodmart.product";
    sql(query).withBigQuery().ok(expected);
  }
  /** DESC NULLS FIRST: Hive/BigQuery emulate with an {@code IS NULL} sort
   * key, MSSQL with a CASE expression; Spark supports the syntax natively. */
  @Test public void testSelectQueryWithOrderByDescAndNullsFirstShouldBeEmulated() {
    final String query = "select \"product_id\" from \"product\"\n"
        + "order by \"product_id\" desc nulls first";
    // Hive and MSSQL do not support NULLS FIRST, so need to emulate
    final String expected = "SELECT product_id\n"
        + "FROM foodmart.product\n"
        + "ORDER BY product_id IS NULL DESC, product_id DESC";
    final String expectedSpark = "SELECT product_id\n"
        + "FROM foodmart.product\n"
        + "ORDER BY product_id DESC NULLS FIRST";
    final String expectedMssql = "SELECT [product_id]\n"
        + "FROM [foodmart].[product]\n"
        + "ORDER BY CASE WHEN [product_id] IS NULL THEN 0 ELSE 1 END, [product_id] DESC";
    sql(query)
        .withSpark()
        .ok(expectedSpark)
        .withHive()
        .ok(expected)
        .withBigQuery()
        .ok(expected)
        .withMssql()
        .ok(expectedMssql);
  }
  /** Same DESC NULLS FIRST emulation, exercised through explicit dialect
   * instances rather than the fluent with-dialect helpers. */
  @Test void testSelectOrderByDescNullsFirst() {
    final String query = "select \"product_id\" from \"product\"\n"
        + "order by \"product_id\" desc nulls first";
    // Hive and MSSQL do not support NULLS FIRST, so need to emulate
    final String expected = "SELECT product_id\n"
        + "FROM foodmart.product\n"
        + "ORDER BY product_id IS NULL DESC, product_id DESC";
    final String mssqlExpected = "SELECT [product_id]\n"
        + "FROM [foodmart].[product]\n"
        + "ORDER BY CASE WHEN [product_id] IS NULL DESC THEN 0 ELSE 1 END, [product_id] DESC";
    sql(query)
        .dialect(HiveSqlDialect.DEFAULT).ok(expected)
        .dialect(MssqlSqlDialect.DEFAULT).ok(mssqlExpected);
  }
  /** ASC NULLS LAST emulation via explicit Hive/MSSQL dialect instances. */
  @Test void testSelectOrderByAscNullsLast() {
    final String query = "select \"product_id\" from \"product\"\n"
        + "order by \"product_id\" nulls last";
    // Hive and MSSQL do not support NULLS LAST, so need to emulate
    final String expected = "SELECT product_id\n"
        + "FROM foodmart.product\n"
        + "ORDER BY product_id IS NULL, product_id";
    final String mssqlExpected = "SELECT [product_id]\n"
        + "FROM [foodmart].[product]\n"
        + "ORDER BY CASE WHEN [product_id] IS NULL THEN 1 ELSE 0 END, [product_id]";
    sql(query)
        .dialect(HiveSqlDialect.DEFAULT).ok(expected)
        .dialect(MssqlSqlDialect.DEFAULT).ok(mssqlExpected);
  }
  /** ASC NULLS LAST: Hive/BigQuery emulate, MSSQL uses CASE, Spark is
   * native. */
  @Test public void testSelectQueryWithOrderByAscAndNullsLastShouldBeEmulated() {
    final String query = "select \"product_id\" from \"product\"\n"
        + "order by \"product_id\" nulls last";
    // Hive and MSSQL do not support NULLS LAST, so need to emulate
    final String expected = "SELECT product_id\n"
        + "FROM foodmart.product\n"
        + "ORDER BY product_id IS NULL, product_id";
    final String expectedSpark = "SELECT product_id\nFROM foodmart.product\n"
        + "ORDER BY product_id NULLS LAST";
    final String expectedMssql = "SELECT [product_id]\n"
        + "FROM [foodmart].[product]\n"
        + "ORDER BY CASE WHEN [product_id] IS NULL THEN 1 ELSE 0 END, [product_id]";
    sql(query)
        .withSpark()
        .ok(expectedSpark)
        .withHive()
        .ok(expected)
        .withBigQuery()
        .ok(expected)
        .withMssql()
        .ok(expectedMssql);
  }
  /** ASC NULLS FIRST matches the default low-null collation, so no
   * emulation is added. */
  @Test public void testSelectQueryWithOrderByAscNullsFirstShouldNotAddNullEmulation() {
    final String query = "select \"product_id\" from \"product\"\n"
        + "order by \"product_id\" nulls first";
    // Hive and MSSQL do not support NULLS FIRST, but nulls sort low, so no
    // need to emulate
    final String expected = "SELECT product_id\n"
        + "FROM foodmart.product\n"
        + "ORDER BY product_id";
    final String expectedMssql = "SELECT [product_id]\n"
        + "FROM [foodmart].[product]\n"
        + "ORDER BY [product_id]";
    sql(query)
        .withSpark()
        .ok(expected)
        .withHive()
        .ok(expected)
        .withBigQuery()
        .ok(expected)
        .withMssql()
        .ok(expectedMssql);
  }
  /** Same ASC NULLS FIRST no-emulation case via explicit dialect
   * instances. */
  @Test void testSelectOrderByAscNullsFirst() {
    final String query = "select \"product_id\" from \"product\"\n"
        + "order by \"product_id\" nulls first";
    // Hive and MSSQL do not support NULLS FIRST, but nulls sort low, so no
    // need to emulate
    final String expected = "SELECT product_id\n"
        + "FROM foodmart.product\n"
        + "ORDER BY product_id";
    final String mssqlExpected = "SELECT [product_id]\n"
        + "FROM [foodmart].[product]\n"
        + "ORDER BY [product_id]";
    sql(query)
        .dialect(HiveSqlDialect.DEFAULT).ok(expected)
        .dialect(MssqlSqlDialect.DEFAULT).ok(mssqlExpected);
  }
  /** DESC NULLS LAST matches the default low-null collation under DESC, so
   * no emulation is added. */
  @Test public void testSelectQueryWithOrderByDescNullsLastShouldNotAddNullEmulation() {
    final String query = "select \"product_id\" from \"product\"\n"
        + "order by \"product_id\" desc nulls last";
    // Hive and MSSQL do not support NULLS LAST, but nulls sort low, so no
    // need to emulate
    final String expected = "SELECT product_id\n"
        + "FROM foodmart.product\n"
        + "ORDER BY product_id DESC";
    final String expectedMssql = "SELECT [product_id]\n"
        + "FROM [foodmart].[product]\n"
        + "ORDER BY [product_id] DESC";
    sql(query)
        .withSpark()
        .ok(expected)
        .withHive()
        .ok(expected)
        .withBigQuery()
        .ok(expected)
        .withMssql()
        .ok(expectedMssql);
  }
  /** Same DESC NULLS LAST no-emulation case via explicit dialect
   * instances. */
  @Test void testSelectOrderByDescNullsLast() {
    final String query = "select \"product_id\" from \"product\"\n"
        + "order by \"product_id\" desc nulls last";
    // Hive and MSSQL do not support NULLS LAST, but nulls sort low, so no
    // need to emulate
    final String expected = "SELECT product_id\n"
        + "FROM foodmart.product\n"
        + "ORDER BY product_id DESC";
    final String mssqlExpected = "SELECT [product_id]\n"
        + "FROM [foodmart].[product]\n"
        + "ORDER BY [product_id] DESC";
    sql(query)
        .dialect(HiveSqlDialect.DEFAULT).ok(expected)
        .dialect(MssqlSqlDialect.DEFAULT).ok(mssqlExpected);
  }
  /** Null-collation emulation also applies inside window ORDER BY: DESC
   * NULLS FIRST gains an {@code IS NULL} key for Hive. */
  @Test void testHiveSelectQueryWithOverDescAndNullsFirstShouldBeEmulated() {
    final String query = "SELECT row_number() over "
        + "(order by \"hire_date\" desc nulls first) FROM \"employee\"";
    final String expected = "SELECT ROW_NUMBER() "
        + "OVER (ORDER BY hire_date IS NULL DESC, hire_date DESC)\n"
        + "FROM foodmart.employee";
    sql(query).dialect(HiveSqlDialect.DEFAULT).ok(expected);
  }
  /** Window ORDER BY ASC NULLS LAST is emulated for Hive. */
  @Test void testHiveSelectQueryWithOverAscAndNullsLastShouldBeEmulated() {
    final String query = "SELECT row_number() over "
        + "(order by \"hire_date\" nulls last) FROM \"employee\"";
    final String expected = "SELECT ROW_NUMBER() OVER (ORDER BY hire_date IS NULL, hire_date)\n"
        + "FROM foodmart.employee";
    sql(query).dialect(HiveSqlDialect.DEFAULT).ok(expected);
  }
  /** Window ORDER BY ASC NULLS FIRST matches Hive's default collation, so
   * no emulation is added. */
  @Test void testHiveSelectQueryWithOverAscNullsFirstShouldNotAddNullEmulation() {
    final String query = "SELECT row_number() over "
        + "(order by \"hire_date\" nulls first) FROM \"employee\"";
    final String expected = "SELECT ROW_NUMBER() OVER (ORDER BY hire_date)\n"
        + "FROM foodmart.employee";
    sql(query).dialect(HiveSqlDialect.DEFAULT).ok(expected);
  }
  /** {@code CHAR_LENGTH} is rendered as {@code LENGTH} for Hive, BigQuery,
   * Spark and Snowflake. */
  @Test void testCharLengthFunctionEmulationForHiveAndBigqueryAndSpark() {
    final String query = "select char_length('xyz') from \"product\"";
    final String expected = "SELECT LENGTH('xyz')\n"
        + "FROM foodmart.product";
    final String expectedSnowFlake = "SELECT LENGTH('xyz')\n"
        + "FROM \"foodmart\".\"product\"";
    sql(query)
        .withHive()
        .ok(expected)
        .withBigQuery()
        .ok(expected)
        .withSpark()
        .ok(expected)
        .withSnowflake()
        .ok(expectedSnowFlake);
  }
  /** {@code CHARACTER_LENGTH} gets the same {@code LENGTH} rewrite as
   * {@code CHAR_LENGTH}. */
  @Test public void testCharacterLengthFunctionEmulationForHiveAndBigqueryAndSpark() {
    final String query = "select character_length('xyz') from \"product\"";
    final String expected = "SELECT LENGTH('xyz')\n"
        + "FROM foodmart.product";
    final String expectedSnowFlake = "SELECT LENGTH('xyz')\n"
        + "FROM \"foodmart\".\"product\"";
    sql(query)
        .withHive()
        .ok(expected)
        .withBigQuery()
        .ok(expected)
        .withSpark()
        .ok(expected)
        .withSnowflake()
        .ok(expectedSnowFlake);
  }
  /** Comma-style SUBSTRING with a length passes through unchanged in Hive. */
  @Test void testHiveSubstringWithLength() {
    String query = "SELECT SUBSTRING('ABC', 2, 3)"
        + "from \"foodmart\".\"reserve_employee\"";
    final String expected = "SELECT SUBSTRING('ABC', 2, 3)\n"
        + "FROM foodmart.reserve_employee";
    sql(query).withHive().ok(expected);
  }
  /** ANSI {@code SUBSTRING(x FROM n)} is rewritten to comma syntax for
   * Hive. */
  @Test void testHiveSubstringWithANSI() {
    String query = "SELECT SUBSTRING('ABC' FROM 2)"
        + "from \"foodmart\".\"reserve_employee\"";
    final String expected = "SELECT SUBSTRING('ABC', 2)\n"
        + "FROM foodmart.reserve_employee";
    sql(query).withHive().ok(expected);
  }
  /** ANSI {@code SUBSTRING(x FROM n FOR m)} is rewritten to comma syntax
   * for Hive. */
  @Test void testHiveSubstringWithANSIAndLength() {
    String query = "SELECT SUBSTRING('ABC' FROM 2 FOR 3)"
        + "from \"foodmart\".\"reserve_employee\"";
    final String expected = "SELECT SUBSTRING('ABC', 2, 3)\n"
        + "FROM foodmart.reserve_employee";
    sql(query).withHive().ok(expected);
  }
  /** Window ORDER BY DESC NULLS LAST matches Hive's default collation
   * under DESC, so no emulation is added. */
  @Test void testHiveSelectQueryWithOverDescNullsLastShouldNotAddNullEmulation() {
    final String query = "SELECT row_number() over "
        + "(order by \"hire_date\" desc nulls last) FROM \"employee\"";
    final String expected = "SELECT ROW_NUMBER() OVER (ORDER BY hire_date DESC)\n"
        + "FROM foodmart.employee";
    sql(query).dialect(HiveSqlDialect.DEFAULT).ok(expected);
  }
  @Test void testMysqlCastToBigint() {
    // MySQL does not allow cast to BIGINT; instead cast to SIGNED.
    final String query = "select cast(\"product_id\" as bigint) from \"product\"";
    final String expected = "SELECT CAST(`product_id` AS SIGNED)\n"
        + "FROM `foodmart`.`product`";
    sql(query).withMysql().ok(expected);
  }
  @Test void testMysqlCastToInteger() {
    // MySQL does not allow cast to INTEGER; instead cast to SIGNED.
    final String query = "select \"employee_id\",\n"
        + "  cast(\"salary_paid\" * 10000 as integer)\n"
        + "from \"salary\"";
    final String expected = "SELECT `employee_id`,"
        + " CAST(`salary_paid` * 10000 AS SIGNED)\n"
        + "FROM `foodmart`.`salary`";
    sql(query).withMysql().ok(expected);
  }
  /** Hive 2.1+ supports NULLS FIRST/LAST natively, so no emulation is
   * needed when the dialect reports database version >= 2.1. */
  @Test void testHiveSelectQueryWithOrderByDescAndHighNullsWithVersionGreaterThanOrEq21() {
    final HiveSqlDialect hive2_1Dialect =
        new HiveSqlDialect(HiveSqlDialect.DEFAULT_CONTEXT
            .withDatabaseMajorVersion(2)
            .withDatabaseMinorVersion(1)
            .withNullCollation(NullCollation.LOW));
    final HiveSqlDialect hive2_2_Dialect =
        new HiveSqlDialect(HiveSqlDialect.DEFAULT_CONTEXT
            .withDatabaseMajorVersion(2)
            .withDatabaseMinorVersion(2)
            .withNullCollation(NullCollation.LOW));
    final String query = "select \"product_id\" from \"product\"\n"
        + "order by \"product_id\" desc nulls first";
    final String expected = "SELECT product_id\n"
        + "FROM foodmart.product\n"
        + "ORDER BY product_id DESC NULLS FIRST";
    sql(query).dialect(hive2_1Dialect).ok(expected);
    sql(query).dialect(hive2_2_Dialect).ok(expected);
  }
  /** Same Hive 2.1+ native NULLS FIRST support applied to window ORDER BY. */
  @Test void testHiveSelectQueryWithOverDescAndHighNullsWithVersionGreaterThanOrEq21() {
    final HiveSqlDialect hive2_1Dialect =
        new HiveSqlDialect(SqlDialect.EMPTY_CONTEXT
            .withDatabaseMajorVersion(2)
            .withDatabaseMinorVersion(1)
            .withNullCollation(NullCollation.LOW));
    final HiveSqlDialect hive2_2_Dialect =
        new HiveSqlDialect(SqlDialect.EMPTY_CONTEXT
            .withDatabaseMajorVersion(2)
            .withDatabaseMinorVersion(2)
            .withNullCollation(NullCollation.LOW));
    final String query = "SELECT row_number() over "
        + "(order by \"hire_date\" desc nulls first) FROM \"employee\"";
    final String expected = "SELECT ROW_NUMBER() OVER (ORDER BY hire_date DESC NULLS FIRST)\n"
        + "FROM foodmart.employee";
    sql(query).dialect(hive2_1Dialect).ok(expected);
    sql(query).dialect(hive2_2_Dialect).ok(expected);
  }
  /** Hive 2.0 lacks NULLS FIRST/LAST, so the {@code IS NULL} emulation is
   * used for ORDER BY. */
  @Test void testHiveSelectQueryWithOrderByDescAndHighNullsWithVersion20() {
    final HiveSqlDialect hive2_1_0_Dialect =
        new HiveSqlDialect(HiveSqlDialect.DEFAULT_CONTEXT
            .withDatabaseMajorVersion(2)
            .withDatabaseMinorVersion(0)
            .withNullCollation(NullCollation.LOW));
    final String query = "select \"product_id\" from \"product\"\n"
        + "order by \"product_id\" desc nulls first";
    final String expected = "SELECT product_id\n"
        + "FROM foodmart.product\n"
        + "ORDER BY product_id IS NULL DESC, product_id DESC";
    sql(query).dialect(hive2_1_0_Dialect).ok(expected);
  }
  /** Hive 2.0 emulation applied to window ORDER BY. */
  @Test void testHiveSelectQueryWithOverDescAndHighNullsWithVersion20() {
    final HiveSqlDialect hive2_1_0_Dialect =
        new HiveSqlDialect(SqlDialect.EMPTY_CONTEXT
            .withDatabaseMajorVersion(2)
            .withDatabaseMinorVersion(0)
            .withNullCollation(NullCollation.LOW));
    final String query = "SELECT row_number() over "
        + "(order by \"hire_date\" desc nulls first) FROM \"employee\"";
    final String expected = "SELECT ROW_NUMBER() OVER "
        + "(ORDER BY hire_date IS NULL DESC, hire_date DESC)\n"
        + "FROM foodmart.employee";
    sql(query).dialect(hive2_1_0_Dialect).ok(expected);
  }
  /** JethroData emulates DESC NULLS FIRST by prepending the plain sort key
   * before the DESC key. */
  @Test void testJethroDataSelectQueryWithOrderByDescAndNullsFirstShouldBeEmulated() {
    final String query = "select \"product_id\" from \"product\"\n"
        + "order by \"product_id\" desc nulls first";
    final String expected = "SELECT \"product_id\"\n"
        + "FROM \"foodmart\".\"product\"\n"
        + "ORDER BY \"product_id\", \"product_id\" DESC";
    sql(query).dialect(jethroDataSqlDialect()).ok(expected);
  }
  /** Same JethroData emulation applied to window ORDER BY. */
  @Test void testJethroDataSelectQueryWithOverDescAndNullsFirstShouldBeEmulated() {
    final String query = "SELECT row_number() over "
        + "(order by \"hire_date\" desc nulls first) FROM \"employee\"";
    final String expected = "SELECT ROW_NUMBER() OVER "
        + "(ORDER BY \"hire_date\", \"hire_date\" DESC)\n"
        + "FROM \"foodmart\".\"employee\"";
    sql(query).dialect(jethroDataSqlDialect()).ok(expected);
  }
  /** MySQL emulates DESC NULLS FIRST with an {@code IS NULL DESC} sort
   * key. */
  @Test void testMySqlSelectQueryWithOrderByDescAndNullsFirstShouldBeEmulated() {
    final String query = "select \"product_id\" from \"product\"\n"
        + "order by \"product_id\" desc nulls first";
    final String expected = "SELECT `product_id`\n"
        + "FROM `foodmart`.`product`\n"
        + "ORDER BY `product_id` IS NULL DESC, `product_id` DESC";
    sql(query).dialect(MysqlSqlDialect.DEFAULT).ok(expected);
  }
  /** Same MySQL DESC NULLS FIRST emulation applied to window ORDER BY. */
  @Test void testMySqlSelectQueryWithOverDescAndNullsFirstShouldBeEmulated() {
    final String query = "SELECT row_number() over "
        + "(order by \"hire_date\" desc nulls first) FROM \"employee\"";
    final String expected = "SELECT ROW_NUMBER() OVER "
        + "(ORDER BY `hire_date` IS NULL DESC, `hire_date` DESC)\n"
        + "FROM `foodmart`.`employee`";
    sql(query).dialect(MysqlSqlDialect.DEFAULT).ok(expected);
  }
  /** MySQL emulates ASC NULLS LAST with an {@code IS NULL} sort key. */
  @Test void testMySqlSelectQueryWithOrderByAscAndNullsLastShouldBeEmulated() {
    final String query = "select \"product_id\" from \"product\"\n"
        + "order by \"product_id\" nulls last";
    final String expected = "SELECT `product_id`\n"
        + "FROM `foodmart`.`product`\n"
        + "ORDER BY `product_id` IS NULL, `product_id`";
    sql(query).dialect(MysqlSqlDialect.DEFAULT).ok(expected);
  }
  /** Same MySQL ASC NULLS LAST emulation applied to window ORDER BY. */
  @Test void testMySqlSelectQueryWithOverAscAndNullsLastShouldBeEmulated() {
    final String query = "SELECT row_number() over "
        + "(order by \"hire_date\" nulls last) FROM \"employee\"";
    final String expected = "SELECT ROW_NUMBER() OVER "
        + "(ORDER BY `hire_date` IS NULL, `hire_date`)\n"
        + "FROM `foodmart`.`employee`";
    sql(query).dialect(MysqlSqlDialect.DEFAULT).ok(expected);
  }
  /** ASC NULLS FIRST matches MySQL's default collation, so no emulation. */
  @Test void testMySqlSelectQueryWithOrderByAscNullsFirstShouldNotAddNullEmulation() {
    final String query = "select \"product_id\" from \"product\"\n"
        + "order by \"product_id\" nulls first";
    final String expected = "SELECT `product_id`\n"
        + "FROM `foodmart`.`product`\n"
        + "ORDER BY `product_id`";
    sql(query).dialect(MysqlSqlDialect.DEFAULT).ok(expected);
  }
  /** Window ASC NULLS FIRST matches MySQL's default collation, so no
   * emulation. */
  @Test void testMySqlSelectQueryWithOverAscNullsFirstShouldNotAddNullEmulation() {
    final String query = "SELECT row_number() "
        + "over (order by \"hire_date\" nulls first) FROM \"employee\"";
    final String expected = "SELECT ROW_NUMBER() OVER (ORDER BY `hire_date`)\n"
        + "FROM `foodmart`.`employee`";
    sql(query).dialect(MysqlSqlDialect.DEFAULT).ok(expected);
  }
  /** DESC NULLS LAST matches MySQL's default collation under DESC, so no
   * emulation. */
  @Test void testMySqlSelectQueryWithOrderByDescNullsLastShouldNotAddNullEmulation() {
    final String query = "select \"product_id\" from \"product\"\n"
        + "order by \"product_id\" desc nulls last";
    final String expected = "SELECT `product_id`\n"
        + "FROM `foodmart`.`product`\n"
        + "ORDER BY `product_id` DESC";
    sql(query).dialect(MysqlSqlDialect.DEFAULT).ok(expected);
  }
  /** Window DESC NULLS LAST matches MySQL's default collation under DESC,
   * so no emulation. */
  @Test void testMySqlSelectQueryWithOverDescNullsLastShouldNotAddNullEmulation() {
    final String query = "SELECT row_number() "
        + "over (order by \"hire_date\" desc nulls last) FROM \"employee\"";
    final String expected = "SELECT ROW_NUMBER() OVER (ORDER BY `hire_date` DESC)\n"
        + "FROM `foodmart`.`employee`";
    sql(query).dialect(MysqlSqlDialect.DEFAULT).ok(expected);
  }
  /** MySQL CAST to VARCHAR becomes CAST to CHAR; a precision within the
   * limit is preserved. */
  @Test void testMySqlCastToVarcharWithLessThanMaxPrecision() {
    final String query = "select cast(\"product_id\" as varchar(50)), \"product_id\" "
        + "from \"product\" ";
    final String expected = "SELECT CAST(`product_id` AS CHAR(50)), `product_id`\n"
        + "FROM `foodmart`.`product`";
    sql(query).withMysql().ok(expected);
  }
  /** MySQL CAST to TIMESTAMP becomes CAST to DATETIME. */
  @Test void testMySqlCastToTimestamp() {
    final String query = "select * from \"employee\" where \"hire_date\" - "
        + "INTERVAL '19800' SECOND(5) > cast(\"hire_date\" as TIMESTAMP) ";
    final String expected = "SELECT *\nFROM `foodmart`.`employee`"
        + "\nWHERE (`hire_date` - INTERVAL '19800' SECOND) > CAST(`hire_date` AS DATETIME)";
    sql(query).withMysql().ok(expected);
  }
  /** MySQL CAST to VARCHAR with precision above the CHAR limit is capped
   * at 255. */
  @Test void testMySqlCastToVarcharWithGreaterThanMaxPrecision() {
    final String query = "select cast(\"product_id\" as varchar(500)), \"product_id\" "
        + "from \"product\" ";
    final String expected = "SELECT CAST(`product_id` AS CHAR(255)), `product_id`\n"
        + "FROM `foodmart`.`product`";
    sql(query).withMysql().ok(expected);
  }
  /** With HIGH null collation, ASC NULLS LAST is the default, so no
   * emulation. */
  @Test void testMySqlWithHighNullsSelectWithOrderByAscNullsLastAndNoEmulation() {
    final String query = "select \"product_id\" from \"product\"\n"
        + "order by \"product_id\" nulls last";
    final String expected = "SELECT `product_id`\n"
        + "FROM `foodmart`.`product`\n"
        + "ORDER BY `product_id`";
    sql(query).dialect(mySqlDialect(NullCollation.HIGH)).ok(expected);
  }
  /** HIGH null collation: window ASC NULLS LAST needs no emulation. */
  @Test void testMySqlWithHighNullsSelectWithOverAscNullsLastAndNoEmulation() {
    final String query = "SELECT row_number() "
        + "over (order by \"hire_date\" nulls last) FROM \"employee\"";
    final String expected = "SELECT ROW_NUMBER() OVER (ORDER BY `hire_date`)\n"
        + "FROM `foodmart`.`employee`";
    sql(query).dialect(mySqlDialect(NullCollation.HIGH)).ok(expected);
  }
  /** HIGH null collation: ASC NULLS FIRST is emulated with
   * {@code IS NULL DESC}. */
  @Test void testMySqlWithHighNullsSelectWithOrderByAscNullsFirstAndNullEmulation() {
    final String query = "select \"product_id\" from \"product\"\n"
        + "order by \"product_id\" nulls first";
    final String expected = "SELECT `product_id`\n"
        + "FROM `foodmart`.`product`\n"
        + "ORDER BY `product_id` IS NULL DESC, `product_id`";
    sql(query).dialect(mySqlDialect(NullCollation.HIGH)).ok(expected);
  }
  /** HIGH null collation: window ASC NULLS FIRST is emulated. */
  @Test void testMySqlWithHighNullsSelectWithOverAscNullsFirstAndNullEmulation() {
    final String query = "SELECT row_number() "
        + "over (order by \"hire_date\" nulls first) FROM \"employee\"";
    final String expected = "SELECT ROW_NUMBER() "
        + "OVER (ORDER BY `hire_date` IS NULL DESC, `hire_date`)\n"
        + "FROM `foodmart`.`employee`";
    sql(query).dialect(mySqlDialect(NullCollation.HIGH)).ok(expected);
  }
  /** HIGH null collation: DESC NULLS FIRST is the default, so no
   * emulation. */
  @Test void testMySqlWithHighNullsSelectWithOrderByDescNullsFirstAndNoEmulation() {
    final String query = "select \"product_id\" from \"product\"\n"
        + "order by \"product_id\" desc nulls first";
    final String expected = "SELECT `product_id`\n"
        + "FROM `foodmart`.`product`\n"
        + "ORDER BY `product_id` DESC";
    sql(query).dialect(mySqlDialect(NullCollation.HIGH)).ok(expected);
  }
  /** HIGH null collation: window DESC NULLS FIRST needs no emulation. */
  @Test void testMySqlWithHighNullsSelectWithOverDescNullsFirstAndNoEmulation() {
    final String query = "SELECT row_number() "
        + "over (order by \"hire_date\" desc nulls first) FROM \"employee\"";
    final String expected = "SELECT ROW_NUMBER() OVER (ORDER BY `hire_date` DESC)\n"
        + "FROM `foodmart`.`employee`";
    sql(query).dialect(mySqlDialect(NullCollation.HIGH)).ok(expected);
  }
  /** HIGH null collation: DESC NULLS LAST is emulated with
   * {@code IS NULL}. */
  @Test void testMySqlWithHighNullsSelectWithOrderByDescNullsLastAndNullEmulation() {
    final String query = "select \"product_id\" from \"product\"\n"
        + "order by \"product_id\" desc nulls last";
    final String expected = "SELECT `product_id`\n"
        + "FROM `foodmart`.`product`\n"
        + "ORDER BY `product_id` IS NULL, `product_id` DESC";
    sql(query).dialect(mySqlDialect(NullCollation.HIGH)).ok(expected);
  }
  /** HIGH null collation: window DESC NULLS LAST is emulated. */
  @Test void testMySqlWithHighNullsSelectWithOverDescNullsLastAndNullEmulation() {
    final String query = "SELECT row_number() "
        + "over (order by \"hire_date\" desc nulls last) FROM \"employee\"";
    final String expected = "SELECT ROW_NUMBER() "
        + "OVER (ORDER BY `hire_date` IS NULL, `hire_date` DESC)\n"
        + "FROM `foodmart`.`employee`";
    sql(query).dialect(mySqlDialect(NullCollation.HIGH)).ok(expected);
  }
@Test void testMySqlWithFirstNullsSelectWithOrderByDescAndNullsFirstShouldNotBeEmulated() {
final String query = "select \"product_id\" from \"product\"\n"
+ "order by \"product_id\" desc nulls first";
final String expected = "SELECT `product_id`\n"
+ "FROM `foodmart`.`product`\n"
+ "ORDER BY `product_id` DESC";
sql(query).dialect(mySqlDialect(NullCollation.FIRST)).ok(expected);
}
@Test void testMySqlWithFirstNullsSelectWithOverDescAndNullsFirstShouldNotBeEmulated() {
final String query = "SELECT row_number() "
+ "over (order by \"hire_date\" desc nulls first) FROM \"employee\"";
final String expected = "SELECT ROW_NUMBER() OVER (ORDER BY `hire_date` DESC)\n"
+ "FROM `foodmart`.`employee`";
sql(query).dialect(mySqlDialect(NullCollation.FIRST)).ok(expected);
}
@Test void testMySqlWithFirstNullsSelectWithOrderByAscAndNullsFirstShouldNotBeEmulated() {
final String query = "select \"product_id\" from \"product\"\n"
+ "order by \"product_id\" nulls first";
final String expected = "SELECT `product_id`\n"
+ "FROM `foodmart`.`product`\n"
+ "ORDER BY `product_id`";
sql(query).dialect(mySqlDialect(NullCollation.FIRST)).ok(expected);
}
@Test void testMySqlWithFirstNullsSelectWithOverAscAndNullsFirstShouldNotBeEmulated() {
final String query = "SELECT row_number() "
+ "over (order by \"hire_date\" nulls first) FROM \"employee\"";
final String expected = "SELECT ROW_NUMBER() OVER (ORDER BY `hire_date`)\n"
+ "FROM `foodmart`.`employee`";
sql(query).dialect(mySqlDialect(NullCollation.FIRST)).ok(expected);
}
@Test void testMySqlWithFirstNullsSelectWithOrderByDescAndNullsLastShouldBeEmulated() {
final String query = "select \"product_id\" from \"product\"\n"
+ "order by \"product_id\" desc nulls last";
final String expected = "SELECT `product_id`\n"
+ "FROM `foodmart`.`product`\n"
+ "ORDER BY `product_id` IS NULL, `product_id` DESC";
sql(query).dialect(mySqlDialect(NullCollation.FIRST)).ok(expected);
}
@Test void testMySqlWithFirstNullsSelectWithOverDescAndNullsLastShouldBeEmulated() {
final String query = "SELECT row_number() "
+ "over (order by \"hire_date\" desc nulls last) FROM \"employee\"";
final String expected = "SELECT ROW_NUMBER() "
+ "OVER (ORDER BY `hire_date` IS NULL, `hire_date` DESC)\n"
+ "FROM `foodmart`.`employee`";
sql(query).dialect(mySqlDialect(NullCollation.FIRST)).ok(expected);
}
@Test void testMySqlWithFirstNullsSelectWithOrderByAscAndNullsLastShouldBeEmulated() {
final String query = "select \"product_id\" from \"product\"\n"
+ "order by \"product_id\" nulls last";
final String expected = "SELECT `product_id`\n"
+ "FROM `foodmart`.`product`\n"
+ "ORDER BY `product_id` IS NULL, `product_id`";
sql(query).dialect(mySqlDialect(NullCollation.FIRST)).ok(expected);
}
@Test void testMySqlWithFirstNullsSelectWithOverAscAndNullsLastShouldBeEmulated() {
final String query = "SELECT row_number() "
+ "over (order by \"hire_date\" nulls last) FROM \"employee\"";
final String expected = "SELECT ROW_NUMBER() "
+ "OVER (ORDER BY `hire_date` IS NULL, `hire_date`)\n"
+ "FROM `foodmart`.`employee`";
sql(query).dialect(mySqlDialect(NullCollation.FIRST)).ok(expected);
}
@Test void testMySqlWithLastNullsSelectWithOrderByDescAndNullsFirstShouldBeEmulated() {
final String query = "select \"product_id\" from \"product\"\n"
+ "order by \"product_id\" desc nulls first";
final String expected = "SELECT `product_id`\n"
+ "FROM `foodmart`.`product`\n"
+ "ORDER BY `product_id` IS NULL DESC, `product_id` DESC";
sql(query).dialect(mySqlDialect(NullCollation.LAST)).ok(expected);
}
@Test void testMySqlWithLastNullsSelectWithOverDescAndNullsFirstShouldBeEmulated() {
final String query = "SELECT row_number() "
+ "over (order by \"hire_date\" desc nulls first) FROM \"employee\"";
final String expected = "SELECT ROW_NUMBER() "
+ "OVER (ORDER BY `hire_date` IS NULL DESC, `hire_date` DESC)\n"
+ "FROM `foodmart`.`employee`";
sql(query).dialect(mySqlDialect(NullCollation.LAST)).ok(expected);
}
@Test void testMySqlWithLastNullsSelectWithOrderByAscAndNullsFirstShouldBeEmulated() {
final String query = "select \"product_id\" from \"product\"\n"
+ "order by \"product_id\" nulls first";
final String expected = "SELECT `product_id`\n"
+ "FROM `foodmart`.`product`\n"
+ "ORDER BY `product_id` IS NULL DESC, `product_id`";
sql(query).dialect(mySqlDialect(NullCollation.LAST)).ok(expected);
}
@Test void testMySqlWithLastNullsSelectWithOverAscAndNullsFirstShouldBeEmulated() {
final String query = "SELECT row_number() "
+ "over (order by \"hire_date\" nulls first) FROM \"employee\"";
final String expected = "SELECT ROW_NUMBER() "
+ "OVER (ORDER BY `hire_date` IS NULL DESC, `hire_date`)\n"
+ "FROM `foodmart`.`employee`";
sql(query).dialect(mySqlDialect(NullCollation.LAST)).ok(expected);
}
@Test void testMySqlWithLastNullsSelectWithOrderByDescAndNullsLastShouldNotBeEmulated() {
final String query = "select \"product_id\" from \"product\"\n"
+ "order by \"product_id\" desc nulls last";
final String expected = "SELECT `product_id`\n"
+ "FROM `foodmart`.`product`\n"
+ "ORDER BY `product_id` DESC";
sql(query).dialect(mySqlDialect(NullCollation.LAST)).ok(expected);
}
@Test void testMySqlWithLastNullsSelectWithOverDescAndNullsLastShouldNotBeEmulated() {
final String query = "SELECT row_number() "
+ "over (order by \"hire_date\" desc nulls last) FROM \"employee\"";
final String expected = "SELECT ROW_NUMBER() OVER (ORDER BY `hire_date` DESC)\n"
+ "FROM `foodmart`.`employee`";
sql(query).dialect(mySqlDialect(NullCollation.LAST)).ok(expected);
}
@Test void testMySqlWithLastNullsSelectWithOrderByAscAndNullsLastShouldNotBeEmulated() {
final String query = "select \"product_id\" from \"product\"\n"
+ "order by \"product_id\" nulls last";
final String expected = "SELECT `product_id`\n"
+ "FROM `foodmart`.`product`\n"
+ "ORDER BY `product_id`";
sql(query).dialect(mySqlDialect(NullCollation.LAST)).ok(expected);
}
@Test void testMySqlWithLastNullsSelectWithOverAscAndNullsLastShouldNotBeEmulated() {
final String query = "SELECT row_number() over "
+ "(order by \"hire_date\" nulls last) FROM \"employee\"";
final String expected = "SELECT ROW_NUMBER() OVER (ORDER BY `hire_date`)\n"
+ "FROM `foodmart`.`employee`";
sql(query).dialect(mySqlDialect(NullCollation.LAST)).ok(expected);
}
@Test void testCastToVarchar() {
String query = "select cast(\"product_id\" as varchar) from \"product\"";
final String expectedClickHouse = "SELECT CAST(`product_id` AS `String`)\n"
+ "FROM `foodmart`.`product`";
final String expectedMysql = "SELECT CAST(`product_id` AS CHAR)\n"
+ "FROM `foodmart`.`product`";
sql(query)
.withClickHouse()
.ok(expectedClickHouse)
.withMysql()
.ok(expectedMysql);
}
  // LIMIT/OFFSET/FETCH handling: the default dialect uses ANSI
  // OFFSET ... FETCH NEXT; ClickHouse uses "LIMIT offset, count"; Presto uses
  // separate OFFSET and LIMIT clauses; BigQuery uses LIMIT ... OFFSET;
  // MSSQL chooses TOP vs FETCH depending on product version.
  @Test void testSelectQueryWithLimitClauseWithoutOrder() {
    String query = "select \"product_id\" from \"product\" limit 100 offset 10";
    final String expected = "SELECT \"product_id\"\n"
        + "FROM \"foodmart\".\"product\"\n"
        + "OFFSET 10 ROWS\n"
        + "FETCH NEXT 100 ROWS ONLY";
    final String expectedClickHouse = "SELECT `product_id`\n"
        + "FROM `foodmart`.`product`\n"
        + "LIMIT 10, 100";
    sql(query)
        .ok(expected)
        .withClickHouse()
        .ok(expectedClickHouse);
    final String expectedPresto = "SELECT \"product_id\"\n"
        + "FROM \"foodmart\".\"product\"\n"
        + "OFFSET 10\n"
        + "LIMIT 100";
    sql(query)
        .ok(expected)
        .withPresto()
        .ok(expectedPresto);
  }
  @Test void testSelectQueryWithLimitOffsetClause() {
    String query = "select \"product_id\" from \"product\"\n"
        + "order by \"net_weight\" asc limit 100 offset 10";
    final String expected = "SELECT \"product_id\", \"net_weight\"\n"
        + "FROM \"foodmart\".\"product\"\n"
        + "ORDER BY \"net_weight\"\n"
        + "OFFSET 10 ROWS\n"
        + "FETCH NEXT 100 ROWS ONLY";
    // BigQuery uses LIMIT/OFFSET, and nulls sort low by default
    final String expectedBigQuery = "SELECT product_id, net_weight\n"
        + "FROM foodmart.product\n"
        + "ORDER BY net_weight IS NULL, net_weight\n"
        + "LIMIT 100\n"
        + "OFFSET 10";
    sql(query).ok(expected)
        .withBigQuery().ok(expectedBigQuery);
  }
  // Dynamic parameters ("?") must pass through unparsing unchanged.
  @Test void testSelectQueryWithParameters() {
    String query = "select * from \"product\" "
        + "where \"product_id\" = ? "
        + "AND ? >= \"shelf_width\"";
    final String expected = "SELECT *\n"
        + "FROM \"foodmart\".\"product\"\n"
        + "WHERE \"product_id\" = ? "
        + "AND ? >= \"shelf_width\"";
    sql(query).ok(expected);
  }
  @Test void testSelectQueryWithFetchOffsetClause() {
    String query = "select \"product_id\" from \"product\"\n"
        + "order by \"product_id\" offset 10 rows fetch next 100 rows only";
    final String expected = "SELECT \"product_id\"\n"
        + "FROM \"foodmart\".\"product\"\n"
        + "ORDER BY \"product_id\"\n"
        + "OFFSET 10 ROWS\n"
        + "FETCH NEXT 100 ROWS ONLY";
    sql(query).ok(expected);
  }
  @Test void testSelectQueryWithFetchClause() {
    String query = "select \"product_id\"\n"
        + "from \"product\"\n"
        + "order by \"product_id\" fetch next 100 rows only";
    final String expected = "SELECT \"product_id\"\n"
        + "FROM \"foodmart\".\"product\"\n"
        + "ORDER BY \"product_id\"\n"
        + "FETCH NEXT 100 ROWS ONLY";
    // MSSQL before version 11 has no FETCH clause, so TOP (n) is used instead.
    final String expectedMssql10 = "SELECT TOP (100) [product_id]\n"
        + "FROM [foodmart].[product]\n"
        + "ORDER BY CASE WHEN [product_id] IS NULL THEN 1 ELSE 0 END, [product_id]";
    final String expectedMssql = "SELECT [product_id]\n"
        + "FROM [foodmart].[product]\n"
        + "ORDER BY CASE WHEN [product_id] IS NULL THEN 1 ELSE 0 END, [product_id]\n"
        + "FETCH NEXT 100 ROWS ONLY";
    final String expectedSybase = "SELECT TOP (100) product_id\n"
        + "FROM foodmart.product\n"
        + "ORDER BY product_id";
    sql(query).ok(expected)
        .withMssql(10).ok(expectedMssql10)
        .withMssql(11).ok(expectedMssql)
        .withMssql(14).ok(expectedMssql)
        .withSybase().ok(expectedSybase);
  }
  // WHERE / GROUP BY / ORDER BY combinations unparse in standard clause order.
  @Test void testSelectQueryComplex() {
    String query =
        "select count(*), \"units_per_case\" from \"product\" where \"cases_per_pallet\" > 100 "
            + "group by \"product_id\", \"units_per_case\" order by \"units_per_case\" desc";
    final String expected = "SELECT COUNT(*), \"units_per_case\"\n"
        + "FROM \"foodmart\".\"product\"\n"
        + "WHERE \"cases_per_pallet\" > 100\n"
        + "GROUP BY \"product_id\", \"units_per_case\"\n"
        + "ORDER BY \"units_per_case\" DESC";
    sql(query).ok(expected);
  }
  @Test void testSelectQueryWithGroup() {
    String query = "select"
        + " count(*), sum(\"employee_id\") from \"reserve_employee\" "
        + "where \"hire_date\" > '2015-01-01' "
        + "and (\"position_title\" = 'SDE' or \"position_title\" = 'SDM') "
        + "group by \"store_id\", \"position_title\"";
    final String expected = "SELECT COUNT(*), SUM(\"employee_id\")\n"
        + "FROM \"foodmart\".\"reserve_employee\"\n"
        + "WHERE \"hire_date\" > '2015-01-01' "
        + "AND (\"position_title\" = 'SDE' OR \"position_title\" = 'SDM')\n"
        + "GROUP BY \"store_id\", \"position_title\"";
    sql(query).ok(expected);
  }
  // Multi-way INNER JOINs: aliases from the original query (s, c, p, pc) are
  // replaced with the underlying table names in the generated SQL.
  @Test void testSimpleJoin() {
    String query = "select *\n"
        + "from \"sales_fact_1997\" as s\n"
        + "join \"customer\" as c on s.\"customer_id\" = c.\"customer_id\"\n"
        + "join \"product\" as p on s.\"product_id\" = p.\"product_id\"\n"
        + "join \"product_class\" as pc\n"
        + "  on p.\"product_class_id\" = pc.\"product_class_id\"\n"
        + "where c.\"city\" = 'San Francisco'\n"
        + "and pc.\"product_department\" = 'Snacks'\n";
    final String expected = "SELECT *\n"
        + "FROM \"foodmart\".\"sales_fact_1997\"\n"
        + "INNER JOIN \"foodmart\".\"customer\" "
        + "ON \"sales_fact_1997\".\"customer_id\" = \"customer\""
        + ".\"customer_id\"\n"
        + "INNER JOIN \"foodmart\".\"product\" "
        + "ON \"sales_fact_1997\".\"product_id\" = \"product\".\"product_id\"\n"
        + "INNER JOIN \"foodmart\".\"product_class\" "
        + "ON \"product\".\"product_class_id\" = \"product_class\""
        + ".\"product_class_id\"\n"
        + "WHERE \"customer\".\"city\" = 'San Francisco' AND "
        + "\"product_class\".\"product_department\" = 'Snacks'";
    sql(query).ok(expected);
  }
  // JOIN ... USING: "SELECT *" expands to the full de-duplicated column list
  // (each USING column appears once), so the expected SQL enumerates every
  // column of all four tables explicitly.
  @Test void testSimpleJoinUsing() {
    String query = "select *\n"
        + "from \"sales_fact_1997\" as s\n"
        + "  join \"customer\" as c using (\"customer_id\")\n"
        + "  join \"product\" as p using (\"product_id\")\n"
        + "  join \"product_class\" as pc using (\"product_class_id\")\n"
        + "where c.\"city\" = 'San Francisco'\n"
        + "and pc.\"product_department\" = 'Snacks'\n";
    final String expected = "SELECT"
        + " \"product\".\"product_class_id\","
        + " \"sales_fact_1997\".\"product_id\","
        + " \"sales_fact_1997\".\"customer_id\","
        + " \"sales_fact_1997\".\"time_id\","
        + " \"sales_fact_1997\".\"promotion_id\","
        + " \"sales_fact_1997\".\"store_id\","
        + " \"sales_fact_1997\".\"store_sales\","
        + " \"sales_fact_1997\".\"store_cost\","
        + " \"sales_fact_1997\".\"unit_sales\","
        + " \"customer\".\"account_num\","
        + " \"customer\".\"lname\","
        + " \"customer\".\"fname\","
        + " \"customer\".\"mi\","
        + " \"customer\".\"address1\","
        + " \"customer\".\"address2\","
        + " \"customer\".\"address3\","
        + " \"customer\".\"address4\","
        + " \"customer\".\"city\","
        + " \"customer\".\"state_province\","
        + " \"customer\".\"postal_code\","
        + " \"customer\".\"country\","
        + " \"customer\".\"customer_region_id\","
        + " \"customer\".\"phone1\","
        + " \"customer\".\"phone2\","
        + " \"customer\".\"birthdate\","
        + " \"customer\".\"marital_status\","
        + " \"customer\".\"yearly_income\","
        + " \"customer\".\"gender\","
        + " \"customer\".\"total_children\","
        + " \"customer\".\"num_children_at_home\","
        + " \"customer\".\"education\","
        + " \"customer\".\"date_accnt_opened\","
        + " \"customer\".\"member_card\","
        + " \"customer\".\"occupation\","
        + " \"customer\".\"houseowner\","
        + " \"customer\".\"num_cars_owned\","
        + " \"customer\".\"fullname\","
        + " \"product\".\"brand_name\","
        + " \"product\".\"product_name\","
        + " \"product\".\"SKU\","
        + " \"product\".\"SRP\","
        + " \"product\".\"gross_weight\","
        + " \"product\".\"net_weight\","
        + " \"product\".\"recyclable_package\","
        + " \"product\".\"low_fat\","
        + " \"product\".\"units_per_case\","
        + " \"product\".\"cases_per_pallet\","
        + " \"product\".\"shelf_width\","
        + " \"product\".\"shelf_height\","
        + " \"product\".\"shelf_depth\","
        + " \"product_class\".\"product_subcategory\","
        + " \"product_class\".\"product_category\","
        + " \"product_class\".\"product_department\","
        + " \"product_class\".\"product_family\"\n"
        + "FROM \"foodmart\".\"sales_fact_1997\"\n"
        + "INNER JOIN \"foodmart\".\"customer\" "
        + "ON \"sales_fact_1997\".\"customer_id\" = \"customer\""
        + ".\"customer_id\"\n"
        + "INNER JOIN \"foodmart\".\"product\" "
        + "ON \"sales_fact_1997\".\"product_id\" = \"product\".\"product_id\"\n"
        + "INNER JOIN \"foodmart\".\"product_class\" "
        + "ON \"product\".\"product_class_id\" = \"product_class\""
        + ".\"product_class_id\"\n"
        + "WHERE \"customer\".\"city\" = 'San Francisco' AND "
        + "\"product_class\".\"product_department\" = 'Snacks'";
    sql(query).ok(expected);
  }
  /** Test case for
   * <a href="https://issues.apache.org/jira/browse/CALCITE-1636">[CALCITE-1636]
   * JDBC adapter generates wrong SQL for self join with sub-query</a>. */
  @Test void testSubQueryAlias() {
    String query = "select t1.\"customer_id\", t2.\"customer_id\"\n"
        + "from (select \"customer_id\" from \"sales_fact_1997\") as t1\n"
        + "inner join (select \"customer_id\" from \"sales_fact_1997\") t2\n"
        + "on t1.\"customer_id\" = t2.\"customer_id\"";
    // DB2: the two identical sub-queries get distinct aliases (t, t0) and the
    // second scan of sales_fact_1997 is disambiguated as sales_fact_19970.
    final String expected = "SELECT *\n"
        + "FROM (SELECT sales_fact_1997.customer_id\n"
        + "FROM foodmart.sales_fact_1997 AS sales_fact_1997) AS t\n"
        + "INNER JOIN (SELECT sales_fact_19970.customer_id\n"
        + "FROM foodmart.sales_fact_1997 AS sales_fact_19970) AS t0 ON t.customer_id = t0"
        + ".customer_id";
    sql(query).withDb2().ok(expected);
  }
  // Comma-syntax cross join round-trips as a comma-separated FROM list.
  @Test void testCartesianProductWithCommaSyntax() {
    String query = "select * from \"department\" , \"employee\"";
    String expected = "SELECT *\n"
        + "FROM \"foodmart\".\"department\",\n"
        + "\"foodmart\".\"employee\"";
    sql(query).ok(expected);
  }
  /** Test case for
   * <a href="https://issues.apache.org/jira/browse/CALCITE-2652">[CALCITE-2652]
   * SqlNode to SQL conversion fails if the join condition references a BOOLEAN
   * column</a>. */
  @Test void testJoinOnBoolean() {
    final String sql = "SELECT 1\n"
        + "from emps\n"
        + "join emp on (emp.deptno = emps.empno and manager)";
    final String s = sql(sql).schema(CalciteAssert.SchemaSpec.POST).exec();
    assertThat(s, notNullValue()); // sufficient that conversion did not throw
  }
  /** Test case for
   * <a href="https://issues.apache.org/jira/browse/CALCITE-4249">[CALCITE-4249]
   * JDBC adapter cannot translate NOT LIKE in join condition</a>. */
  @Test void testJoinOnNotLike() {
    // Build the relational expression directly (rather than parsing SQL) so
    // that NOT(LIKE ...) appears verbatim in the join condition.
    final Function<RelBuilder, RelNode> relFn = b ->
        b.scan("EMP")
            .scan("DEPT")
            .join(JoinRelType.LEFT,
                b.and(
                    b.equals(b.field(2, 0, "DEPTNO"),
                        b.field(2, 1, "DEPTNO")),
                    b.not(
                        b.call(SqlStdOperatorTable.LIKE,
                            b.field(2, 1, "DNAME"),
                            b.literal("ACCOUNTING")))))
            .build();
    final String expectedSql = "SELECT *\n"
        + "FROM \"scott\".\"EMP\"\n"
        + "LEFT JOIN \"scott\".\"DEPT\" "
        + "ON \"EMP\".\"DEPTNO\" = \"DEPT\".\"DEPTNO\" "
        + "AND \"DEPT\".\"DNAME\" NOT LIKE 'ACCOUNTING'";
    relFn(relFn).ok(expectedSql);
  }
  // INNER JOIN ... ON TRUE degenerates to a comma-list cross join ...
  @Test void testCartesianProductWithInnerJoinSyntax() {
    String query = "select * from \"department\"\n"
        + "INNER JOIN \"employee\" ON TRUE";
    String expected = "SELECT *\n"
        + "FROM \"foodmart\".\"department\",\n"
        + "\"foodmart\".\"employee\"";
    sql(query).ok(expected);
  }
  // ... but FULL JOIN ... ON TRUE must keep its explicit join form.
  @Test void testFullJoinOnTrueCondition() {
    String query = "select * from \"department\"\n"
        + "FULL JOIN \"employee\" ON TRUE";
    String expected = "SELECT *\n"
        + "FROM \"foodmart\".\"department\"\n"
        + "FULL JOIN \"foodmart\".\"employee\" ON TRUE";
    sql(query).ok(expected);
  }
  @Disabled
  @Test void testCaseOnSubQuery() {
    String query = "SELECT CASE WHEN v.g IN (0, 1) THEN 0 ELSE 1 END\n"
        + "FROM (SELECT * FROM \"foodmart\".\"customer\") AS c,\n"
        + "  (SELECT 0 AS g) AS v\n"
        + "GROUP BY v.g";
    final String expected = "SELECT"
        + " CASE WHEN \"t0\".\"G\" IN (0, 1) THEN 0 ELSE 1 END\n"
        + "FROM (SELECT *\nFROM \"foodmart\".\"customer\") AS \"t\",\n"
        + "(VALUES (0)) AS \"t0\" (\"G\")\n"
        + "GROUP BY \"t0\".\"G\"";
    sql(query).ok(expected);
  }
  // An IN sub-query is rewritten as an INNER JOIN against the grouped
  // (de-duplicated) sub-query.
  @Test void testSimpleIn() {
    String query = "select * from \"department\" where \"department_id\" in (\n"
        + "  select \"department_id\" from \"employee\"\n"
        + "  where \"store_id\" < 150)";
    final String expected = "SELECT "
        + "\"department\".\"department_id\", \"department\""
        + ".\"department_description\"\n"
        + "FROM \"foodmart\".\"department\"\nINNER JOIN "
        + "(SELECT \"department_id\"\nFROM \"foodmart\".\"employee\"\n"
        + "WHERE \"store_id\" < 150\nGROUP BY \"department_id\") AS \"t1\" "
        + "ON \"department\".\"department_id\" = \"t1\".\"department_id\"";
    sql(query).ok(expected);
  }
  /** Test case for
   * <a href="https://issues.apache.org/jira/browse/CALCITE-1332">[CALCITE-1332]
   * DB2 should always use aliases for tables: x.y.z AS z</a>. */
  // The following group verifies the DB2 convention: every table reference
  // carries an explicit alias ("foodmart.employee AS employee") and columns
  // are always alias-qualified.
  @Test void testDb2DialectJoinStar() {
    String query = "select * "
        + "from \"foodmart\".\"employee\" A "
        + "join \"foodmart\".\"department\" B\n"
        + "on A.\"department_id\" = B.\"department_id\"";
    final String expected = "SELECT *\n"
        + "FROM foodmart.employee AS employee\n"
        + "INNER JOIN foodmart.department AS department "
        + "ON employee.department_id = department.department_id";
    sql(query).withDb2().ok(expected);
  }
  @Test void testDb2DialectSelfJoinStar() {
    String query = "select * "
        + "from \"foodmart\".\"employee\" A join \"foodmart\".\"employee\" B\n"
        + "on A.\"department_id\" = B.\"department_id\"";
    // Self join: second occurrence gets the disambiguated alias employee0.
    final String expected = "SELECT *\n"
        + "FROM foodmart.employee AS employee\n"
        + "INNER JOIN foodmart.employee AS employee0 "
        + "ON employee.department_id = employee0.department_id";
    sql(query).withDb2().ok(expected);
  }
  @Test void testDb2DialectJoin() {
    String query = "select A.\"employee_id\", B.\"department_id\" "
        + "from \"foodmart\".\"employee\" A join \"foodmart\".\"department\" B\n"
        + "on A.\"department_id\" = B.\"department_id\"";
    final String expected = "SELECT"
        + " employee.employee_id, department.department_id\n"
        + "FROM foodmart.employee AS employee\n"
        + "INNER JOIN foodmart.department AS department "
        + "ON employee.department_id = department.department_id";
    sql(query).withDb2().ok(expected);
  }
  @Test void testDb2DialectSelfJoin() {
    String query = "select A.\"employee_id\", B.\"employee_id\" from "
        + "\"foodmart\".\"employee\" A join \"foodmart\".\"employee\" B\n"
        + "on A.\"department_id\" = B.\"department_id\"";
    final String expected = "SELECT"
        + " employee.employee_id, employee0.employee_id AS employee_id0\n"
        + "FROM foodmart.employee AS employee\n"
        + "INNER JOIN foodmart.employee AS employee0 "
        + "ON employee.department_id = employee0.department_id";
    sql(query).withDb2().ok(expected);
  }
  @Test void testDb2DialectWhere() {
    String query = "select A.\"employee_id\" from "
        + "\"foodmart\".\"employee\" A where A.\"department_id\" < 1000";
    final String expected = "SELECT employee.employee_id\n"
        + "FROM foodmart.employee AS employee\n"
        + "WHERE employee.department_id < 1000";
    sql(query).withDb2().ok(expected);
  }
  @Test void testDb2DialectJoinWhere() {
    String query = "select A.\"employee_id\", B.\"department_id\" "
        + "from \"foodmart\".\"employee\" A join \"foodmart\".\"department\" B\n"
        + "on A.\"department_id\" = B.\"department_id\" "
        + "where A.\"employee_id\" < 1000";
    final String expected = "SELECT"
        + " employee.employee_id, department.department_id\n"
        + "FROM foodmart.employee AS employee\n"
        + "INNER JOIN foodmart.department AS department "
        + "ON employee.department_id = department.department_id\n"
        + "WHERE employee.employee_id < 1000";
    sql(query).withDb2().ok(expected);
  }
  @Test void testDb2DialectSelfJoinWhere() {
    String query = "select A.\"employee_id\", B.\"employee_id\" from "
        + "\"foodmart\".\"employee\" A join \"foodmart\".\"employee\" B\n"
        + "on A.\"department_id\" = B.\"department_id\" "
        + "where B.\"employee_id\" < 2000";
    final String expected = "SELECT "
        + "employee.employee_id, employee0.employee_id AS employee_id0\n"
        + "FROM foodmart.employee AS employee\n"
        + "INNER JOIN foodmart.employee AS employee0 "
        + "ON employee.department_id = employee0.department_id\n"
        + "WHERE employee0.employee_id < 2000";
    sql(query).withDb2().ok(expected);
  }
  @Test void testDb2DialectCast() {
    String query = "select \"hire_date\", cast(\"hire_date\" as varchar(10)) "
        + "from \"foodmart\".\"reserve_employee\"";
    final String expected = "SELECT reserve_employee.hire_date, "
        + "CAST(reserve_employee.hire_date AS VARCHAR(10))\n"
        + "FROM foodmart.reserve_employee AS reserve_employee";
    sql(query).withDb2().ok(expected);
  }
  @Test void testDb2DialectSelectQueryWithGroupByHaving() {
    String query = "select count(*) from \"product\" "
        + "group by \"product_class_id\", \"product_id\" "
        + "having \"product_id\" > 10";
    final String expected = "SELECT COUNT(*)\n"
        + "FROM foodmart.product AS product\n"
        + "GROUP BY product.product_class_id, product.product_id\n"
        + "HAVING product.product_id > 10";
    sql(query).withDb2().ok(expected);
  }
  @Test void testDb2DialectSelectQueryComplex() {
    String query = "select count(*), \"units_per_case\" "
        + "from \"product\" where \"cases_per_pallet\" > 100 "
        + "group by \"product_id\", \"units_per_case\" "
        + "order by \"units_per_case\" desc";
    final String expected = "SELECT COUNT(*), product.units_per_case\n"
        + "FROM foodmart.product AS product\n"
        + "WHERE product.cases_per_pallet > 100\n"
        + "GROUP BY product.product_id, product.units_per_case\n"
        + "ORDER BY product.units_per_case DESC";
    sql(query).withDb2().ok(expected);
  }
  /** Test case for
   * <a href="https://issues.apache.org/jira/browse/CALCITE-4090">[CALCITE-4090]
   * DB2 aliasing breaks with a complex SELECT above a sub-query</a>. */
  @Test void testDb2SubQueryAlias() {
    String query = "select count(foo), \"units_per_case\"\n"
        + "from (select \"units_per_case\", \"cases_per_pallet\",\n"
        + "      \"product_id\", 1 as foo\n"
        + "  from \"product\")\n"
        + "where \"cases_per_pallet\" > 100\n"
        + "group by \"product_id\", \"units_per_case\"\n"
        + "order by \"units_per_case\" desc";
    final String expected = "SELECT COUNT(*), t.units_per_case\n"
        + "FROM (SELECT product.units_per_case, product.cases_per_pallet, "
        + "product.product_id, 1 AS FOO\n"
        + "FROM foodmart.product AS product) AS t\n"
        + "WHERE t.cases_per_pallet > 100\n"
        + "GROUP BY t.product_id, t.units_per_case\n"
        + "ORDER BY t.units_per_case DESC";
    sql(query).withDb2().ok(expected);
  }
  @Test void testDb2SubQueryFromUnion() {
    String query = "select count(foo), \"units_per_case\"\n"
        + "from (select \"units_per_case\", \"cases_per_pallet\",\n"
        + "      \"product_id\", 1 as foo\n"
        + "  from \"product\"\n"
        + "  where \"cases_per_pallet\" > 100\n"
        + "  union all\n"
        + "  select \"units_per_case\", \"cases_per_pallet\",\n"
        + "      \"product_id\", 1 as foo\n"
        + "  from \"product\"\n"
        + "  where \"cases_per_pallet\" < 100)\n"
        + "where \"cases_per_pallet\" > 100\n"
        + "group by \"product_id\", \"units_per_case\"\n"
        + "order by \"units_per_case\" desc";
    final String expected = "SELECT COUNT(*), t3.units_per_case\n"
        + "FROM (SELECT product.units_per_case, product.cases_per_pallet, "
        + "product.product_id, 1 AS FOO\n"
        + "FROM foodmart.product AS product\n"
        + "WHERE product.cases_per_pallet > 100\n"
        + "UNION ALL\n"
        + "SELECT product0.units_per_case, product0.cases_per_pallet, "
        + "product0.product_id, 1 AS FOO\n"
        + "FROM foodmart.product AS product0\n"
        + "WHERE product0.cases_per_pallet < 100) AS t3\n"
        + "WHERE t3.cases_per_pallet > 100\n"
        + "GROUP BY t3.product_id, t3.units_per_case\n"
        + "ORDER BY t3.units_per_case DESC";
    sql(query).withDb2().ok(expected);
  }
  @Test void testDb2DialectSelectQueryWithGroup() {
    String query = "select count(*), sum(\"employee_id\") "
        + "from \"reserve_employee\" "
        + "where \"hire_date\" > '2015-01-01' "
        + "and (\"position_title\" = 'SDE' or \"position_title\" = 'SDM') "
        + "group by \"store_id\", \"position_title\"";
    final String expected = "SELECT"
        + " COUNT(*), SUM(reserve_employee.employee_id)\n"
        + "FROM foodmart.reserve_employee AS reserve_employee\n"
        + "WHERE reserve_employee.hire_date > '2015-01-01' "
        + "AND (reserve_employee.position_title = 'SDE' OR "
        + "reserve_employee.position_title = 'SDM')\n"
        + "GROUP BY reserve_employee.store_id, reserve_employee.position_title";
    sql(query).withDb2().ok(expected);
  }
  /** Test case for
   * <a href="https://issues.apache.org/jira/browse/CALCITE-1372">[CALCITE-1372]
   * JDBC adapter generates SQL with wrong field names</a>. */
  @Test void testJoinPlan2() {
    final String sql = "SELECT v1.deptno, v2.deptno\n"
        + "FROM dept v1 LEFT JOIN emp v2 ON v1.deptno = v2.deptno\n"
        + "WHERE v2.job LIKE 'PRESIDENT'";
    final String expected = "SELECT \"DEPT\".\"DEPTNO\","
        + " \"EMP\".\"DEPTNO\" AS \"DEPTNO0\"\n"
        + "FROM \"SCOTT\".\"DEPT\"\n"
        + "LEFT JOIN \"SCOTT\".\"EMP\""
        + " ON \"DEPT\".\"DEPTNO\" = \"EMP\".\"DEPTNO\"\n"
        + "WHERE \"EMP\".\"JOB\" LIKE 'PRESIDENT'";
    // DB2 does not have implicit aliases, so generates explicit "AS DEPT"
    // and "AS EMP"
    final String expectedDb2 = "SELECT DEPT.DEPTNO, EMP.DEPTNO AS DEPTNO0\n"
        + "FROM SCOTT.DEPT AS DEPT\n"
        + "LEFT JOIN SCOTT.EMP AS EMP ON DEPT.DEPTNO = EMP.DEPTNO\n"
        + "WHERE EMP.JOB LIKE 'PRESIDENT'";
    sql(sql)
        .schema(CalciteAssert.SchemaSpec.JDBC_SCOTT)
        .ok(expected)
        .withDb2()
        .ok(expectedDb2);
  }
  /** Test case for
   * <a href="https://issues.apache.org/jira/browse/CALCITE-1422">[CALCITE-1422]
   * In JDBC adapter, allow IS NULL and IS NOT NULL operators in generated SQL
   * join condition</a>. */
  @Test void testSimpleJoinConditionWithIsNullOperators() {
    String query = "select *\n"
        + "from \"foodmart\".\"sales_fact_1997\" as \"t1\"\n"
        + "inner join \"foodmart\".\"customer\" as \"t2\"\n"
        + "on \"t1\".\"customer_id\" = \"t2\".\"customer_id\" or "
        + "(\"t1\".\"customer_id\" is null "
        + "and \"t2\".\"customer_id\" is null) or\n"
        + "\"t2\".\"occupation\" is null\n"
        + "inner join \"foodmart\".\"product\" as \"t3\"\n"
        + "on \"t1\".\"product_id\" = \"t3\".\"product_id\" or "
        + "(\"t1\".\"product_id\" is not null or "
        + "\"t3\".\"product_id\" is not null)";
    // Some of the "IS NULL" and "IS NOT NULL" are reduced to TRUE or FALSE,
    // but not all.
    String expected = "SELECT *\nFROM \"foodmart\".\"sales_fact_1997\"\n"
        + "INNER JOIN \"foodmart\".\"customer\" "
        + "ON \"sales_fact_1997\".\"customer_id\" = \"customer\".\"customer_id\""
        + " OR FALSE AND FALSE"
        + " OR \"customer\".\"occupation\" IS NULL\n"
        + "INNER JOIN \"foodmart\".\"product\" "
        + "ON \"sales_fact_1997\".\"product_id\" = \"product\".\"product_id\""
        + " OR TRUE"
        + " OR TRUE";
    // The hook prevents RelBuilder from removing "FALSE AND FALSE" and such
    try (Hook.Closeable ignore =
             Hook.REL_BUILDER_SIMPLIFY.addThread(Hook.propertyJ(false))) {
      sql(query).ok(expected);
    }
  }
  /** Test case for
   * <a href="https://issues.apache.org/jira/browse/CALCITE-1586">[CALCITE-1586]
   * JDBC adapter generates wrong SQL if UNION has more than two inputs</a>. */
  @Test void testThreeQueryUnion() {
    String query = "SELECT \"product_id\" FROM \"product\" "
        + " UNION ALL "
        + "SELECT \"product_id\" FROM \"sales_fact_1997\" "
        + " UNION ALL "
        + "SELECT \"product_class_id\" AS product_id FROM \"product_class\"";
    String expected = "SELECT \"product_id\"\n"
        + "FROM \"foodmart\".\"product\"\n"
        + "UNION ALL\n"
        + "SELECT \"product_id\"\n"
        + "FROM \"foodmart\".\"sales_fact_1997\"\n"
        + "UNION ALL\n"
        + "SELECT \"product_class_id\" AS \"PRODUCT_ID\"\n"
        + "FROM \"foodmart\".\"product_class\"";
    // Merge the nested binary unions into one n-ary union before unparsing.
    final RuleSet rules = RuleSets.ofList(CoreRules.UNION_MERGE);
    sql(query)
        .optimize(rules, null)
        .ok(expected);
  }
  /** Test case for
   * <a href="https://issues.apache.org/jira/browse/CALCITE-1800">[CALCITE-1800]
   * JDBC adapter fails to SELECT FROM a UNION query</a>. */
  @Test void testUnionWrappedInASelect() {
    final String query = "select sum(\n"
        + "  case when \"product_id\"=0 then \"net_weight\" else 0 end)"
        + " as net_weight\n"
        + "from (\n"
        + "  select \"product_id\", \"net_weight\"\n"
        + "  from \"product\"\n"
        + "  union all\n"
        + "  select \"product_id\", 0 as \"net_weight\"\n"
        + "  from \"sales_fact_1997\") t0";
    final String expected = "SELECT SUM(CASE WHEN \"product_id\" = 0"
        + " THEN \"net_weight\" ELSE 0 END) AS \"NET_WEIGHT\"\n"
        + "FROM (SELECT \"product_id\", \"net_weight\"\n"
        + "FROM \"foodmart\".\"product\"\n"
        + "UNION ALL\n"
        + "SELECT \"product_id\", 0 AS \"net_weight\"\n"
        + "FROM \"foodmart\".\"sales_fact_1997\") AS \"t1\"";
    sql(query).ok(expected);
  }
  /** Round-trips a broad range of SQL literals through the unparser.
   * Where two arguments are given, the literal is normalized (for example,
   * single-digit date/time fields are padded to two digits). */
  @Test void testLiteral() {
    // Date, time and timestamp literals.
    checkLiteral("DATE '1978-05-02'");
    checkLiteral2("DATE '1978-5-2'", "DATE '1978-05-02'");
    checkLiteral("TIME '12:34:56'");
    checkLiteral("TIME '12:34:56.78'");
    checkLiteral2("TIME '1:4:6.080'", "TIME '01:04:06.080'");
    checkLiteral("TIMESTAMP '1978-05-02 12:34:56.78'");
    checkLiteral2("TIMESTAMP '1978-5-2 2:4:6.80'",
        "TIMESTAMP '1978-05-02 02:04:06.80'");
    // Character literals, including an escaped quote and the empty string.
    checkLiteral("'I can''t explain'");
    checkLiteral("''");
    // Boolean and numeric literals.
    checkLiteral("TRUE");
    checkLiteral("123");
    checkLiteral("123.45");
    checkLiteral("-123.45");
    // Year-month interval literals (including a negative interval).
    checkLiteral("INTERVAL '1-2' YEAR TO MONTH");
    checkLiteral("INTERVAL -'1-2' YEAR TO MONTH");
    checkLiteral("INTERVAL '12-11' YEAR TO MONTH");
    checkLiteral("INTERVAL '1' YEAR");
    checkLiteral("INTERVAL '1' MONTH");
    // Day-time interval literals; field values are zero-padded on output.
    checkLiteral("INTERVAL '12' DAY");
    checkLiteral("INTERVAL -'12' DAY");
    checkLiteral2("INTERVAL '1 2' DAY TO HOUR",
        "INTERVAL '1 02' DAY TO HOUR");
    checkLiteral2("INTERVAL '1 2:10' DAY TO MINUTE",
        "INTERVAL '1 02:10' DAY TO MINUTE");
    checkLiteral2("INTERVAL '1 2:00' DAY TO MINUTE",
        "INTERVAL '1 02:00' DAY TO MINUTE");
    checkLiteral2("INTERVAL '1 2:34:56' DAY TO SECOND",
        "INTERVAL '1 02:34:56' DAY TO SECOND");
    checkLiteral2("INTERVAL '1 2:34:56.789' DAY TO SECOND",
        "INTERVAL '1 02:34:56.789' DAY TO SECOND");
    checkLiteral2("INTERVAL '1 2:34:56.78' DAY TO SECOND",
        "INTERVAL '1 02:34:56.78' DAY TO SECOND");
    checkLiteral2("INTERVAL '1 2:34:56.078' DAY TO SECOND",
        "INTERVAL '1 02:34:56.078' DAY TO SECOND");
    checkLiteral2("INTERVAL -'1 2:34:56.078' DAY TO SECOND",
        "INTERVAL -'1 02:34:56.078' DAY TO SECOND");
    checkLiteral2("INTERVAL '1 2:3:5.070' DAY TO SECOND",
        "INTERVAL '1 02:03:05.07' DAY TO SECOND");
    checkLiteral("INTERVAL '1:23' HOUR TO MINUTE");
    checkLiteral("INTERVAL '1:02' HOUR TO MINUTE");
    checkLiteral("INTERVAL -'1:02' HOUR TO MINUTE");
    checkLiteral("INTERVAL '1:23:45' HOUR TO SECOND");
    checkLiteral("INTERVAL '1:03:05' HOUR TO SECOND");
    checkLiteral("INTERVAL '1:23:45.678' HOUR TO SECOND");
    checkLiteral("INTERVAL '1:03:05.06' HOUR TO SECOND");
    checkLiteral("INTERVAL '12' MINUTE");
    checkLiteral("INTERVAL '12:34' MINUTE TO SECOND");
    checkLiteral("INTERVAL '12:34.567' MINUTE TO SECOND");
    checkLiteral("INTERVAL '12' SECOND");
    checkLiteral("INTERVAL '12.345' SECOND");
  }
  /** Asserts that a literal expression unparses to itself, i.e. survives a
   * lossless round trip through the converter. */
  private void checkLiteral(String expression) {
    checkLiteral2(expression, expression);
  }
  /** Asserts that {@code expression}, evaluated via a {@code VALUES} query on
   * the HSQLDB dialect, unparses to {@code expected} (which may be a
   * normalized form of the input). */
  private void checkLiteral2(String expression, String expected) {
    sql("VALUES " + expression)
        .withHsqldb()
        .ok("SELECT *\n"
            + "FROM (VALUES (" + expected + ")) AS t (EXPR$0)");
  }
/** Test case for
* <a href="https://issues.apache.org/jira/browse/CALCITE-2625">[CALCITE-2625]
* Removing Window Boundaries from SqlWindow of Aggregate Function which do
* not allow Framing</a>. */
@Test void testRowNumberFunctionForPrintingOfFrameBoundary() {
String query = "SELECT row_number() over (order by \"hire_date\") FROM \"employee\"";
String expected = "SELECT ROW_NUMBER() OVER (ORDER BY \"hire_date\")\n"
+ "FROM \"foodmart\".\"employee\"";
sql(query).ok(expected);
}
  /** Test case for
   * <a href="https://issues.apache.org/jira/browse/CALCITE-3112">[CALCITE-3112]
   * Support Window in RelToSqlConverter</a>. */
  @Test void testConvertWindowToSql() {
    // Functions that do not allow framing (ROW_NUMBER, RANK, LEAD, LAG) must
    // be printed without a frame clause; COUNT/SUM/MAX windows get the
    // default RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW frame.
    String query0 = "SELECT row_number() over (order by \"hire_date\") FROM \"employee\"";
    String expected0 = "SELECT ROW_NUMBER() OVER (ORDER BY \"hire_date\") AS \"$0\"\n"
        + "FROM \"foodmart\".\"employee\"";
    String query1 = "SELECT rank() over (order by \"hire_date\") FROM \"employee\"";
    String expected1 = "SELECT RANK() OVER (ORDER BY \"hire_date\") AS \"$0\"\n"
        + "FROM \"foodmart\".\"employee\"";
    String query2 = "SELECT lead(\"employee_id\",1,'NA') over "
        + "(partition by \"hire_date\" order by \"employee_id\")\n"
        + "FROM \"employee\"";
    String expected2 = "SELECT LEAD(\"employee_id\", 1, 'NA') OVER "
        + "(PARTITION BY \"hire_date\" "
        + "ORDER BY \"employee_id\") AS \"$0\"\n"
        + "FROM \"foodmart\".\"employee\"";
    String query3 = "SELECT lag(\"employee_id\",1,'NA') over "
        + "(partition by \"hire_date\" order by \"employee_id\")\n"
        + "FROM \"employee\"";
    String expected3 = "SELECT LAG(\"employee_id\", 1, 'NA') OVER "
        + "(PARTITION BY \"hire_date\" ORDER BY \"employee_id\") AS \"$0\"\n"
        + "FROM \"foodmart\".\"employee\"";
    String query4 = "SELECT lag(\"employee_id\",1,'NA') "
        + "over (partition by \"hire_date\" order by \"employee_id\") as lag1, "
        + "lag(\"employee_id\",1,'NA') "
        + "over (partition by \"birth_date\" order by \"employee_id\") as lag2, "
        + "count(*) over (partition by \"hire_date\" order by \"employee_id\") as count1, "
        + "count(*) over (partition by \"birth_date\" order by \"employee_id\") as count2\n"
        + "FROM \"employee\"";
    String expected4 = "SELECT LAG(\"employee_id\", 1, 'NA') OVER "
        + "(PARTITION BY \"hire_date\" ORDER BY \"employee_id\") AS \"$0\", "
        + "LAG(\"employee_id\", 1, 'NA') OVER "
        + "(PARTITION BY \"birth_date\" ORDER BY \"employee_id\") AS \"$1\", "
        + "COUNT(*) OVER (PARTITION BY \"hire_date\" ORDER BY \"employee_id\" "
        + "RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) AS \"$2\", "
        + "COUNT(*) OVER (PARTITION BY \"birth_date\" ORDER BY \"employee_id\" "
        + "RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) AS \"$3\"\n"
        + "FROM \"foodmart\".\"employee\"";
    String query5 = "SELECT lag(\"employee_id\",1,'NA') "
        + "over (partition by \"hire_date\" order by \"employee_id\") as lag1, "
        + "lag(\"employee_id\",1,'NA') "
        + "over (partition by \"birth_date\" order by \"employee_id\") as lag2, "
        + "max(sum(\"employee_id\")) over (partition by \"hire_date\" order by \"employee_id\") "
        + "as count1, "
        + "max(sum(\"employee_id\")) over (partition by \"birth_date\" order by \"employee_id\") "
        + "as count2\n"
        + "FROM \"employee\" group by \"employee_id\", \"hire_date\", \"birth_date\"";
    String expected5 = "SELECT LAG(\"employee_id\", 1, 'NA') OVER "
        + "(PARTITION BY \"hire_date\" ORDER BY \"employee_id\") AS \"$0\", "
        + "LAG(\"employee_id\", 1, 'NA') OVER "
        + "(PARTITION BY \"birth_date\" ORDER BY \"employee_id\") AS \"$1\", "
        + "MAX(SUM(\"employee_id\")) OVER (PARTITION BY \"hire_date\" ORDER BY \"employee_id\" "
        + "RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) AS \"$2\", "
        + "MAX(SUM(\"employee_id\")) OVER (PARTITION BY \"birth_date\" ORDER BY \"employee_id\" "
        + "RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) AS \"$3\"\n"
        + "FROM \"foodmart\".\"employee\"\n"
        + "GROUP BY \"employee_id\", \"hire_date\", \"birth_date\"";
    String query6 = "SELECT lag(\"employee_id\",1,'NA') over "
        + "(partition by \"hire_date\" order by \"employee_id\"), \"hire_date\"\n"
        + "FROM \"employee\"\n"
        + "group by \"hire_date\", \"employee_id\"";
    String expected6 = "SELECT LAG(\"employee_id\", 1, 'NA') "
        + "OVER (PARTITION BY \"hire_date\" ORDER BY \"employee_id\"), \"hire_date\"\n"
        + "FROM \"foodmart\".\"employee\"\n"
        + "GROUP BY \"hire_date\", \"employee_id\"";
    String query7 = "SELECT "
        + "count(distinct \"employee_id\") over (order by \"hire_date\") FROM \"employee\"";
    String expected7 = "SELECT "
        + "COUNT(DISTINCT \"employee_id\") "
        + "OVER (ORDER BY \"hire_date\" RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) AS \"$0\""
        + "\nFROM \"foodmart\".\"employee\"";
    // SUM(DISTINCT ...) is rewritten as CASE/COALESCE so that an empty frame
    // yields NULL rather than 0.
    String query8 = "SELECT "
        + "sum(distinct \"position_id\") over (order by \"hire_date\") FROM \"employee\"";
    String expected8 =
        "SELECT CASE WHEN (COUNT(DISTINCT \"position_id\") OVER (ORDER BY \"hire_date\" "
            + "RANGE"
            + " BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW)) > 0 THEN COALESCE(SUM(DISTINCT "
            + "\"position_id\") OVER (ORDER BY \"hire_date\" RANGE BETWEEN UNBOUNDED "
            + "PRECEDING AND CURRENT ROW), 0) ELSE NULL END\n"
            + "FROM \"foodmart\".\"employee\"";
    // Rewrite windowed aggregates in Project into LogicalWindow before
    // converting back to SQL.
    HepProgramBuilder builder = new HepProgramBuilder();
    builder.addRuleClass(ProjectToWindowRule.class);
    HepPlanner hepPlanner = new HepPlanner(builder.build());
    RuleSet rules = RuleSets.ofList(CoreRules.PROJECT_TO_LOGICAL_PROJECT_AND_WINDOW);
    sql(query0).optimize(rules, hepPlanner).ok(expected0);
    sql(query1).optimize(rules, hepPlanner).ok(expected1);
    sql(query2).optimize(rules, hepPlanner).ok(expected2);
    sql(query3).optimize(rules, hepPlanner).ok(expected3);
    sql(query4).optimize(rules, hepPlanner).ok(expected4);
    sql(query5).optimize(rules, hepPlanner).ok(expected5);
    sql(query6).optimize(rules, hepPlanner).ok(expected6);
    sql(query7).optimize(rules, hepPlanner).ok(expected7);
    sql(query8).optimize(rules, hepPlanner).ok(expected8);
  }
  /**
   * Test case for
   * <a href="https://issues.apache.org/jira/browse/CALCITE-3866">[CALCITE-3866]
   * "numeric field overflow" when running the generated SQL in PostgreSQL </a>.
   */
  @Test void testSumReturnType() {
    String query =
        "select sum(e1.\"store_sales\"), sum(e2.\"store_sales\") from \"sales_fact_dec_1998\" as "
            + "e1 , \"sales_fact_dec_1998\" as e2 where e1.\"product_id\" = e2.\"product_id\"";
    // After AGGREGATE_JOIN_TRANSPOSE the partial sums are multiplied by the
    // other side's row count; the product must be cast back to the original
    // DECIMAL(19, 4) type to avoid overflow in PostgreSQL.
    String expect = "SELECT SUM(CAST(SUM(\"store_sales\") * \"t0\".\"$f1\" AS DECIMAL"
        + "(19, 4))), SUM(CAST(\"t\".\"$f2\" * SUM(\"store_sales\") AS DECIMAL(19, 4)))\n"
        + "FROM (SELECT \"product_id\", SUM(\"store_sales\"), COUNT(*) AS \"$f2\"\n"
        + "FROM \"foodmart\".\"sales_fact_dec_1998\"\n"
        + "GROUP BY \"product_id\") AS \"t\"\n"
        + "INNER JOIN "
        + "(SELECT \"product_id\", COUNT(*) AS \"$f1\", SUM(\"store_sales\")\n"
        + "FROM \"foodmart\".\"sales_fact_dec_1998\"\n"
        + "GROUP BY \"product_id\") AS \"t0\" ON \"t\".\"product_id\" = \"t0\".\"product_id\"";
    HepProgramBuilder builder = new HepProgramBuilder();
    builder.addRuleClass(FilterJoinRule.class);
    builder.addRuleClass(AggregateProjectMergeRule.class);
    builder.addRuleClass(AggregateJoinTransposeRule.class);
    HepPlanner hepPlanner = new HepPlanner(builder.build());
    RuleSet rules = RuleSets.ofList(
        CoreRules.FILTER_INTO_JOIN,
        CoreRules.JOIN_CONDITION_PUSH,
        CoreRules.AGGREGATE_PROJECT_MERGE, CoreRules.AGGREGATE_JOIN_TRANSPOSE_EXTENDED);
    sql(query).withPostgresql().optimize(rules, hepPlanner).ok(expect);
  }
@Test void testRankFunctionForPrintingOfFrameBoundary() {
String query = "SELECT rank() over (order by \"hire_date\") FROM \"employee\"";
String expected = "SELECT RANK() OVER (ORDER BY \"hire_date\")\n"
+ "FROM \"foodmart\".\"employee\"";
sql(query).ok(expected);
}
@Test void testLeadFunctionForPrintingOfFrameBoundary() {
String query = "SELECT lead(\"employee_id\",1,'NA') over "
+ "(partition by \"hire_date\" order by \"employee_id\") FROM \"employee\"";
String expected = "SELECT LEAD(\"employee_id\", 1, 'NA') OVER "
+ "(PARTITION BY \"hire_date\" ORDER BY \"employee_id\")\n"
+ "FROM \"foodmart\".\"employee\"";
sql(query).ok(expected);
}
@Test void testLagFunctionForPrintingOfFrameBoundary() {
String query = "SELECT lag(\"employee_id\",1,'NA') over "
+ "(partition by \"hire_date\" order by \"employee_id\") FROM \"employee\"";
String expected = "SELECT LAG(\"employee_id\", 1, 'NA') OVER "
+ "(PARTITION BY \"hire_date\" ORDER BY \"employee_id\")\n"
+ "FROM \"foodmart\".\"employee\"";
sql(query).ok(expected);
}
  /** Test case for
   * <a href="https://issues.apache.org/jira/browse/CALCITE-3876">[CALCITE-3876]
   * RelToSqlConverter should not combine Projects when top Project contains
   * window function referencing window function from bottom Project</a>. */
  @Test void testWindowOnWindowDoesNotCombineProjects() {
    final String query = "SELECT ROW_NUMBER() OVER (ORDER BY rn)\n"
        + "FROM (SELECT *,\n"
        + "  ROW_NUMBER() OVER (ORDER BY \"product_id\") as rn\n"
        + "     FROM \"foodmart\".\"product\")";
    // The inner window (RN) must stay in its own sub-query; merging the two
    // projects would make the outer window reference an undefined column.
    final String expected = "SELECT ROW_NUMBER() OVER (ORDER BY \"RN\")\n"
        + "FROM (SELECT \"product_class_id\", \"product_id\", \"brand_name\","
        + " \"product_name\", \"SKU\", \"SRP\", \"gross_weight\","
        + " \"net_weight\", \"recyclable_package\", \"low_fat\","
        + " \"units_per_case\", \"cases_per_pallet\", \"shelf_width\","
        + " \"shelf_height\", \"shelf_depth\","
        + " ROW_NUMBER() OVER (ORDER BY \"product_id\") AS \"RN\"\n"
        + "FROM \"foodmart\".\"product\") AS \"t\"";
    sql(query)
        .withPostgresql()
        .ok(expected);
  }
/** Test case for
* <a href="https://issues.apache.org/jira/browse/CALCITE-1798">[CALCITE-1798]
* Generate dialect-specific SQL for FLOOR operator</a>. */
@Test void testFloor() {
String query = "SELECT floor(\"hire_date\" TO MINUTE) FROM \"employee\"";
String expected = "SELECT TRUNC(hire_date, 'MI')\nFROM foodmart.employee";
sql(query)
.withHsqldb()
.ok(expected);
}
@Test void testFloorClickHouse() {
String query = "SELECT floor(\"hire_date\" TO MINUTE) FROM \"employee\"";
String expected = "SELECT toStartOfMinute(`hire_date`)\nFROM `foodmart`.`employee`";
sql(query)
.withClickHouse()
.ok(expected);
}
@Test void testFloorPostgres() {
String query = "SELECT floor(\"hire_date\" TO MINUTE) FROM \"employee\"";
String expected = "SELECT DATE_TRUNC('MINUTE', \"hire_date\")\nFROM \"foodmart\".\"employee\"";
sql(query)
.withPostgresql()
.ok(expected);
}
@Test void testFloorOracle() {
String query = "SELECT floor(\"hire_date\" TO MINUTE) FROM \"employee\"";
String expected = "SELECT TRUNC(\"hire_date\", 'MINUTE')\nFROM \"foodmart\".\"employee\"";
sql(query)
.withOracle()
.ok(expected);
}
@Test void testFloorPresto() {
String query = "SELECT floor(\"hire_date\" TO MINUTE) FROM \"employee\"";
String expected = "SELECT DATE_TRUNC('MINUTE', \"hire_date\")\nFROM \"foodmart\".\"employee\"";
sql(query)
.withPresto()
.ok(expected);
}
@Test void testFloorMssqlWeek() {
String query = "SELECT floor(\"hire_date\" TO WEEK) FROM \"employee\"";
String expected = "SELECT CONVERT(DATETIME, CONVERT(VARCHAR(10), "
+ "DATEADD(day, - (6 + DATEPART(weekday, [hire_date] )) % 7, [hire_date] ), 126))\n"
+ "FROM [foodmart].[employee]";
sql(query).withMssql()
.ok(expected);
}
@Test void testFloorMssqlMonth() {
String query = "SELECT floor(\"hire_date\" TO MONTH) FROM \"employee\"";
String expected = "SELECT CONVERT(DATETIME, CONVERT(VARCHAR(7), [hire_date] , 126)+'-01')\n"
+ "FROM [foodmart].[employee]";
sql(query)
.withMssql()
.ok(expected);
}
@Test void testFloorMysqlMonth() {
String query = "SELECT floor(\"hire_date\" TO MONTH) FROM \"employee\"";
String expected = "SELECT DATE_FORMAT(`hire_date`, '%Y-%m-01')\n"
+ "FROM `foodmart`.`employee`";
sql(query)
.withMysql()
.ok(expected);
}
  /** Verifies dialect-specific rendering of FLOOR(.. TO WEEK) for
   * ClickHouse, MSSQL and MySQL. */
  @Test void testFloorWeek() {
    final String query = "SELECT floor(\"hire_date\" TO WEEK) FROM \"employee\"";
    final String expectedClickHouse = "SELECT toMonday(`hire_date`)\n"
        + "FROM `foodmart`.`employee`";
    final String expectedMssql = "SELECT CONVERT(DATETIME, CONVERT(VARCHAR(10), "
        + "DATEADD(day, - (6 + DATEPART(weekday, [hire_date] )) % 7, [hire_date] ), 126))\n"
        + "FROM [foodmart].[employee]";
    final String expectedMysql = "SELECT STR_TO_DATE(DATE_FORMAT(`hire_date` , '%x%v-1'), "
        + "'%x%v-%w')\n"
        + "FROM `foodmart`.`employee`";
    sql(query)
        .withClickHouse()
        .ok(expectedClickHouse)
        .withMssql()
        .ok(expectedMssql)
        .withMysql()
        .ok(expectedMysql);
  }
  /** DB2 renders interval arithmetic as {@code expr + n UNIT} (no INTERVAL
   * keyword), for both addition and subtraction. */
  @Test void testUnparseSqlIntervalQualifierDb2() {
    String queryDatePlus = "select  * from \"employee\" where  \"hire_date\" + "
        + "INTERVAL '19800' SECOND(5) > TIMESTAMP '2005-10-17 00:00:00' ";
    String expectedDatePlus = "SELECT *\n"
        + "FROM foodmart.employee AS employee\n"
        + "WHERE (employee.hire_date + 19800 SECOND)"
        + " > TIMESTAMP '2005-10-17 00:00:00'";
    sql(queryDatePlus)
        .withDb2()
        .ok(expectedDatePlus);
    String queryDateMinus = "select  * from \"employee\" where  \"hire_date\" - "
        + "INTERVAL '19800' SECOND(5) > TIMESTAMP '2005-10-17 00:00:00' ";
    String expectedDateMinus = "SELECT *\n"
        + "FROM foodmart.employee AS employee\n"
        + "WHERE (employee.hire_date - 19800 SECOND)"
        + " > TIMESTAMP '2005-10-17 00:00:00'";
    sql(queryDateMinus)
        .withDb2()
        .ok(expectedDateMinus);
  }
  /** MySQL keeps the INTERVAL keyword but uses its own compound qualifier
   * names, e.g. YEAR_MONTH and MINUTE_SECOND. */
  @Test void testUnparseSqlIntervalQualifierMySql() {
    final String sql0 = "select  * from \"employee\" where  \"hire_date\" - "
        + "INTERVAL '19800' SECOND(5) > TIMESTAMP '2005-10-17 00:00:00' ";
    final String expect0 = "SELECT *\n"
        + "FROM `foodmart`.`employee`\n"
        + "WHERE (`hire_date` - INTERVAL '19800' SECOND)"
        + " > TIMESTAMP '2005-10-17 00:00:00'";
    sql(sql0).withMysql().ok(expect0);
    final String sql1 = "select  * from \"employee\" where  \"hire_date\" + "
        + "INTERVAL '10' HOUR > TIMESTAMP '2005-10-17 00:00:00' ";
    final String expect1 = "SELECT *\n"
        + "FROM `foodmart`.`employee`\n"
        + "WHERE (`hire_date` + INTERVAL '10' HOUR)"
        + " > TIMESTAMP '2005-10-17 00:00:00'";
    sql(sql1).withMysql().ok(expect1);
    final String sql2 = "select  * from \"employee\" where  \"hire_date\" + "
        + "INTERVAL '1-2' year to month > TIMESTAMP '2005-10-17 00:00:00' ";
    final String expect2 = "SELECT *\n"
        + "FROM `foodmart`.`employee`\n"
        + "WHERE (`hire_date` + INTERVAL '1-2' YEAR_MONTH)"
        + " > TIMESTAMP '2005-10-17 00:00:00'";
    sql(sql2).withMysql().ok(expect2);
    final String sql3 = "select  * from \"employee\" "
        + "where  \"hire_date\" + INTERVAL '39:12' MINUTE TO SECOND"
        + " > TIMESTAMP '2005-10-17 00:00:00' ";
    final String expect3 = "SELECT *\n"
        + "FROM `foodmart`.`employee`\n"
        + "WHERE (`hire_date` + INTERVAL '39:12' MINUTE_SECOND)"
        + " > TIMESTAMP '2005-10-17 00:00:00'";
    sql(sql3).withMysql().ok(expect3);
  }
  /** MSSQL renders interval arithmetic as DATEADD; subtraction becomes a
   * negative amount, and subtracting a negative interval becomes positive. */
  @Test void testUnparseSqlIntervalQualifierMsSql() {
    String queryDatePlus = "select  * from \"employee\" where  \"hire_date\" +"
        + "INTERVAL '19800' SECOND(5) > TIMESTAMP '2005-10-17 00:00:00' ";
    String expectedDatePlus = "SELECT *\n"
        + "FROM [foodmart].[employee]\n"
        + "WHERE DATEADD(SECOND, 19800, [hire_date]) > CAST('2005-10-17 00:00:00' AS TIMESTAMP(0))";
    sql(queryDatePlus)
        .withMssql()
        .ok(expectedDatePlus);
    String queryDateMinus = "select  * from \"employee\" where  \"hire_date\" -"
        + "INTERVAL '19800' SECOND(5) > TIMESTAMP '2005-10-17 00:00:00' ";
    String expectedDateMinus = "SELECT *\n"
        + "FROM [foodmart].[employee]\n"
        + "WHERE DATEADD(SECOND, -19800, [hire_date]) > CAST('2005-10-17 00:00:00' AS TIMESTAMP(0))";
    sql(queryDateMinus)
        .withMssql()
        .ok(expectedDateMinus);
    String queryDateMinusNegate = "select  * from \"employee\" "
        + "where  \"hire_date\" -INTERVAL '-19800' SECOND(5)"
        + " > TIMESTAMP '2005-10-17 00:00:00' ";
    String expectedDateMinusNegate = "SELECT *\n"
        + "FROM [foodmart].[employee]\n"
        + "WHERE DATEADD(SECOND, 19800, [hire_date]) > CAST('2005-10-17 00:00:00' AS TIMESTAMP(0))";
    sql(queryDateMinusNegate)
        .withMssql()
        .ok(expectedDateMinusNegate);
  }
@Test public void testUnparseTimeLiteral() {
String queryDatePlus = "select TIME '11:25:18' "
+ "from \"employee\"";
String expectedBQSql = "SELECT TIME '11:25:18'\n"
+ "FROM foodmart.employee";
String expectedSql = "SELECT CAST('11:25:18' AS TIME(0))\n"
+ "FROM [foodmart].[employee]";
sql(queryDatePlus)
.withBigQuery()
.ok(expectedBQSql)
.withMssql()
.ok(expectedSql);
}
  /** BigQuery renders interval arithmetic as TIMESTAMP_ADD/TIMESTAMP_SUB;
   * compound day-to-second intervals with fractional seconds are not
   * supported and must raise an error. */
  @Test void testUnparseSqlIntervalQualifierBigQuery() {
    final String sql0 = "select  * from \"employee\" where  \"hire_date\" - "
        + "INTERVAL '19800' SECOND(5) > TIMESTAMP '2005-10-17 00:00:00' ";
    final String expect0 = "SELECT *\n"
        + "FROM foodmart.employee\n"
        + "WHERE TIMESTAMP_SUB(hire_date, INTERVAL 19800 SECOND)"
        + " > CAST('2005-10-17 00:00:00' AS DATETIME)";
    sql(sql0).withBigQuery().ok(expect0);
    final String sql1 = "select  * from \"employee\" where  \"hire_date\" + "
        + "INTERVAL '10' HOUR > TIMESTAMP '2005-10-17 00:00:00' ";
    final String expect1 = "SELECT *\n"
        + "FROM foodmart.employee\n"
        + "WHERE TIMESTAMP_ADD(hire_date, INTERVAL 10 HOUR)"
        + " > CAST('2005-10-17 00:00:00' AS DATETIME)";
    sql(sql1).withBigQuery().ok(expect1);
    // Fractional seconds in a DAY TO SECOND interval cannot be unparsed.
    final String sql2 = "select  * from \"employee\" where  \"hire_date\" + "
        + "INTERVAL '1 2:34:56.78' DAY TO SECOND > TIMESTAMP '2005-10-17 00:00:00' ";
    sql(sql2).withBigQuery().throws_("For input string: \"56.78\"");
  }
@Test public void testFloorMysqlWeek() {
String query = "SELECT floor(\"hire_date\" TO WEEK) FROM \"employee\"";
String expected = "SELECT STR_TO_DATE(DATE_FORMAT(`hire_date` , '%x%v-1'), '%x%v-%w')\n"
+ "FROM `foodmart`.`employee`";
sql(query)
.withMysql()
.ok(expected);
}
  /** Verifies dialect-specific rendering of FLOOR(.. TO MONTH) for
   * ClickHouse, MSSQL and MySQL. */
  @Test void testFloorMonth() {
    final String query = "SELECT floor(\"hire_date\" TO MONTH) FROM \"employee\"";
    final String expectedClickHouse = "SELECT toStartOfMonth(`hire_date`)\n"
        + "FROM `foodmart`.`employee`";
    final String expectedMssql = "SELECT CONVERT(DATETIME, CONVERT(VARCHAR(7), [hire_date] , "
        + "126)+'-01')\n"
        + "FROM [foodmart].[employee]";
    final String expectedMysql = "SELECT DATE_FORMAT(`hire_date`, '%Y-%m-01')\n"
        + "FROM `foodmart`.`employee`";
    sql(query)
        .withClickHouse()
        .ok(expectedClickHouse)
        .withMssql()
        .ok(expectedMssql)
        .withMysql()
        .ok(expectedMysql);
  }
@Test void testFloorMysqlHour() {
String query = "SELECT floor(\"hire_date\" TO HOUR) FROM \"employee\"";
String expected = "SELECT DATE_FORMAT(`hire_date`, '%Y-%m-%d %H:00:00')\n"
+ "FROM `foodmart`.`employee`";
sql(query)
.withMysql()
.ok(expected);
}
@Test void testFloorMysqlMinute() {
String query = "SELECT floor(\"hire_date\" TO MINUTE) FROM \"employee\"";
String expected = "SELECT DATE_FORMAT(`hire_date`, '%Y-%m-%d %H:%i:00')\n"
+ "FROM `foodmart`.`employee`";
sql(query)
.withMysql()
.ok(expected);
}
@Test void testFloorMysqlSecond() {
String query = "SELECT floor(\"hire_date\" TO SECOND) FROM \"employee\"";
String expected = "SELECT DATE_FORMAT(`hire_date`, '%Y-%m-%d %H:%i:%s')\n"
+ "FROM `foodmart`.`employee`";
sql(query)
.withMysql()
.ok(expected);
}
  /** Test case for
   * <a href="https://issues.apache.org/jira/browse/CALCITE-1826">[CALCITE-1826]
   * JDBC dialect-specific FLOOR fails when in GROUP BY</a>. */
  @Test void testFloorWithGroupBy() {
    final String query = "SELECT floor(\"hire_date\" TO MINUTE)\n"
        + "FROM \"employee\"\n"
        + "GROUP BY floor(\"hire_date\" TO MINUTE)";
    // The dialect-specific expression must appear identically in both the
    // SELECT list and the GROUP BY clause.
    final String expected = "SELECT TRUNC(hire_date, 'MI')\n"
        + "FROM foodmart.employee\n"
        + "GROUP BY TRUNC(hire_date, 'MI')";
    final String expectedClickHouse = "SELECT toStartOfMinute(`hire_date`)\n"
        + "FROM `foodmart`.`employee`\n"
        + "GROUP BY toStartOfMinute(`hire_date`)";
    final String expectedOracle = "SELECT TRUNC(\"hire_date\", 'MINUTE')\n"
        + "FROM \"foodmart\".\"employee\"\n"
        + "GROUP BY TRUNC(\"hire_date\", 'MINUTE')";
    final String expectedPostgresql = "SELECT DATE_TRUNC('MINUTE', \"hire_date\")\n"
        + "FROM \"foodmart\".\"employee\"\n"
        + "GROUP BY DATE_TRUNC('MINUTE', \"hire_date\")";
    final String expectedMysql = "SELECT"
        + " DATE_FORMAT(`hire_date`, '%Y-%m-%d %H:%i:00')\n"
        + "FROM `foodmart`.`employee`\n"
        + "GROUP BY DATE_FORMAT(`hire_date`, '%Y-%m-%d %H:%i:00')";
    sql(query)
        .withHsqldb()
        .ok(expected)
        .withClickHouse()
        .ok(expectedClickHouse)
        .withOracle()
        .ok(expectedOracle)
        .withPostgresql()
        .ok(expectedPostgresql)
        .withMysql()
        .ok(expectedMysql);
  }
  /** Verifies dialect-specific rendering of two-argument SUBSTRING
   * (start only, no length). MSSQL requires FROM and FOR and must throw. */
  @Test void testSubstring() {
    final String query = "select substring(\"brand_name\" from 2) "
        + "from \"product\"\n";
    final String expectedClickHouse = "SELECT substring(`brand_name`, 2)\n"
        + "FROM `foodmart`.`product`";
    final String expectedOracle = "SELECT SUBSTR(\"brand_name\", 2)\n"
        + "FROM \"foodmart\".\"product\"";
    final String expectedPostgresql = "SELECT SUBSTRING(\"brand_name\" FROM 2)\n"
        + "FROM \"foodmart\".\"product\"";
    final String expectedPresto = "SELECT SUBSTR(\"brand_name\", 2)\n"
        + "FROM \"foodmart\".\"product\"";
    final String expectedSnowflake = "SELECT SUBSTR(\"brand_name\", 2)\n"
        + "FROM \"foodmart\".\"product\"";
    final String expectedRedshift = expectedPostgresql;
    final String expectedMysql = "SELECT SUBSTRING(`brand_name` FROM 2)\n"
        + "FROM `foodmart`.`product`";
    final String expectedHive = "SELECT SUBSTRING(brand_name, 2)\n"
        + "FROM foodmart.product";
    final String expectedSpark = "SELECT SUBSTRING(brand_name, 2)\n"
        + "FROM foodmart.product";
    final String expectedBiqQuery = "SELECT SUBSTR(brand_name, 2)\n"
        + "FROM foodmart.product";
    sql(query)
        .withClickHouse()
        .ok(expectedClickHouse)
        .withOracle()
        .ok(expectedOracle)
        .withPostgresql()
        .ok(expectedPostgresql)
        .withPresto()
        .ok(expectedPresto)
        .withSnowflake()
        .ok(expectedSnowflake)
        .withRedshift()
        .ok(expectedRedshift)
        .withMysql()
        .ok(expectedMysql)
        .withMssql()
        // mssql does not support this syntax and so should fail
        .throws_("MSSQL SUBSTRING requires FROM and FOR arguments")
        .withHive()
        .ok(expectedHive)
        .withSpark()
        .ok(expectedSpark)
        .withBigQuery()
        .ok(expectedBiqQuery);
  }
  /** Verifies dialect-specific rendering of three-argument SUBSTRING
   * (start and length); this form is valid on MSSQL too. */
  @Test void testSubstringWithFor() {
    final String query = "select substring(\"brand_name\" from 2 for 3) "
        + "from \"product\"\n";
    final String expectedClickHouse = "SELECT substring(`brand_name`, 2, 3)\n"
        + "FROM `foodmart`.`product`";
    final String expectedOracle = "SELECT SUBSTR(\"brand_name\", 2, 3)\n"
        + "FROM \"foodmart\".\"product\"";
    final String expectedPostgresql = "SELECT SUBSTRING(\"brand_name\" FROM 2 FOR 3)\n"
        + "FROM \"foodmart\".\"product\"";
    final String expectedPresto = "SELECT SUBSTR(\"brand_name\", 2, 3)\n"
        + "FROM \"foodmart\".\"product\"";
    final String expectedSnowflake = "SELECT SUBSTR(\"brand_name\", 2, 3)\n"
        + "FROM \"foodmart\".\"product\"";
    final String expectedRedshift = expectedPostgresql;
    final String expectedMysql = "SELECT SUBSTRING(`brand_name` FROM 2 FOR 3)\n"
        + "FROM `foodmart`.`product`";
    final String expectedMssql = "SELECT SUBSTRING([brand_name], 2, 3)\n"
        + "FROM [foodmart].[product]";
    final String expectedHive = "SELECT SUBSTRING(brand_name, 2, 3)\n"
        + "FROM foodmart.product";
    final String expectedSpark = "SELECT SUBSTRING(brand_name, 2, 3)\n"
        + "FROM foodmart.product";
    sql(query)
        .withClickHouse()
        .ok(expectedClickHouse)
        .withOracle()
        .ok(expectedOracle)
        .withPostgresql()
        .ok(expectedPostgresql)
        .withPresto()
        .ok(expectedPresto)
        .withSnowflake()
        .ok(expectedSnowflake)
        .withRedshift()
        .ok(expectedRedshift)
        .withMysql()
        .ok(expectedMysql)
        .withMssql()
        .ok(expectedMssql)
        .withSpark()
        .ok(expectedSpark)
        .withHive()
        .ok(expectedHive);
  }
/** Test case for
* <a href="https://issues.apache.org/jira/browse/CALCITE-1849">[CALCITE-1849]
* Support sub-queries (RexSubQuery) in RelToSqlConverter</a>. */
@Test void testExistsWithExpand() {
String query = "select \"product_name\" from \"product\" a "
+ "where exists (select count(*) "
+ "from \"sales_fact_1997\"b "
+ "where b.\"product_id\" = a.\"product_id\")";
String expected = "SELECT \"product_name\"\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "WHERE EXISTS (SELECT COUNT(*)\n"
+ "FROM \"foodmart\".\"sales_fact_1997\"\n"
+ "WHERE \"product_id\" = \"product\".\"product_id\")";
sql(query).withConfig(c -> c.withExpand(false)).ok(expected);
}
@Test void testNotExistsWithExpand() {
String query = "select \"product_name\" from \"product\" a "
+ "where not exists (select count(*) "
+ "from \"sales_fact_1997\"b "
+ "where b.\"product_id\" = a.\"product_id\")";
String expected = "SELECT \"product_name\"\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "WHERE NOT EXISTS (SELECT COUNT(*)\n"
+ "FROM \"foodmart\".\"sales_fact_1997\"\n"
+ "WHERE \"product_id\" = \"product\".\"product_id\")";
sql(query).withConfig(c -> c.withExpand(false)).ok(expected);
}
@Test void testSubQueryInWithExpand() {
String query = "select \"product_name\" from \"product\" a "
+ "where \"product_id\" in (select \"product_id\" "
+ "from \"sales_fact_1997\"b "
+ "where b.\"product_id\" = a.\"product_id\")";
String expected = "SELECT \"product_name\"\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "WHERE \"product_id\" IN (SELECT \"product_id\"\n"
+ "FROM \"foodmart\".\"sales_fact_1997\"\n"
+ "WHERE \"product_id\" = \"product\".\"product_id\")";
sql(query).withConfig(c -> c.withExpand(false)).ok(expected);
}
@Test void testSubQueryInWithExpand2() {
String query = "select \"product_name\" from \"product\" a "
+ "where \"product_id\" in (1, 2)";
String expected = "SELECT \"product_name\"\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "WHERE \"product_id\" = 1 OR \"product_id\" = 2";
sql(query).withConfig(c -> c.withExpand(false)).ok(expected);
}
@Test void testSubQueryNotInWithExpand() {
String query = "select \"product_name\" from \"product\" a "
+ "where \"product_id\" not in (select \"product_id\" "
+ "from \"sales_fact_1997\"b "
+ "where b.\"product_id\" = a.\"product_id\")";
String expected = "SELECT \"product_name\"\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "WHERE \"product_id\" NOT IN (SELECT \"product_id\"\n"
+ "FROM \"foodmart\".\"sales_fact_1997\"\n"
+ "WHERE \"product_id\" = \"product\".\"product_id\")";
sql(query).withConfig(c -> c.withExpand(false)).ok(expected);
}
@Test void testLike() {
String query = "select \"product_name\" from \"product\" a "
+ "where \"product_name\" like 'abc'";
String expected = "SELECT \"product_name\"\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "WHERE \"product_name\" LIKE 'abc'";
sql(query).ok(expected);
}
@Test void testNotLike() {
String query = "select \"product_name\" from \"product\" a "
+ "where \"product_name\" not like 'abc'";
String expected = "SELECT \"product_name\"\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "WHERE \"product_name\" NOT LIKE 'abc'";
sql(query).ok(expected);
}
  /** MATCH_RECOGNIZE with PARTITION BY and ORDER BY; the pattern uses the
   * '+' (one-or-more) quantifier and PREV is made explicit on output. */
  @Test void testMatchRecognizePatternExpression() {
    String sql = "select *\n"
        + "  from \"product\" match_recognize\n"
        + "  (\n"
        + "    partition by \"product_class_id\", \"brand_name\"\n"
        + "    order by \"product_class_id\" asc, \"brand_name\" desc\n"
        + "    pattern (strt down+ up+)\n"
        + "    define\n"
        + "      down as down.\"net_weight\" < PREV(down.\"net_weight\"),\n"
        + "      up as up.\"net_weight\" > prev(up.\"net_weight\")\n"
        + "  ) mr";
    String expected = "SELECT *\n"
        + "FROM (SELECT *\n"
        + "FROM \"foodmart\".\"product\") MATCH_RECOGNIZE(\n"
        + "PARTITION BY \"product_class_id\", \"brand_name\"\n"
        + "ORDER BY \"product_class_id\", \"brand_name\" DESC\n"
        + "ONE ROW PER MATCH\n"
        + "AFTER MATCH SKIP TO NEXT ROW\n"
        + "PATTERN (\"STRT\" \"DOWN\" + \"UP\" +)\n"
        + "DEFINE "
        + "\"DOWN\" AS PREV(\"DOWN\".\"net_weight\", 0) < "
        + "PREV(\"DOWN\".\"net_weight\", 1), "
        + "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
        + "PREV(\"UP\".\"net_weight\", 1))";
    sql(sql).ok(expected);
  }
  /** MATCH_RECOGNIZE pattern with the '$' end-of-input anchor. */
  @Test void testMatchRecognizePatternExpression2() {
    final String sql = "select *\n"
        + "  from \"product\" match_recognize\n"
        + "  (\n"
        + "    pattern (strt down+ up+$)\n"
        + "    define\n"
        + "      down as down.\"net_weight\" < PREV(down.\"net_weight\"),\n"
        + "      up as up.\"net_weight\" > prev(up.\"net_weight\")\n"
        + "  ) mr";
    final String expected = "SELECT *\n"
        + "FROM (SELECT *\n"
        + "FROM \"foodmart\".\"product\") MATCH_RECOGNIZE(\n"
        + "ONE ROW PER MATCH\n"
        + "AFTER MATCH SKIP TO NEXT ROW\n"
        + "PATTERN (\"STRT\" \"DOWN\" + \"UP\" + $)\n"
        + "DEFINE "
        + "\"DOWN\" AS PREV(\"DOWN\".\"net_weight\", 0) < "
        + "PREV(\"DOWN\".\"net_weight\", 1), "
        + "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
        + "PREV(\"UP\".\"net_weight\", 1))";
    sql(sql).ok(expected);
  }
  /** MATCH_RECOGNIZE pattern with the '^' start-of-input anchor. */
  @Test void testMatchRecognizePatternExpression3() {
    final String sql = "select *\n"
        + "  from \"product\" match_recognize\n"
        + "  (\n"
        + "    pattern (^strt down+ up+)\n"
        + "    define\n"
        + "      down as down.\"net_weight\" < PREV(down.\"net_weight\"),\n"
        + "      up as up.\"net_weight\" > prev(up.\"net_weight\")\n"
        + "  ) mr";
    final String expected = "SELECT *\n"
        + "FROM (SELECT *\n"
        + "FROM \"foodmart\".\"product\") MATCH_RECOGNIZE(\n"
        + "ONE ROW PER MATCH\n"
        + "AFTER MATCH SKIP TO NEXT ROW\n"
        + "PATTERN (^ \"STRT\" \"DOWN\" + \"UP\" +)\n"
        + "DEFINE "
        + "\"DOWN\" AS PREV(\"DOWN\".\"net_weight\", 0) < "
        + "PREV(\"DOWN\".\"net_weight\", 1), "
        + "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
        + "PREV(\"UP\".\"net_weight\", 1))";
    sql(sql).ok(expected);
  }
  /** MATCH_RECOGNIZE pattern with both '^' and '$' anchors. */
  @Test void testMatchRecognizePatternExpression4() {
    final String sql = "select *\n"
        + "  from \"product\" match_recognize\n"
        + "  (\n"
        + "    pattern (^strt down+ up+$)\n"
        + "    define\n"
        + "      down as down.\"net_weight\" < PREV(down.\"net_weight\"),\n"
        + "      up as up.\"net_weight\" > prev(up.\"net_weight\")\n"
        + "  ) mr";
    final String expected = "SELECT *\n"
        + "FROM (SELECT *\n"
        + "FROM \"foodmart\".\"product\") MATCH_RECOGNIZE(\n"
        + "ONE ROW PER MATCH\n"
        + "AFTER MATCH SKIP TO NEXT ROW\n"
        + "PATTERN (^ \"STRT\" \"DOWN\" + \"UP\" + $)\n"
        + "DEFINE "
        + "\"DOWN\" AS PREV(\"DOWN\".\"net_weight\", 0) < "
        + "PREV(\"DOWN\".\"net_weight\", 1), "
        + "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
        + "PREV(\"UP\".\"net_weight\", 1))";
    sql(sql).ok(expected);
  }
  /** MATCH_RECOGNIZE pattern with '*' (zero-or-more) and '?' (optional)
   * quantifiers. */
  @Test void testMatchRecognizePatternExpression5() {
    final String sql = "select *\n"
        + "  from \"product\" match_recognize\n"
        + "  (\n"
        + "    pattern (strt down* up?)\n"
        + "    define\n"
        + "      down as down.\"net_weight\" < PREV(down.\"net_weight\"),\n"
        + "      up as up.\"net_weight\" > prev(up.\"net_weight\")\n"
        + "  ) mr";
    final String expected = "SELECT *\n"
        + "FROM (SELECT *\n"
        + "FROM \"foodmart\".\"product\") MATCH_RECOGNIZE(\n"
        + "ONE ROW PER MATCH\n"
        + "AFTER MATCH SKIP TO NEXT ROW\n"
        + "PATTERN (\"STRT\" \"DOWN\" * \"UP\" ?)\n"
        + "DEFINE "
        + "\"DOWN\" AS PREV(\"DOWN\".\"net_weight\", 0) < "
        + "PREV(\"DOWN\".\"net_weight\", 1), "
        + "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
        + "PREV(\"UP\".\"net_weight\", 1))";
    sql(sql).ok(expected);
  }
  /** Verifies the pattern-exclusion syntax {@code {- ... -}} in the
   * generated PATTERN clause. */
  @Test void testMatchRecognizePatternExpression6() {
    final String sql = "select *\n"
        + "  from \"product\" match_recognize\n"
        + "   (\n"
        + "    pattern (strt {-down-} up?)\n"
        + "    define\n"
        + "      down as down.\"net_weight\" < PREV(down.\"net_weight\"),\n"
        + "      up as up.\"net_weight\" > prev(up.\"net_weight\")\n"
        + "  ) mr";
    final String expected = "SELECT *\n"
        + "FROM (SELECT *\n"
        + "FROM \"foodmart\".\"product\") MATCH_RECOGNIZE(\n"
        + "ONE ROW PER MATCH\n"
        + "AFTER MATCH SKIP TO NEXT ROW\n"
        + "PATTERN (\"STRT\" {- \"DOWN\" -} \"UP\" ?)\n"
        + "DEFINE "
        + "\"DOWN\" AS PREV(\"DOWN\".\"net_weight\", 0) < "
        + "PREV(\"DOWN\".\"net_weight\", 1), "
        + "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
        + "PREV(\"UP\".\"net_weight\", 1))";
    sql(sql).ok(expected);
  }
  /** Verifies bounded repetition quantifiers: an exact count {@code {2}}
   * and an open upper bound {@code {3,}}. */
  @Test void testMatchRecognizePatternExpression7() {
    final String sql = "select *\n"
        + "  from \"product\" match_recognize\n"
        + "   (\n"
        + "    pattern (strt down{2} up{3,})\n"
        + "    define\n"
        + "      down as down.\"net_weight\" < PREV(down.\"net_weight\"),\n"
        + "      up as up.\"net_weight\" > prev(up.\"net_weight\")\n"
        + "  ) mr";
    final String expected = "SELECT *\n"
        + "FROM (SELECT *\n"
        + "FROM \"foodmart\".\"product\") MATCH_RECOGNIZE(\n"
        + "ONE ROW PER MATCH\n"
        + "AFTER MATCH SKIP TO NEXT ROW\n"
        + "PATTERN (\"STRT\" \"DOWN\" { 2 } \"UP\" { 3, })\n"
        + "DEFINE "
        + "\"DOWN\" AS PREV(\"DOWN\".\"net_weight\", 0) < "
        + "PREV(\"DOWN\".\"net_weight\", 1), "
        + "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
        + "PREV(\"UP\".\"net_weight\", 1))";
    sql(sql).ok(expected);
  }
  /** Verifies bounded repetition with an open lower bound {@code {,2}}
   * and a closed range {@code {3,5}}. */
  @Test void testMatchRecognizePatternExpression8() {
    final String sql = "select *\n"
        + "  from \"product\" match_recognize\n"
        + "   (\n"
        + "    pattern (strt down{,2} up{3,5})\n"
        + "    define\n"
        + "      down as down.\"net_weight\" < PREV(down.\"net_weight\"),\n"
        + "      up as up.\"net_weight\" > prev(up.\"net_weight\")\n"
        + "  ) mr";
    final String expected = "SELECT *\n"
        + "FROM (SELECT *\n"
        + "FROM \"foodmart\".\"product\") MATCH_RECOGNIZE(\n"
        + "ONE ROW PER MATCH\n"
        + "AFTER MATCH SKIP TO NEXT ROW\n"
        + "PATTERN (\"STRT\" \"DOWN\" { , 2 } \"UP\" { 3, 5 })\n"
        + "DEFINE "
        + "\"DOWN\" AS PREV(\"DOWN\".\"net_weight\", 0) < "
        + "PREV(\"DOWN\".\"net_weight\", 1), "
        + "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
        + "PREV(\"UP\".\"net_weight\", 1))";
    sql(sql).ok(expected);
  }
  /** Verifies exclusion groups that themselves contain quantifiers,
   * e.g. {@code {-down+-}} and {@code {-up*-}}. */
  @Test void testMatchRecognizePatternExpression9() {
    final String sql = "select *\n"
        + "  from \"product\" match_recognize\n"
        + "   (\n"
        + "    pattern (strt {-down+-} {-up*-})\n"
        + "    define\n"
        + "      down as down.\"net_weight\" < PREV(down.\"net_weight\"),\n"
        + "      up as up.\"net_weight\" > prev(up.\"net_weight\")\n"
        + "  ) mr";
    final String expected = "SELECT *\n"
        + "FROM (SELECT *\n"
        + "FROM \"foodmart\".\"product\") MATCH_RECOGNIZE(\n"
        + "ONE ROW PER MATCH\n"
        + "AFTER MATCH SKIP TO NEXT ROW\n"
        + "PATTERN (\"STRT\" {- \"DOWN\" + -} {- \"UP\" * -})\n"
        + "DEFINE "
        + "\"DOWN\" AS PREV(\"DOWN\".\"net_weight\", 0) < "
        + "PREV(\"DOWN\".\"net_weight\", 1), "
        + "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
        + "PREV(\"UP\".\"net_weight\", 1))";
    sql(sql).ok(expected);
  }
  /** Verifies pattern alternation ({@code |}) across all six permutations
   * of three pattern variables. */
  @Test void testMatchRecognizePatternExpression10() {
    final String sql = "select *\n"
        + "  from \"product\" match_recognize\n"
        + "   (\n"
        + "    pattern (A B C | A C B | B A C | B C A | C A B | C B A)\n"
        + "    define\n"
        + "      A as A.\"net_weight\" < PREV(A.\"net_weight\"),\n"
        + "      B as B.\"net_weight\" > PREV(B.\"net_weight\"),\n"
        + "      C as C.\"net_weight\" < PREV(C.\"net_weight\")\n"
        + "  ) mr";
    final String expected = "SELECT *\n"
        + "FROM (SELECT *\n"
        + "FROM \"foodmart\".\"product\") MATCH_RECOGNIZE(\n"
        + "ONE ROW PER MATCH\n"
        + "AFTER MATCH SKIP TO NEXT ROW\n"
        + "PATTERN "
        + "(\"A\" \"B\" \"C\" | \"A\" \"C\" \"B\" | \"B\" \"A\" \"C\" "
        + "| \"B\" \"C\" \"A\" | \"C\" \"A\" \"B\" | \"C\" \"B\" \"A\")\n"
        + "DEFINE "
        + "\"A\" AS PREV(\"A\".\"net_weight\", 0) < PREV(\"A\".\"net_weight\", 1), "
        + "\"B\" AS PREV(\"B\".\"net_weight\", 0) > PREV(\"B\".\"net_weight\", 1), "
        + "\"C\" AS PREV(\"C\".\"net_weight\", 0) < PREV(\"C\".\"net_weight\", 1))";
    sql(sql).ok(expected);
  }
  /** Verifies MATCH_RECOGNIZE applied to a sub-query
   * ({@code (select * from "product")}) rather than a bare table. */
  @Test void testMatchRecognizePatternExpression11() {
    final String sql = "select *\n"
        + "  from (select * from \"product\") match_recognize\n"
        + "   (\n"
        + "    pattern (strt down+ up+)\n"
        + "    define\n"
        + "      down as down.\"net_weight\" < PREV(down.\"net_weight\"),\n"
        + "      up as up.\"net_weight\" > prev(up.\"net_weight\")\n"
        + "  ) mr";
    final String expected = "SELECT *\n"
        + "FROM (SELECT *\n"
        + "FROM \"foodmart\".\"product\") MATCH_RECOGNIZE(\n"
        + "ONE ROW PER MATCH\n"
        + "AFTER MATCH SKIP TO NEXT ROW\n"
        + "PATTERN (\"STRT\" \"DOWN\" + \"UP\" +)\n"
        + "DEFINE "
        + "\"DOWN\" AS PREV(\"DOWN\".\"net_weight\", 0) < "
        + "PREV(\"DOWN\".\"net_weight\", 1), "
        + "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
        + "PREV(\"UP\".\"net_weight\", 1))";
    sql(sql).ok(expected);
  }
  /** Verifies an outer {@code ORDER BY} on the MATCH_RECOGNIZE output;
   * the alias qualifier ({@code MR.}) is dropped in the generated SQL. */
  @Test void testMatchRecognizePatternExpression12() {
    final String sql = "select *\n"
        + "  from \"product\" match_recognize\n"
        + "   (\n"
        + "    pattern (strt down+ up+)\n"
        + "    define\n"
        + "      down as down.\"net_weight\" < PREV(down.\"net_weight\"),\n"
        + "      up as up.\"net_weight\" > prev(up.\"net_weight\")\n"
        + "  ) mr order by MR.\"net_weight\"";
    final String expected = "SELECT *\n"
        + "FROM (SELECT *\n"
        + "FROM \"foodmart\".\"product\") MATCH_RECOGNIZE(\n"
        + "ONE ROW PER MATCH\n"
        + "AFTER MATCH SKIP TO NEXT ROW\n"
        + "PATTERN (\"STRT\" \"DOWN\" + \"UP\" +)\n"
        + "DEFINE "
        + "\"DOWN\" AS PREV(\"DOWN\".\"net_weight\", 0) < "
        + "PREV(\"DOWN\".\"net_weight\", 1), "
        + "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
        + "PREV(\"UP\".\"net_weight\", 1))\n"
        + "ORDER BY \"net_weight\"";
    sql(sql).ok(expected);
  }
  /** Verifies MATCH_RECOGNIZE over a complex input: a three-way join with a
   * WHERE filter, followed by an outer ORDER BY. */
  @Test void testMatchRecognizePatternExpression13() {
    final String sql = "select *\n"
        + "  from (\n"
        + "select *\n"
        + "from \"sales_fact_1997\" as s\n"
        + "join \"customer\" as c\n"
        + "  on s.\"customer_id\" = c.\"customer_id\"\n"
        + "join \"product\" as p\n"
        + "  on s.\"product_id\" = p.\"product_id\"\n"
        + "join \"product_class\" as pc\n"
        + "  on p.\"product_class_id\" = pc.\"product_class_id\"\n"
        + "where c.\"city\" = 'San Francisco'\n"
        + "and pc.\"product_department\" = 'Snacks'"
        + ") match_recognize\n"
        + "   (\n"
        + "    pattern (strt down+ up+)\n"
        + "    define\n"
        + "      down as down.\"net_weight\" < PREV(down.\"net_weight\"),\n"
        + "      up as up.\"net_weight\" > prev(up.\"net_weight\")\n"
        + "  ) mr order by MR.\"net_weight\"";
    final String expected = "SELECT *\n"
        + "FROM (SELECT *\n"
        + "FROM \"foodmart\".\"sales_fact_1997\"\n"
        + "INNER JOIN \"foodmart\".\"customer\" "
        + "ON \"sales_fact_1997\".\"customer_id\" = \"customer\".\"customer_id\"\n"
        + "INNER JOIN \"foodmart\".\"product\" "
        + "ON \"sales_fact_1997\".\"product_id\" = \"product\".\"product_id\"\n"
        + "INNER JOIN \"foodmart\".\"product_class\" "
        + "ON \"product\".\"product_class_id\" = \"product_class\".\"product_class_id\"\n"
        + "WHERE \"customer\".\"city\" = 'San Francisco' "
        + "AND \"product_class\".\"product_department\" = 'Snacks') "
        + "MATCH_RECOGNIZE(\n"
        + "ONE ROW PER MATCH\n"
        + "AFTER MATCH SKIP TO NEXT ROW\n"
        + "PATTERN (\"STRT\" \"DOWN\" + \"UP\" +)\n"
        + "DEFINE "
        + "\"DOWN\" AS PREV(\"DOWN\".\"net_weight\", 0) < "
        + "PREV(\"DOWN\".\"net_weight\", 1), "
        + "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
        + "PREV(\"UP\".\"net_weight\", 1))\n"
        + "ORDER BY \"net_weight\"";
    sql(sql).ok(expected);
  }
  /** Verifies that {@code NEXT} in a DEFINE expression is rewritten as
   * {@code NEXT(PREV(..., 0), 1)} in the generated SQL. */
  @Test void testMatchRecognizeDefineClause() {
    final String sql = "select *\n"
        + "  from \"product\" match_recognize\n"
        + "   (\n"
        + "    pattern (strt down+ up+)\n"
        + "    define\n"
        + "      down as down.\"net_weight\" < PREV(down.\"net_weight\"),\n"
        + "      up as up.\"net_weight\" > NEXT(up.\"net_weight\")\n"
        + "  ) mr";
    final String expected = "SELECT *\n"
        + "FROM (SELECT *\n"
        + "FROM \"foodmart\".\"product\") MATCH_RECOGNIZE(\n"
        + "ONE ROW PER MATCH\n"
        + "AFTER MATCH SKIP TO NEXT ROW\n"
        + "PATTERN (\"STRT\" \"DOWN\" + \"UP\" +)\n"
        + "DEFINE "
        + "\"DOWN\" AS PREV(\"DOWN\".\"net_weight\", 0) < "
        + "PREV(\"DOWN\".\"net_weight\", 1), "
        + "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
        + "NEXT(PREV(\"UP\".\"net_weight\", 0), 1))";
    sql(sql).ok(expected);
  }
  /** Verifies {@code FIRST} and {@code LAST} navigation functions in the
   * DEFINE clause; both get an explicit offset 0 in the output. */
  @Test void testMatchRecognizeDefineClause2() {
    final String sql = "select *\n"
        + "  from \"product\" match_recognize\n"
        + "   (\n"
        + "    pattern (strt down+ up+)\n"
        + "    define\n"
        + "      down as down.\"net_weight\" < FIRST(down.\"net_weight\"),\n"
        + "      up as up.\"net_weight\" > LAST(up.\"net_weight\")\n"
        + "  ) mr";
    final String expected = "SELECT *\n"
        + "FROM (SELECT *\n"
        + "FROM \"foodmart\".\"product\") MATCH_RECOGNIZE(\n"
        + "ONE ROW PER MATCH\n"
        + "AFTER MATCH SKIP TO NEXT ROW\n"
        + "PATTERN (\"STRT\" \"DOWN\" + \"UP\" +)\n"
        + "DEFINE "
        + "\"DOWN\" AS PREV(\"DOWN\".\"net_weight\", 0) < "
        + "FIRST(\"DOWN\".\"net_weight\", 0), "
        + "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
        + "LAST(\"UP\".\"net_weight\", 0))";
    sql(sql).ok(expected);
  }
  /** Verifies an explicit PREV offset and {@code LAST} over an arithmetic
   * expression; LAST is distributed over the operands of {@code +}. */
  @Test void testMatchRecognizeDefineClause3() {
    final String sql = "select *\n"
        + "  from \"product\" match_recognize\n"
        + "   (\n"
        + "    pattern (strt down+ up+)\n"
        + "    define\n"
        + "      down as down.\"net_weight\" < PREV(down.\"net_weight\",1),\n"
        + "      up as up.\"net_weight\" > LAST(up.\"net_weight\" + up.\"gross_weight\")\n"
        + "  ) mr";
    final String expected = "SELECT *\n"
        + "FROM (SELECT *\n"
        + "FROM \"foodmart\".\"product\") MATCH_RECOGNIZE(\n"
        + "ONE ROW PER MATCH\n"
        + "AFTER MATCH SKIP TO NEXT ROW\n"
        + "PATTERN (\"STRT\" \"DOWN\" + \"UP\" +)\n"
        + "DEFINE "
        + "\"DOWN\" AS PREV(\"DOWN\".\"net_weight\", 0) < "
        + "PREV(\"DOWN\".\"net_weight\", 1), "
        + "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
        + "LAST(\"UP\".\"net_weight\", 0) + LAST(\"UP\".\"gross_weight\", 0))";
    sql(sql).ok(expected);
  }
  /** Verifies nested navigation — {@code PREV(LAST(...), 3)} — in the
   * DEFINE clause. */
  @Test void testMatchRecognizeDefineClause4() {
    final String sql = "select *\n"
        + "  from \"product\" match_recognize\n"
        + "   (\n"
        + "    pattern (strt down+ up+)\n"
        + "    define\n"
        + "      down as down.\"net_weight\" < PREV(down.\"net_weight\",1),\n"
        + "      up as up.\"net_weight\" > "
        + "PREV(LAST(up.\"net_weight\" + up.\"gross_weight\"),3)\n"
        + "  ) mr";
    final String expected = "SELECT *\n"
        + "FROM (SELECT *\n"
        + "FROM \"foodmart\".\"product\") MATCH_RECOGNIZE(\n"
        + "ONE ROW PER MATCH\n"
        + "AFTER MATCH SKIP TO NEXT ROW\n"
        + "PATTERN (\"STRT\" \"DOWN\" + \"UP\" +)\n"
        + "DEFINE "
        + "\"DOWN\" AS PREV(\"DOWN\".\"net_weight\", 0) < "
        + "PREV(\"DOWN\".\"net_weight\", 1), "
        + "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
        + "PREV(LAST(\"UP\".\"net_weight\", 0) + "
        + "LAST(\"UP\".\"gross_weight\", 0), 3))";
    sql(sql).ok(expected);
  }
  /** Verifies MEASURES with {@code MATCH_NUMBER()} and {@code CLASSIFIER()};
   * under ONE ROW PER MATCH every measure is emitted with FINAL semantics. */
  @Test void testMatchRecognizeMeasures1() {
    final String sql = "select *\n"
        + "  from \"product\" match_recognize\n"
        + "   (\n"
        + "   measures MATCH_NUMBER() as match_num, "
        + "   CLASSIFIER() as var_match, "
        + "   STRT.\"net_weight\" as start_nw,"
        + "   LAST(DOWN.\"net_weight\") as bottom_nw,"
        + "   LAST(up.\"net_weight\") as end_nw"
        + "    pattern (strt down+ up+)\n"
        + "    define\n"
        + "      down as down.\"net_weight\" < PREV(down.\"net_weight\"),\n"
        + "      up as up.\"net_weight\" > prev(up.\"net_weight\")\n"
        + "  ) mr";
    final String expected = "SELECT *\n"
        + "FROM (SELECT *\n"
        + "FROM \"foodmart\".\"product\") "
        + "MATCH_RECOGNIZE(\n"
        + "MEASURES "
        + "FINAL MATCH_NUMBER () AS \"MATCH_NUM\", "
        + "FINAL CLASSIFIER() AS \"VAR_MATCH\", "
        + "FINAL \"STRT\".\"net_weight\" AS \"START_NW\", "
        + "FINAL LAST(\"DOWN\".\"net_weight\", 0) AS \"BOTTOM_NW\", "
        + "FINAL LAST(\"UP\".\"net_weight\", 0) AS \"END_NW\"\n"
        + "ONE ROW PER MATCH\n"
        + "AFTER MATCH SKIP TO NEXT ROW\n"
        + "PATTERN (\"STRT\" \"DOWN\" + \"UP\" +)\n"
        + "DEFINE "
        + "\"DOWN\" AS PREV(\"DOWN\".\"net_weight\", 0) < "
        + "PREV(\"DOWN\".\"net_weight\", 1), "
        + "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
        + "PREV(\"UP\".\"net_weight\", 1))";
    sql(sql).ok(expected);
  }
  /** Verifies that an explicit {@code FINAL} on a measure is preserved and
   * that implicit measures also come out FINAL under ONE ROW PER MATCH. */
  @Test void testMatchRecognizeMeasures2() {
    final String sql = "select *\n"
        + "  from \"product\" match_recognize\n"
        + "   (\n"
        + "   measures STRT.\"net_weight\" as start_nw,"
        + "   FINAL LAST(DOWN.\"net_weight\") as bottom_nw,"
        + "   LAST(up.\"net_weight\") as end_nw"
        + "    pattern (strt down+ up+)\n"
        + "    define\n"
        + "      down as down.\"net_weight\" < PREV(down.\"net_weight\"),\n"
        + "      up as up.\"net_weight\" > prev(up.\"net_weight\")\n"
        + "  ) mr";
    final String expected = "SELECT *\n"
        + "FROM (SELECT *\n"
        + "FROM \"foodmart\".\"product\") "
        + "MATCH_RECOGNIZE(\n"
        + "MEASURES "
        + "FINAL \"STRT\".\"net_weight\" AS \"START_NW\", "
        + "FINAL LAST(\"DOWN\".\"net_weight\", 0) AS \"BOTTOM_NW\", "
        + "FINAL LAST(\"UP\".\"net_weight\", 0) AS \"END_NW\"\n"
        + "ONE ROW PER MATCH\n"
        + "AFTER MATCH SKIP TO NEXT ROW\n"
        + "PATTERN (\"STRT\" \"DOWN\" + \"UP\" +)\n"
        + "DEFINE "
        + "\"DOWN\" AS PREV(\"DOWN\".\"net_weight\", 0) < "
        + "PREV(\"DOWN\".\"net_weight\", 1), "
        + "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
        + "PREV(\"UP\".\"net_weight\", 1))";
    sql(sql).ok(expected);
  }
  /** Verifies that a {@code RUNNING} measure is wrapped as
   * {@code FINAL (RUNNING ...)} under ONE ROW PER MATCH. */
  @Test void testMatchRecognizeMeasures3() {
    final String sql = "select *\n"
        + "  from \"product\" match_recognize\n"
        + "   (\n"
        + "   measures STRT.\"net_weight\" as start_nw,"
        + "   RUNNING LAST(DOWN.\"net_weight\") as bottom_nw,"
        + "   LAST(up.\"net_weight\") as end_nw"
        + "    pattern (strt down+ up+)\n"
        + "    define\n"
        + "      down as down.\"net_weight\" < PREV(down.\"net_weight\"),\n"
        + "      up as up.\"net_weight\" > prev(up.\"net_weight\")\n"
        + "  ) mr";
    final String expected = "SELECT *\n"
        + "FROM (SELECT *\n"
        + "FROM \"foodmart\".\"product\") "
        + "MATCH_RECOGNIZE(\n"
        + "MEASURES "
        + "FINAL \"STRT\".\"net_weight\" AS \"START_NW\", "
        + "FINAL (RUNNING LAST(\"DOWN\".\"net_weight\", 0)) AS \"BOTTOM_NW\", "
        + "FINAL LAST(\"UP\".\"net_weight\", 0) AS \"END_NW\"\n"
        + "ONE ROW PER MATCH\n"
        + "AFTER MATCH SKIP TO NEXT ROW\n"
        + "PATTERN (\"STRT\" \"DOWN\" + \"UP\" +)\n"
        + "DEFINE "
        + "\"DOWN\" AS PREV(\"DOWN\".\"net_weight\", 0) < "
        + "PREV(\"DOWN\".\"net_weight\", 1), "
        + "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
        + "PREV(\"UP\".\"net_weight\", 1))";
    sql(sql).ok(expected);
  }
  /** Verifies COUNT measures: per-variable, unqualified (printed with the
   * {@code "*"} qualifier), and RUNNING COUNT. */
  @Test void testMatchRecognizeMeasures4() {
    final String sql = "select *\n"
        + "  from \"product\" match_recognize\n"
        + "   (\n"
        + "   measures STRT.\"net_weight\" as start_nw,"
        + "   FINAL COUNT(up.\"net_weight\") as up_cnt,"
        + "   FINAL COUNT(\"net_weight\") as down_cnt,"
        + "   RUNNING COUNT(\"net_weight\") as running_cnt"
        + "    pattern (strt down+ up+)\n"
        + "    define\n"
        + "      down as down.\"net_weight\" < PREV(down.\"net_weight\"),\n"
        + "      up as up.\"net_weight\" > prev(up.\"net_weight\")\n"
        + "  ) mr";
    final String expected = "SELECT *\n"
        + "FROM (SELECT *\n"
        + "FROM \"foodmart\".\"product\") "
        + "MATCH_RECOGNIZE(\n"
        + "MEASURES "
        + "FINAL \"STRT\".\"net_weight\" AS \"START_NW\", "
        + "FINAL COUNT(\"UP\".\"net_weight\") AS \"UP_CNT\", "
        + "FINAL COUNT(\"*\".\"net_weight\") AS \"DOWN_CNT\", "
        + "FINAL (RUNNING COUNT(\"*\".\"net_weight\")) AS \"RUNNING_CNT\"\n"
        + "ONE ROW PER MATCH\n"
        + "AFTER MATCH SKIP TO NEXT ROW\n"
        + "PATTERN (\"STRT\" \"DOWN\" + \"UP\" +)\n"
        + "DEFINE "
        + "\"DOWN\" AS PREV(\"DOWN\".\"net_weight\", 0) < "
        + "PREV(\"DOWN\".\"net_weight\", 1), "
        + "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
        + "PREV(\"UP\".\"net_weight\", 1))";
    sql(sql).ok(expected);
  }
  /** Verifies that an {@code AVG} measure is rewritten as
   * {@code SUM(...) / COUNT(...)} in the generated SQL. */
  @Test void testMatchRecognizeMeasures5() {
    final String sql = "select *\n"
        + "  from \"product\" match_recognize\n"
        + "   (\n"
        + "   measures "
        + "   FIRST(STRT.\"net_weight\") as start_nw,"
        + "   LAST(UP.\"net_weight\") as up_cnt,"
        + "   AVG(DOWN.\"net_weight\") as down_cnt"
        + "    pattern (strt down+ up+)\n"
        + "    define\n"
        + "      down as down.\"net_weight\" < PREV(down.\"net_weight\"),\n"
        + "      up as up.\"net_weight\" > prev(up.\"net_weight\")\n"
        + "  ) mr";
    final String expected = "SELECT *\n"
        + "FROM (SELECT *\n"
        + "FROM \"foodmart\".\"product\") "
        + "MATCH_RECOGNIZE(\n"
        + "MEASURES "
        + "FINAL FIRST(\"STRT\".\"net_weight\", 0) AS \"START_NW\", "
        + "FINAL LAST(\"UP\".\"net_weight\", 0) AS \"UP_CNT\", "
        + "FINAL (SUM(\"DOWN\".\"net_weight\") / "
        + "COUNT(\"DOWN\".\"net_weight\")) AS \"DOWN_CNT\"\n"
        + "ONE ROW PER MATCH\n"
        + "AFTER MATCH SKIP TO NEXT ROW\n"
        + "PATTERN (\"STRT\" \"DOWN\" + \"UP\" +)\n"
        + "DEFINE "
        + "\"DOWN\" AS PREV(\"DOWN\".\"net_weight\", 0) < "
        + "PREV(\"DOWN\".\"net_weight\", 1), "
        + "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
        + "PREV(\"UP\".\"net_weight\", 1))";
    sql(sql).ok(expected);
  }
  /** Verifies a {@code FINAL SUM} measure alongside FIRST/LAST measures. */
  @Test void testMatchRecognizeMeasures6() {
    final String sql = "select *\n"
        + "  from \"product\" match_recognize\n"
        + "   (\n"
        + "   measures "
        + "   FIRST(STRT.\"net_weight\") as start_nw,"
        + "   LAST(DOWN.\"net_weight\") as up_cnt,"
        + "   FINAL SUM(DOWN.\"net_weight\") as down_cnt"
        + "    pattern (strt down+ up+)\n"
        + "    define\n"
        + "      down as down.\"net_weight\" < PREV(down.\"net_weight\"),\n"
        + "      up as up.\"net_weight\" > prev(up.\"net_weight\")\n"
        + "  ) mr";
    final String expected = "SELECT *\n"
        + "FROM (SELECT *\n"
        + "FROM \"foodmart\".\"product\") MATCH_RECOGNIZE(\n"
        + "MEASURES "
        + "FINAL FIRST(\"STRT\".\"net_weight\", 0) AS \"START_NW\", "
        + "FINAL LAST(\"DOWN\".\"net_weight\", 0) AS \"UP_CNT\", "
        + "FINAL SUM(\"DOWN\".\"net_weight\") AS \"DOWN_CNT\"\n"
        + "ONE ROW PER MATCH\n"
        + "AFTER MATCH SKIP TO NEXT ROW\n"
        + "PATTERN "
        + "(\"STRT\" \"DOWN\" + \"UP\" +)\n"
        + "DEFINE "
        + "\"DOWN\" AS PREV(\"DOWN\".\"net_weight\", 0) < "
        + "PREV(\"DOWN\".\"net_weight\", 1), "
        + "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
        + "PREV(\"UP\".\"net_weight\", 1))";
    sql(sql).ok(expected);
  }
  /** Verifies that an outer ORDER BY may reference measure aliases, which
   * come out uppercased and quoted in the generated SQL. */
  @Test void testMatchRecognizeMeasures7() {
    final String sql = "select *\n"
        + "  from \"product\" match_recognize\n"
        + "   (\n"
        + "   measures "
        + "   FIRST(STRT.\"net_weight\") as start_nw,"
        + "   LAST(DOWN.\"net_weight\") as up_cnt,"
        + "   FINAL SUM(DOWN.\"net_weight\") as down_cnt"
        + "    pattern (strt down+ up+)\n"
        + "    define\n"
        + "      down as down.\"net_weight\" < PREV(down.\"net_weight\"),\n"
        + "      up as up.\"net_weight\" > prev(up.\"net_weight\")\n"
        + "  ) mr order by start_nw, up_cnt";
    final String expected = "SELECT *\n"
        + "FROM (SELECT *\n"
        + "FROM \"foodmart\".\"product\") MATCH_RECOGNIZE(\n"
        + "MEASURES "
        + "FINAL FIRST(\"STRT\".\"net_weight\", 0) AS \"START_NW\", "
        + "FINAL LAST(\"DOWN\".\"net_weight\", 0) AS \"UP_CNT\", "
        + "FINAL SUM(\"DOWN\".\"net_weight\") AS \"DOWN_CNT\"\n"
        + "ONE ROW PER MATCH\n"
        + "AFTER MATCH SKIP TO NEXT ROW\n"
        + "PATTERN "
        + "(\"STRT\" \"DOWN\" + \"UP\" +)\n"
        + "DEFINE "
        + "\"DOWN\" AS PREV(\"DOWN\".\"net_weight\", 0) < "
        + "PREV(\"DOWN\".\"net_weight\", 1), "
        + "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
        + "PREV(\"UP\".\"net_weight\", 1))\n"
        + "ORDER BY \"START_NW\", \"UP_CNT\"";
    sql(sql).ok(expected);
  }
  /** Verifies an explicit {@code AFTER MATCH SKIP TO NEXT ROW} clause
   * (which matches the converter's default). */
  @Test void testMatchRecognizePatternSkip1() {
    final String sql = "select *\n"
        + "  from \"product\" match_recognize\n"
        + "   (\n"
        + "    after match skip to next row\n"
        + "    pattern (strt down+ up+)\n"
        + "    define\n"
        + "      down as down.\"net_weight\" < PREV(down.\"net_weight\"),\n"
        + "      up as up.\"net_weight\" > NEXT(up.\"net_weight\")\n"
        + "  ) mr";
    final String expected = "SELECT *\n"
        + "FROM (SELECT *\n"
        + "FROM \"foodmart\".\"product\") MATCH_RECOGNIZE(\n"
        + "ONE ROW PER MATCH\n"
        + "AFTER MATCH SKIP TO NEXT ROW\n"
        + "PATTERN (\"STRT\" \"DOWN\" + \"UP\" +)\n"
        + "DEFINE "
        + "\"DOWN\" AS PREV(\"DOWN\".\"net_weight\", 0) < "
        + "PREV(\"DOWN\".\"net_weight\", 1), "
        + "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
        + "NEXT(PREV(\"UP\".\"net_weight\", 0), 1))";
    sql(sql).ok(expected);
  }
  /** Verifies the {@code AFTER MATCH SKIP PAST LAST ROW} option. */
  @Test void testMatchRecognizePatternSkip2() {
    final String sql = "select *\n"
        + "  from \"product\" match_recognize\n"
        + "   (\n"
        + "    after match skip past last row\n"
        + "    pattern (strt down+ up+)\n"
        + "    define\n"
        + "      down as down.\"net_weight\" < PREV(down.\"net_weight\"),\n"
        + "      up as up.\"net_weight\" > NEXT(up.\"net_weight\")\n"
        + "  ) mr";
    final String expected = "SELECT *\n"
        + "FROM (SELECT *\n"
        + "FROM \"foodmart\".\"product\") MATCH_RECOGNIZE(\n"
        + "ONE ROW PER MATCH\n"
        + "AFTER MATCH SKIP PAST LAST ROW\n"
        + "PATTERN (\"STRT\" \"DOWN\" + \"UP\" +)\n"
        + "DEFINE "
        + "\"DOWN\" AS PREV(\"DOWN\".\"net_weight\", 0) < "
        + "PREV(\"DOWN\".\"net_weight\", 1), "
        + "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
        + "NEXT(PREV(\"UP\".\"net_weight\", 0), 1))";
    sql(sql).ok(expected);
  }
  /** Verifies {@code AFTER MATCH SKIP TO FIRST <var>} with a quoted,
   * uppercased pattern-variable name in the output. */
  @Test void testMatchRecognizePatternSkip3() {
    final String sql = "select *\n"
        + "  from \"product\" match_recognize\n"
        + "   (\n"
        + "    after match skip to FIRST down\n"
        + "    pattern (strt down+ up+)\n"
        + "    define\n"
        + "      down as down.\"net_weight\" < PREV(down.\"net_weight\"),\n"
        + "      up as up.\"net_weight\" > NEXT(up.\"net_weight\")\n"
        + "  ) mr";
    final String expected = "SELECT *\n"
        + "FROM (SELECT *\n"
        + "FROM \"foodmart\".\"product\") MATCH_RECOGNIZE(\n"
        + "ONE ROW PER MATCH\n"
        + "AFTER MATCH SKIP TO FIRST \"DOWN\"\n"
        + "PATTERN (\"STRT\" \"DOWN\" + \"UP\" +)\n"
        + "DEFINE \"DOWN\" AS PREV(\"DOWN\".\"net_weight\", 0) < "
        + "PREV(\"DOWN\".\"net_weight\", 1), "
        + "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
        + "NEXT(PREV(\"UP\".\"net_weight\", 0), 1))";
    sql(sql).ok(expected);
  }
  /** Verifies the explicit {@code AFTER MATCH SKIP TO LAST <var>} option. */
  @Test void testMatchRecognizePatternSkip4() {
    final String sql = "select *\n"
        + "  from \"product\" match_recognize\n"
        + "   (\n"
        + "    after match skip to last down\n"
        + "    pattern (strt down+ up+)\n"
        + "    define\n"
        + "      down as down.\"net_weight\" < PREV(down.\"net_weight\"),\n"
        + "      up as up.\"net_weight\" > NEXT(up.\"net_weight\")\n"
        + "  ) mr";
    final String expected = "SELECT *\n"
        + "FROM (SELECT *\n"
        + "FROM \"foodmart\".\"product\") MATCH_RECOGNIZE(\n"
        + "ONE ROW PER MATCH\n"
        + "AFTER MATCH SKIP TO LAST \"DOWN\"\n"
        + "PATTERN (\"STRT\" \"DOWN\" + \"UP\" +)\n"
        + "DEFINE "
        + "\"DOWN\" AS PREV(\"DOWN\".\"net_weight\", 0) < "
        + "PREV(\"DOWN\".\"net_weight\", 1), "
        + "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
        + "NEXT(PREV(\"UP\".\"net_weight\", 0), 1))";
    sql(sql).ok(expected);
  }
  /** Verifies that bare {@code skip to <var>} is normalized to
   * {@code SKIP TO LAST <var>} in the generated SQL. */
  @Test void testMatchRecognizePatternSkip5() {
    final String sql = "select *\n"
        + "  from \"product\" match_recognize\n"
        + "   (\n"
        + "    after match skip to down\n"
        + "    pattern (strt down+ up+)\n"
        + "    define\n"
        + "      down as down.\"net_weight\" < PREV(down.\"net_weight\"),\n"
        + "      up as up.\"net_weight\" > NEXT(up.\"net_weight\")\n"
        + "  ) mr";
    final String expected = "SELECT *\n"
        + "FROM (SELECT *\n"
        + "FROM \"foodmart\".\"product\") MATCH_RECOGNIZE(\n"
        + "ONE ROW PER MATCH\n"
        + "AFTER MATCH SKIP TO LAST \"DOWN\"\n"
        + "PATTERN (\"STRT\" \"DOWN\" + \"UP\" +)\n"
        + "DEFINE "
        + "\"DOWN\" AS PREV(\"DOWN\".\"net_weight\", 0) < "
        + "PREV(\"DOWN\".\"net_weight\", 1), "
        + "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
        + "NEXT(PREV(\"UP\".\"net_weight\", 0), 1))";
    sql(sql).ok(expected);
  }
  /** Verifies the SUBSET clause; note the member variables are re-emitted
   * in sorted order ({@code ("DOWN", "STRT")}), not input order. */
  @Test void testMatchRecognizeSubset1() {
    final String sql = "select *\n"
        + "  from \"product\" match_recognize\n"
        + "   (\n"
        + "    after match skip to down\n"
        + "    pattern (strt down+ up+)\n"
        + "    subset stdn = (strt, down)\n"
        + "    define\n"
        + "      down as down.\"net_weight\" < PREV(down.\"net_weight\"),\n"
        + "      up as up.\"net_weight\" > NEXT(up.\"net_weight\")\n"
        + "  ) mr";
    final String expected = "SELECT *\n"
        + "FROM (SELECT *\n"
        + "FROM \"foodmart\".\"product\") MATCH_RECOGNIZE(\n"
        + "ONE ROW PER MATCH\n"
        + "AFTER MATCH SKIP TO LAST \"DOWN\"\n"
        + "PATTERN (\"STRT\" \"DOWN\" + \"UP\" +)\n"
        + "SUBSET \"STDN\" = (\"DOWN\", \"STRT\")\n"
        + "DEFINE "
        + "\"DOWN\" AS PREV(\"DOWN\".\"net_weight\", 0) < "
        + "PREV(\"DOWN\".\"net_weight\", 1), "
        + "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
        + "NEXT(PREV(\"UP\".\"net_weight\", 0), 1))";
    sql(sql).ok(expected);
  }
  /** Verifies an AVG measure over a SUBSET variable; AVG is rewritten as
   * {@code SUM(...) / COUNT(...)} against the subset. */
  @Test void testMatchRecognizeSubset2() {
    final String sql = "select *\n"
        + "  from \"product\" match_recognize\n"
        + "   (\n"
        + "   measures STRT.\"net_weight\" as start_nw,"
        + "   LAST(DOWN.\"net_weight\") as bottom_nw,"
        + "   AVG(STDN.\"net_weight\") as avg_stdn"
        + "    pattern (strt down+ up+)\n"
        + "    subset stdn = (strt, down)\n"
        + "    define\n"
        + "      down as down.\"net_weight\" < PREV(down.\"net_weight\"),\n"
        + "      up as up.\"net_weight\" > prev(up.\"net_weight\")\n"
        + "  ) mr";
    final String expected = "SELECT *\n"
        + "FROM (SELECT *\n"
        + "FROM \"foodmart\".\"product\") "
        + "MATCH_RECOGNIZE(\n"
        + "MEASURES "
        + "FINAL \"STRT\".\"net_weight\" AS \"START_NW\", "
        + "FINAL LAST(\"DOWN\".\"net_weight\", 0) AS \"BOTTOM_NW\", "
        + "FINAL (SUM(\"STDN\".\"net_weight\") / "
        + "COUNT(\"STDN\".\"net_weight\")) AS \"AVG_STDN\"\n"
        + "ONE ROW PER MATCH\n"
        + "AFTER MATCH SKIP TO NEXT ROW\n"
        + "PATTERN (\"STRT\" \"DOWN\" + \"UP\" +)\n"
        + "SUBSET \"STDN\" = (\"DOWN\", \"STRT\")\n"
        + "DEFINE "
        + "\"DOWN\" AS PREV(\"DOWN\".\"net_weight\", 0) < "
        + "PREV(\"DOWN\".\"net_weight\", 1), "
        + "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
        + "PREV(\"UP\".\"net_weight\", 1))";
    sql(sql).ok(expected);
  }
  /** Verifies a SUM measure over a SUBSET variable (emitted as-is,
   * unlike AVG which is decomposed). */
  @Test void testMatchRecognizeSubset3() {
    final String sql = "select *\n"
        + "  from \"product\" match_recognize\n"
        + "   (\n"
        + "   measures STRT.\"net_weight\" as start_nw,"
        + "   LAST(DOWN.\"net_weight\") as bottom_nw,"
        + "   SUM(STDN.\"net_weight\") as avg_stdn"
        + "    pattern (strt down+ up+)\n"
        + "    subset stdn = (strt, down)\n"
        + "    define\n"
        + "      down as down.\"net_weight\" < PREV(down.\"net_weight\"),\n"
        + "      up as up.\"net_weight\" > prev(up.\"net_weight\")\n"
        + "  ) mr";
    final String expected = "SELECT *\n"
        + "FROM (SELECT *\n"
        + "FROM \"foodmart\".\"product\") "
        + "MATCH_RECOGNIZE(\n"
        + "MEASURES "
        + "FINAL \"STRT\".\"net_weight\" AS \"START_NW\", "
        + "FINAL LAST(\"DOWN\".\"net_weight\", 0) AS \"BOTTOM_NW\", "
        + "FINAL SUM(\"STDN\".\"net_weight\") AS \"AVG_STDN\"\n"
        + "ONE ROW PER MATCH\n"
        + "AFTER MATCH SKIP TO NEXT ROW\n"
        + "PATTERN (\"STRT\" \"DOWN\" + \"UP\" +)\n"
        + "SUBSET \"STDN\" = (\"DOWN\", \"STRT\")\n"
        + "DEFINE "
        + "\"DOWN\" AS PREV(\"DOWN\".\"net_weight\", 0) < "
        + "PREV(\"DOWN\".\"net_weight\", 1), "
        + "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
        + "PREV(\"UP\".\"net_weight\", 1))";
    sql(sql).ok(expected);
  }
  /** Verifies multiple SUBSET definitions in a single clause. */
  @Test void testMatchRecognizeSubset4() {
    final String sql = "select *\n"
        + "  from \"product\" match_recognize\n"
        + "   (\n"
        + "   measures STRT.\"net_weight\" as start_nw,"
        + "   LAST(DOWN.\"net_weight\") as bottom_nw,"
        + "   SUM(STDN.\"net_weight\") as avg_stdn"
        + "    pattern (strt down+ up+)\n"
        + "    subset stdn = (strt, down), stdn2 = (strt, down)\n"
        + "    define\n"
        + "      down as down.\"net_weight\" < PREV(down.\"net_weight\"),\n"
        + "      up as up.\"net_weight\" > prev(up.\"net_weight\")\n"
        + "  ) mr";
    final String expected = "SELECT *\n"
        + "FROM (SELECT *\n"
        + "FROM \"foodmart\".\"product\") "
        + "MATCH_RECOGNIZE(\n"
        + "MEASURES "
        + "FINAL \"STRT\".\"net_weight\" AS \"START_NW\", "
        + "FINAL LAST(\"DOWN\".\"net_weight\", 0) AS \"BOTTOM_NW\", "
        + "FINAL SUM(\"STDN\".\"net_weight\") AS \"AVG_STDN\"\n"
        + "ONE ROW PER MATCH\n"
        + "AFTER MATCH SKIP TO NEXT ROW\n"
        + "PATTERN (\"STRT\" \"DOWN\" + \"UP\" +)\n"
        + "SUBSET \"STDN\" = (\"DOWN\", \"STRT\"), \"STDN2\" = (\"DOWN\", \"STRT\")\n"
        + "DEFINE "
        + "\"DOWN\" AS PREV(\"DOWN\".\"net_weight\", 0) < "
        + "PREV(\"DOWN\".\"net_weight\", 1), "
        + "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
        + "PREV(\"UP\".\"net_weight\", 1))";
    sql(sql).ok(expected);
  }
  /** Verifies an explicit {@code ONE ROW PER MATCH} clause; measures are
   * emitted with FINAL semantics, same as the implicit default. */
  @Test void testMatchRecognizeRowsPerMatch1() {
    final String sql = "select *\n"
        + "  from \"product\" match_recognize\n"
        + "   (\n"
        + "   measures STRT.\"net_weight\" as start_nw,"
        + "   LAST(DOWN.\"net_weight\") as bottom_nw,"
        + "   SUM(STDN.\"net_weight\") as avg_stdn"
        + "    ONE ROW PER MATCH\n"
        + "    pattern (strt down+ up+)\n"
        + "    subset stdn = (strt, down), stdn2 = (strt, down)\n"
        + "    define\n"
        + "      down as down.\"net_weight\" < PREV(down.\"net_weight\"),\n"
        + "      up as up.\"net_weight\" > prev(up.\"net_weight\")\n"
        + "  ) mr";
    final String expected = "SELECT *\n"
        + "FROM (SELECT *\n"
        + "FROM \"foodmart\".\"product\") "
        + "MATCH_RECOGNIZE(\n"
        + "MEASURES "
        + "FINAL \"STRT\".\"net_weight\" AS \"START_NW\", "
        + "FINAL LAST(\"DOWN\".\"net_weight\", 0) AS \"BOTTOM_NW\", "
        + "FINAL SUM(\"STDN\".\"net_weight\") AS \"AVG_STDN\"\n"
        + "ONE ROW PER MATCH\n"
        + "AFTER MATCH SKIP TO NEXT ROW\n"
        + "PATTERN (\"STRT\" \"DOWN\" + \"UP\" +)\n"
        + "SUBSET \"STDN\" = (\"DOWN\", \"STRT\"), \"STDN2\" = (\"DOWN\", \"STRT\")\n"
        + "DEFINE "
        + "\"DOWN\" AS PREV(\"DOWN\".\"net_weight\", 0) < "
        + "PREV(\"DOWN\".\"net_weight\", 1), "
        + "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
        + "PREV(\"UP\".\"net_weight\", 1))";
    sql(sql).ok(expected);
  }
  /** Verifies {@code ALL ROWS PER MATCH}: every measure is emitted with
   * RUNNING (not FINAL) semantics in the generated SQL. */
  @Test void testMatchRecognizeRowsPerMatch2() {
    final String sql = "select *\n"
        + "  from \"product\" match_recognize\n"
        + "   (\n"
        + "   measures STRT.\"net_weight\" as start_nw,"
        + "   LAST(DOWN.\"net_weight\") as bottom_nw,"
        + "   SUM(STDN.\"net_weight\") as avg_stdn"
        + "    ALL ROWS PER MATCH\n"
        + "    pattern (strt down+ up+)\n"
        + "    subset stdn = (strt, down), stdn2 = (strt, down)\n"
        + "    define\n"
        + "      down as down.\"net_weight\" < PREV(down.\"net_weight\"),\n"
        + "      up as up.\"net_weight\" > prev(up.\"net_weight\")\n"
        + "  ) mr";
    final String expected = "SELECT *\n"
        + "FROM (SELECT *\n"
        + "FROM \"foodmart\".\"product\") "
        + "MATCH_RECOGNIZE(\n"
        + "MEASURES "
        + "RUNNING \"STRT\".\"net_weight\" AS \"START_NW\", "
        + "RUNNING LAST(\"DOWN\".\"net_weight\", 0) AS \"BOTTOM_NW\", "
        + "RUNNING SUM(\"STDN\".\"net_weight\") AS \"AVG_STDN\"\n"
        + "ALL ROWS PER MATCH\n"
        + "AFTER MATCH SKIP TO NEXT ROW\n"
        + "PATTERN (\"STRT\" \"DOWN\" + \"UP\" +)\n"
        + "SUBSET \"STDN\" = (\"DOWN\", \"STRT\"), \"STDN2\" = (\"DOWN\", \"STRT\")\n"
        + "DEFINE "
        + "\"DOWN\" AS PREV(\"DOWN\".\"net_weight\", 0) < "
        + "PREV(\"DOWN\".\"net_weight\", 1), "
        + "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
        + "PREV(\"UP\".\"net_weight\", 1))";
    sql(sql).ok(expected);
  }
  /** Verifies the {@code WITHIN <interval>} clause on the PATTERN, together
   * with an ORDER BY inside MATCH_RECOGNIZE. */
  @Test void testMatchRecognizeWithin() {
    final String sql = "select *\n"
        + "  from \"employee\" match_recognize\n"
        + "   (\n"
        + "   order by \"hire_date\"\n"
        + "   ALL ROWS PER MATCH\n"
        + "   pattern (strt down+ up+) within interval '3:12:22.123' hour to second\n"
        + "   define\n"
        + "     down as down.\"salary\" < PREV(down.\"salary\"),\n"
        + "     up as up.\"salary\" > prev(up.\"salary\")\n"
        + "  ) mr";
    final String expected = "SELECT *\n"
        + "FROM (SELECT *\n"
        + "FROM \"foodmart\".\"employee\") "
        + "MATCH_RECOGNIZE(\n"
        + "ORDER BY \"hire_date\"\n"
        + "ALL ROWS PER MATCH\n"
        + "AFTER MATCH SKIP TO NEXT ROW\n"
        + "PATTERN (\"STRT\" \"DOWN\" + \"UP\" +) WITHIN INTERVAL '3:12:22.123' HOUR TO SECOND\n"
        + "DEFINE "
        + "\"DOWN\" AS PREV(\"DOWN\".\"salary\", 0) < "
        + "PREV(\"DOWN\".\"salary\", 1), "
        + "\"UP\" AS PREV(\"UP\".\"salary\", 0) > "
        + "PREV(\"UP\".\"salary\", 1))";
    sql(sql).ok(expected);
  }
  /** Verifies PARTITION BY / ORDER BY inside MATCH_RECOGNIZE and that an
   * {@code IN (0, 1)} predicate in DEFINE is expanded to an OR of equals. */
  @Test void testMatchRecognizeIn() {
    final String sql = "select *\n"
        + "  from \"product\" match_recognize\n"
        + "   (\n"
        + "   partition by \"product_class_id\", \"brand_name\"\n"
        + "   order by \"product_class_id\" asc, \"brand_name\" desc\n"
        + "   pattern (strt down+ up+)\n"
        + "   define\n"
        + "     down as down.\"net_weight\" in (0, 1),\n"
        + "     up as up.\"net_weight\" > prev(up.\"net_weight\")\n"
        + "  ) mr";
    final String expected = "SELECT *\n"
        + "FROM (SELECT *\n"
        + "FROM \"foodmart\".\"product\") MATCH_RECOGNIZE(\n"
        + "PARTITION BY \"product_class_id\", \"brand_name\"\n"
        + "ORDER BY \"product_class_id\", \"brand_name\" DESC\n"
        + "ONE ROW PER MATCH\n"
        + "AFTER MATCH SKIP TO NEXT ROW\n"
        + "PATTERN (\"STRT\" \"DOWN\" + \"UP\" +)\n"
        + "DEFINE "
        + "\"DOWN\" AS PREV(\"DOWN\".\"net_weight\", 0) = "
        + "0 OR PREV(\"DOWN\".\"net_weight\", 0) = 1, "
        + "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
        + "PREV(\"UP\".\"net_weight\", 1))";
    sql(sql).ok(expected);
  }
  /** Verifies how a VALUES row constructor is rendered per dialect:
   * native VALUES (HSQLDB, PostgreSQL, Redshift), UNION ALL of SELECTs
   * (MySQL, Hive, Spark, BigQuery, Snowflake), and SELECT ... FROM DUAL
   * (Oracle). Note 'x' is padded to 'x ' to match CHAR(2). */
  @Test void testValues() {
    final String sql = "select \"a\"\n"
        + "from (values (1, 'x'), (2, 'yy')) as t(\"a\", \"b\")";
    final String expectedHsqldb = "SELECT a\n"
        + "FROM (VALUES (1, 'x '),\n"
        + "(2, 'yy')) AS t (a, b)";
    final String expectedMysql = "SELECT `a`\n"
        + "FROM (SELECT 1 AS `a`, 'x ' AS `b`\n"
        + "UNION ALL\n"
        + "SELECT 2 AS `a`, 'yy' AS `b`) AS `t`";
    final String expectedPostgresql = "SELECT \"a\"\n"
        + "FROM (VALUES (1, 'x '),\n"
        + "(2, 'yy')) AS \"t\" (\"a\", \"b\")";
    final String expectedOracle = "SELECT \"a\"\n"
        + "FROM (SELECT 1 \"a\", 'x ' \"b\"\n"
        + "FROM \"DUAL\"\n"
        + "UNION ALL\n"
        + "SELECT 2 \"a\", 'yy' \"b\"\n"
        + "FROM \"DUAL\")";
    final String expectedHive = "SELECT a\n"
        + "FROM (SELECT 1 a, 'x ' b\n"
        + "UNION ALL\n"
        + "SELECT 2 a, 'yy' b)";
    final String expectedSpark = "SELECT a\n"
        + "FROM (SELECT 1 a, 'x ' b\n"
        + "UNION ALL\n"
        + "SELECT 2 a, 'yy' b)";
    final String expectedBigQuery = "SELECT a\n"
        + "FROM (SELECT 1 AS a, 'x ' AS b\n"
        + "UNION ALL\n"
        + "SELECT 2 AS a, 'yy' AS b)";
    final String expectedSnowflake = "SELECT \"a\"\n"
        + "FROM (SELECT 1 AS \"a\", 'x ' AS \"b\"\n"
        + "UNION ALL\n"
        + "SELECT 2 AS \"a\", 'yy' AS \"b\")";
    final String expectedRedshift = expectedPostgresql;
    sql(sql)
        .withHsqldb()
        .ok(expectedHsqldb)
        .withMysql()
        .ok(expectedMysql)
        .withPostgresql()
        .ok(expectedPostgresql)
        .withOracle()
        .ok(expectedOracle)
        .withHive()
        .ok(expectedHive)
        .withSpark()
        .ok(expectedSpark)
        .withBigQuery()
        .ok(expectedBigQuery)
        .withSnowflake()
        .ok(expectedSnowflake)
        .withRedshift()
        .ok(expectedRedshift);
  }
@Test void testValuesEmpty() {
final String sql = "select *\n"
+ "from (values (1, 'a'), (2, 'bb')) as t(x, y)\n"
+ "limit 0";
final RuleSet rules =
RuleSets.ofList(PruneEmptyRules.SORT_FETCH_ZERO_INSTANCE);
final String expectedMysql = "SELECT *\n"
+ "FROM (SELECT NULL AS `X`, NULL AS `Y`) AS `t`\n"
+ "WHERE 1 = 0";
final String expectedOracle = "SELECT NULL \"X\", NULL \"Y\"\n"
+ "FROM \"DUAL\"\n"
+ "WHERE 1 = 0";
final String expectedPostgresql = "SELECT *\n"
+ "FROM (VALUES (NULL, NULL)) AS \"t\" (\"X\", \"Y\")\n"
+ "WHERE 1 = 0";
sql(sql)
.optimize(rules, null)
.withMysql()
.ok(expectedMysql)
.withOracle()
.ok(expectedOracle)
.withPostgresql()
.ok(expectedPostgresql);
}
  /** Test case for
   * <a href="https://issues.apache.org/jira/browse/CALCITE-3840">[CALCITE-3840]
   * Re-aliasing of VALUES that has column aliases produces wrong SQL in the
   * JDBC adapter</a>. */
  @Test void testValuesReAlias() {
    final RelBuilder builder = relBuilder();
    // Self-join of the same VALUES; the second occurrence must get a fresh
    // alias ("t0") while keeping its column aliases.
    final RelNode root = builder
        .values(new String[]{ "a", "b" }, 1, "x ", 2, "yy")
        .values(new String[]{ "a", "b" }, 1, "x ", 2, "yy")
        .join(JoinRelType.FULL)
        .project(builder.field("a"))
        .build();
    final String expectedSql = "SELECT \"t\".\"a\"\n"
        + "FROM (VALUES (1, 'x '),\n"
        + "(2, 'yy')) AS \"t\" (\"a\", \"b\")\n"
        + "FULL JOIN (VALUES (1, 'x '),\n"
        + "(2, 'yy')) AS \"t0\" (\"a\", \"b\") ON TRUE";
    assertThat(toSql(root), isLinux(expectedSql));
    // Now with indentation.
    final String expectedSql2 = "SELECT \"t\".\"a\"\n"
        + "FROM (VALUES (1, 'x '),\n"
        + "        (2, 'yy')) AS \"t\" (\"a\", \"b\")\n"
        + "  FULL JOIN (VALUES (1, 'x '),\n"
        + "      (2, 'yy')) AS \"t0\" (\"a\", \"b\") ON TRUE";
    assertThat(
        toSql(root, DatabaseProduct.CALCITE.getDialect(),
            c -> c.withIndentation(2)),
        isLinux(expectedSql2));
  }
  /** Tests that a FROM-less SELECT stays FROM-less on Hive and BigQuery. */
  @Test void testSelectWithoutFromEmulationForHiveAndBigQuery() {
    String query = "select 2 + 2";
    final String expected = "SELECT 2 + 2";
    sql(query)
        .withHive().ok(expected)
        .withBigQuery().ok(expected);
  }

  /** Test case for
   * <a href="https://issues.apache.org/jira/browse/CALCITE-2118">[CALCITE-2118]
   * RelToSqlConverter should only generate "*" if field names match</a>. */
  @Test void testPreserveAlias() {
    final String sql = "select \"warehouse_class_id\" as \"id\",\n"
        + " \"description\"\n"
        + "from \"warehouse_class\"";
    final String expected = ""
        + "SELECT \"warehouse_class_id\" AS \"id\", \"description\"\n"
        + "FROM \"foodmart\".\"warehouse_class\"";
    sql(sql).ok(expected);
    // Without the alias the projection matches the table exactly, so "*" is
    // generated.
    final String sql2 = "select \"warehouse_class_id\", \"description\"\n"
        + "from \"warehouse_class\"";
    final String expected2 = "SELECT *\n"
        + "FROM \"foodmart\".\"warehouse_class\"";
    sql(sql2).ok(expected2);
  }

  /** Tests that a permuted column order is preserved (not collapsed to "*"). */
  @Test void testPreservePermutation() {
    final String sql = "select \"description\", \"warehouse_class_id\"\n"
        + "from \"warehouse_class\"";
    final String expected = "SELECT \"description\", \"warehouse_class_id\"\n"
        + "FROM \"foodmart\".\"warehouse_class\"";
    sql(sql).ok(expected);
  }
  /** Tests that field references through an aggregated sub-query use the
   * generated sub-query alias ("t0"). */
  @Test void testFieldNamesWithAggregateSubQuery() {
    final String query = "select mytable.\"city\",\n"
        + "  sum(mytable.\"store_sales\") as \"my-alias\"\n"
        + "from (select c.\"city\", s.\"store_sales\"\n"
        + "  from \"sales_fact_1997\" as s\n"
        + "    join \"customer\" as c using (\"customer_id\")\n"
        + "  group by c.\"city\", s.\"store_sales\") AS mytable\n"
        + "group by mytable.\"city\"";
    final String expected = "SELECT \"t0\".\"city\","
        + " SUM(\"t0\".\"store_sales\") AS \"my-alias\"\n"
        + "FROM (SELECT \"customer\".\"city\","
        + " \"sales_fact_1997\".\"store_sales\"\n"
        + "FROM \"foodmart\".\"sales_fact_1997\"\n"
        + "INNER JOIN \"foodmart\".\"customer\""
        + " ON \"sales_fact_1997\".\"customer_id\""
        + " = \"customer\".\"customer_id\"\n"
        + "GROUP BY \"customer\".\"city\","
        + " \"sales_fact_1997\".\"store_sales\") AS \"t0\"\n"
        + "GROUP BY \"t0\".\"city\"";
    sql(query).ok(expected);
  }

  /** Tests that a custom dialect's {@code unparseCall} override is invoked
   * for SqlSelect nodes during unparsing. */
  @Test void testUnparseSelectMustUseDialect() {
    final String query = "select * from \"product\"";
    final String expected = "SELECT *\n"
        + "FROM foodmart.product";

    // Single-element array so the anonymous dialect can record the callback.
    final boolean[] callsUnparseCallOnSqlSelect = {false};
    final SqlDialect dialect = new SqlDialect(SqlDialect.EMPTY_CONTEXT) {
      @Override public void unparseCall(SqlWriter writer, SqlCall call,
          int leftPrec, int rightPrec) {
        if (call instanceof SqlSelect) {
          callsUnparseCallOnSqlSelect[0] = true;
        }
        super.unparseCall(writer, call, leftPrec, rightPrec);
      }
    };
    sql(query).dialect(dialect).ok(expected);

    assertThat("Dialect must be able to customize unparseCall() for SqlSelect",
        callsUnparseCallOnSqlSelect[0], is(true));
  }

  /** Tests that a LATERAL sub-query referencing the outer table is rendered
   * with the correlation variable ("$cor0"). */
  @Test void testCorrelate() {
    final String sql = "select d.\"department_id\", d_plusOne "
        + "from \"department\" as d, "
        + "       lateral (select d.\"department_id\" + 1 as d_plusOne"
        + "                from (values(true)))";

    final String expected = "SELECT \"$cor0\".\"department_id\", \"$cor0\".\"D_PLUSONE\"\n"
        + "FROM \"foodmart\".\"department\" AS \"$cor0\",\n"
        + "LATERAL (SELECT \"$cor0\".\"department_id\" + 1 AS \"D_PLUSONE\"\n"
        + "FROM (VALUES (TRUE)) AS \"t\" (\"EXPR$0\")) AS \"t0\"";
    sql(sql).ok(expected);
  }

  /** Test case for
   * <a href="https://issues.apache.org/jira/browse/CALCITE-3651">[CALCITE-3651]
   * NullPointerException when convert relational algebra that correlates TableFunctionScan</a>. */
  @Test void testLateralCorrelate() {
    final String query = "select * from \"product\",\n"
        + "lateral table(RAMP(\"product\".\"product_id\"))";
    final String expected = "SELECT *\n"
        + "FROM \"foodmart\".\"product\" AS \"$cor0\",\n"
        + "LATERAL (SELECT *\n"
        + "FROM TABLE(RAMP(\"$cor0\".\"product_id\"))) AS \"t\"";
    sql(query).ok(expected);
  }
  /** Tests UNNEST of a COLLECT sub-query whose column is explicitly aliased;
   * the user alias (DEPTID) is preserved. */
  @Test void testUncollectExplicitAlias() {
    final String sql = "select did + 1\n"
        + "from unnest(select collect(\"department_id\") as deptid"
        + "            from \"department\") as t(did)";
    final String expected = "SELECT \"DEPTID\" + 1\n"
        + "FROM UNNEST (SELECT COLLECT(\"department_id\") AS \"DEPTID\"\n"
        + "FROM \"foodmart\".\"department\") AS \"t0\" (\"DEPTID\")";
    sql(sql).ok(expected);
  }

  /** Tests UNNEST of a COLLECT sub-query with no alias; a synthetic column
   * name ("col_0") is generated. */
  @Test void testUncollectImplicitAlias() {
    final String sql = "select did + 1\n"
        + "from unnest(select collect(\"department_id\") "
        + "            from \"department\") as t(did)";
    final String expected = "SELECT \"col_0\" + 1\n"
        + "FROM UNNEST (SELECT COLLECT(\"department_id\")\n"
        + "FROM \"foodmart\".\"department\") AS \"t0\" (\"col_0\")";
    sql(sql).ok(expected);
  }

  /** Tests COLLECT with a simple WITHIN GROUP ordering. */
  @Test void testWithinGroup1() {
    final String query = "select \"product_class_id\", collect(\"net_weight\") "
        + "within group (order by \"net_weight\" desc) "
        + "from \"product\" group by \"product_class_id\"";
    final String expected = "SELECT \"product_class_id\", COLLECT(\"net_weight\") "
        + "WITHIN GROUP (ORDER BY \"net_weight\" DESC)\n"
        + "FROM \"foodmart\".\"product\"\n"
        + "GROUP BY \"product_class_id\"";
    sql(query).ok(expected);
  }

  /** Tests WITHIN GROUP with multiple sort keys and NULLS LAST. */
  @Test void testWithinGroup2() {
    final String query = "select \"product_class_id\", collect(\"net_weight\") "
        + "within group (order by \"low_fat\", \"net_weight\" desc nulls last) "
        + "from \"product\" group by \"product_class_id\"";
    final String expected = "SELECT \"product_class_id\", COLLECT(\"net_weight\") "
        + "WITHIN GROUP (ORDER BY \"low_fat\", \"net_weight\" DESC NULLS LAST)\n"
        + "FROM \"foodmart\".\"product\"\n"
        + "GROUP BY \"product_class_id\"";
    sql(query).ok(expected);
  }

  /** Tests WITHIN GROUP alongside another aggregate (MIN) in the same
   * SELECT list. */
  @Test void testWithinGroup3() {
    final String query = "select \"product_class_id\", collect(\"net_weight\") "
        + "within group (order by \"net_weight\" desc), "
        + "min(\"low_fat\")"
        + "from \"product\" group by \"product_class_id\"";
    final String expected = "SELECT \"product_class_id\", COLLECT(\"net_weight\") "
        + "WITHIN GROUP (ORDER BY \"net_weight\" DESC), MIN(\"low_fat\")\n"
        + "FROM \"foodmart\".\"product\"\n"
        + "GROUP BY \"product_class_id\"";
    sql(query).ok(expected);
  }

  /** Tests WITHIN GROUP combined with FILTER; note FILTER is emitted before
   * WITHIN GROUP and the predicate gains an IS TRUE. */
  @Test void testWithinGroup4() {
    final String query = "select \"product_class_id\", collect(\"net_weight\") "
        + "within group (order by \"net_weight\" desc) filter (where \"net_weight\" > 0)"
        + "from \"product\" group by \"product_class_id\"";
    final String expected = "SELECT \"product_class_id\", COLLECT(\"net_weight\") "
        + "FILTER (WHERE \"net_weight\" > 0 IS TRUE) "
        + "WITHIN GROUP (ORDER BY \"net_weight\" DESC)\n"
        + "FROM \"foodmart\".\"product\"\n"
        + "GROUP BY \"product_class_id\"";
    sql(query).ok(expected);
  }
  /** Tests that FORMAT JSON ENCODING variants are all normalized to plain
   * FORMAT JSON on output. */
  @Test void testJsonValueExpressionOperator() {
    String query = "select \"product_name\" format json, "
        + "\"product_name\" format json encoding utf8, "
        + "\"product_name\" format json encoding utf16, "
        + "\"product_name\" format json encoding utf32 from \"product\"";
    final String expected = "SELECT \"product_name\" FORMAT JSON, "
        + "\"product_name\" FORMAT JSON, "
        + "\"product_name\" FORMAT JSON, "
        + "\"product_name\" FORMAT JSON\n"
        + "FROM \"foodmart\".\"product\"";
    sql(query).ok(expected);
  }

  /** Tests unparsing of JSON_EXISTS. */
  @Test void testJsonExists() {
    String query = "select json_exists(\"product_name\", 'lax $') from \"product\"";
    final String expected = "SELECT JSON_EXISTS(\"product_name\", 'lax $')\n"
        + "FROM \"foodmart\".\"product\"";
    sql(query).ok(expected);
  }

  /** Tests unparsing of JSON_PRETTY. */
  @Test void testJsonPretty() {
    String query = "select json_pretty(\"product_name\") from \"product\"";
    final String expected = "SELECT JSON_PRETTY(\"product_name\")\n"
        + "FROM \"foodmart\".\"product\"";
    sql(query).ok(expected);
  }

  /** Tests unparsing of JSON_VALUE. */
  @Test void testJsonValue() {
    String query = "select json_value(\"product_name\", 'lax $') from \"product\"";
    final String expected = "SELECT JSON_VALUE(\"product_name\", 'lax $')\n"
        + "FROM \"foodmart\".\"product\"";
    sql(query).ok(expected);
  }

  /** Tests that JSON_QUERY defaults (wrapper and ON EMPTY/ERROR clauses) are
   * written out explicitly. */
  @Test void testJsonQuery() {
    String query = "select json_query(\"product_name\", 'lax $') from \"product\"";
    final String expected = "SELECT JSON_QUERY(\"product_name\", 'lax $' "
        + "WITHOUT ARRAY WRAPPER NULL ON EMPTY NULL ON ERROR)\n"
        + "FROM \"foodmart\".\"product\"";
    sql(query).ok(expected);
  }

  /** Tests that JSON_ARRAY's implicit ABSENT ON NULL is made explicit. */
  @Test void testJsonArray() {
    String query = "select json_array(\"product_name\", \"product_name\") from \"product\"";
    final String expected = "SELECT JSON_ARRAY(\"product_name\", \"product_name\" ABSENT ON NULL)\n"
        + "FROM \"foodmart\".\"product\"";
    sql(query).ok(expected);
  }

  /** Tests that JSON_ARRAYAGG's implicit ABSENT ON NULL is made explicit. */
  @Test void testJsonArrayAgg() {
    String query = "select json_arrayagg(\"product_name\") from \"product\"";
    final String expected = "SELECT JSON_ARRAYAGG(\"product_name\" ABSENT ON NULL)\n"
        + "FROM \"foodmart\".\"product\"";
    sql(query).ok(expected);
  }

  /** Tests that JSON_OBJECT's KEY/VALUE form and NULL ON NULL default are
   * written out explicitly. */
  @Test void testJsonObject() {
    String query = "select json_object(\"product_name\": \"product_id\") from \"product\"";
    final String expected = "SELECT "
        + "JSON_OBJECT(KEY \"product_name\" VALUE \"product_id\" NULL ON NULL)\n"
        + "FROM \"foodmart\".\"product\"";
    sql(query).ok(expected);
  }

  /** Tests that JSON_OBJECTAGG's KEY/VALUE form and NULL ON NULL default are
   * written out explicitly. */
  @Test void testJsonObjectAgg() {
    String query = "select json_objectagg(\"product_name\": \"product_id\") from \"product\"";
    final String expected = "SELECT "
        + "JSON_OBJECTAGG(KEY \"product_name\" VALUE \"product_id\" NULL ON NULL)\n"
        + "FROM \"foodmart\".\"product\"";
    sql(query).ok(expected);
  }
  /** Tests IS [NOT] JSON predicates; the bare IS JSON form is normalized to
   * IS JSON VALUE. */
  @Test void testJsonPredicate() {
    String query = "select "
        + "\"product_name\" is json, "
        + "\"product_name\" is json value, "
        + "\"product_name\" is json object, "
        + "\"product_name\" is json array, "
        + "\"product_name\" is json scalar, "
        + "\"product_name\" is not json, "
        + "\"product_name\" is not json value, "
        + "\"product_name\" is not json object, "
        + "\"product_name\" is not json array, "
        + "\"product_name\" is not json scalar "
        + "from \"product\"";
    final String expected = "SELECT "
        + "\"product_name\" IS JSON VALUE, "
        + "\"product_name\" IS JSON VALUE, "
        + "\"product_name\" IS JSON OBJECT, "
        + "\"product_name\" IS JSON ARRAY, "
        + "\"product_name\" IS JSON SCALAR, "
        + "\"product_name\" IS NOT JSON VALUE, "
        + "\"product_name\" IS NOT JSON VALUE, "
        + "\"product_name\" IS NOT JSON OBJECT, "
        + "\"product_name\" IS NOT JSON ARRAY, "
        + "\"product_name\" IS NOT JSON SCALAR\n"
        + "FROM \"foodmart\".\"product\"";
    sql(query).ok(expected);
  }

  /** Tests that a comma join is emitted as CROSS JOIN on Spark. */
  @Test void testCrossJoinEmulationForSpark() {
    String query = "select * from \"employee\", \"department\"";
    final String expected = "SELECT *\n"
        + "FROM foodmart.employee\n"
        + "CROSS JOIN foodmart.department";
    sql(query).withSpark().ok(expected);
  }

  /** Tests that a comma join is emitted as INNER JOIN ... ON TRUE on
   * BigQuery. */
  @Test void testCrossJoinEmulationForBigQuery() {
    String query = "select * from \"employee\", \"department\"";
    final String expected = "SELECT *\n"
        + "FROM foodmart.employee\n"
        + "INNER JOIN foodmart.department ON TRUE";
    sql(query).withBigQuery().ok(expected);
  }

  /** Tests SUBSTRING FROM rendered with Spark's comma-argument syntax. */
  @Test void testSubstringInSpark() {
    final String query = "select substring(\"brand_name\" from 2) "
        + "from \"product\"\n";
    final String expected = "SELECT SUBSTRING(brand_name, 2)\n"
        + "FROM foodmart.product";
    sql(query).withSpark().ok(expected);
  }

  /** Tests SUBSTRING FROM ... FOR rendered with Spark's three-argument
   * syntax. */
  @Test void testSubstringWithForInSpark() {
    final String query = "select substring(\"brand_name\" from 2 for 3) "
        + "from \"product\"\n";
    final String expected = "SELECT SUBSTRING(brand_name, 2, 3)\n"
        + "FROM foodmart.product";
    sql(query).withSpark().ok(expected);
  }

  /** Tests FLOOR to a time unit rendered as DATE_TRUNC on Spark. */
  @Test void testFloorInSpark() {
    final String query = "select floor(\"hire_date\" TO MINUTE) "
        + "from \"employee\"";
    final String expected = "SELECT DATE_TRUNC('MINUTE', hire_date)\n"
        + "FROM foodmart.employee";
    sql(query).withSpark().ok(expected);
  }

  /** Tests that plain numeric FLOOR stays FLOOR on Spark. */
  @Test void testNumericFloorInSpark() {
    final String query = "select floor(\"salary\") "
        + "from \"employee\"";
    final String expected = "SELECT FLOOR(salary)\n"
        + "FROM foodmart.employee";
    sql(query).withSpark().ok(expected);
  }

  /** Tests unparsing of JSON_STORAGE_SIZE. */
  @Test void testJsonStorageSize() {
    String query = "select json_storage_size(\"product_name\") from \"product\"";
    final String expected = "SELECT JSON_STORAGE_SIZE(\"product_name\")\n"
        + "FROM \"foodmart\".\"product\"";
    sql(query).ok(expected);
  }
  /** Tests GROUP BY CUBE; Spark uses the postfix WITH CUBE form. */
  @Test void testCubeWithGroupBy() {
    final String query = "select count(*) "
        + "from \"foodmart\".\"product\" "
        + "group by cube(\"product_id\",\"product_class_id\")";
    final String expected = "SELECT COUNT(*)\n"
        + "FROM \"foodmart\".\"product\"\n"
        + "GROUP BY CUBE(\"product_id\", \"product_class_id\")";
    final String expectedInSpark = "SELECT COUNT(*)\n"
        + "FROM foodmart.product\n"
        + "GROUP BY product_id, product_class_id WITH CUBE";
    final String expectedPresto = "SELECT COUNT(*)\n"
        + "FROM \"foodmart\".\"product\"\n"
        + "GROUP BY CUBE(\"product_id\", \"product_class_id\")";
    sql(query)
        .ok(expected)
        .withSpark()
        .ok(expectedInSpark)
        .withPresto()
        .ok(expectedPresto);
  }

  /** Tests GROUP BY ROLLUP; Spark uses the postfix WITH ROLLUP form. */
  @Test void testRollupWithGroupBy() {
    final String query = "select count(*) "
        + "from \"foodmart\".\"product\" "
        + "group by rollup(\"product_id\",\"product_class_id\")";
    final String expected = "SELECT COUNT(*)\n"
        + "FROM \"foodmart\".\"product\"\n"
        + "GROUP BY ROLLUP(\"product_id\", \"product_class_id\")";
    final String expectedInSpark = "SELECT COUNT(*)\n"
        + "FROM foodmart.product\n"
        + "GROUP BY product_id, product_class_id WITH ROLLUP";
    final String expectedPresto = "SELECT COUNT(*)\n"
        + "FROM \"foodmart\".\"product\"\n"
        + "GROUP BY ROLLUP(\"product_id\", \"product_class_id\")";
    sql(query)
        .ok(expected)
        .withSpark()
        .ok(expectedInSpark)
        .withPresto()
        .ok(expectedPresto);
  }
@Test public void testCastInStringOperandOfComparison() {
final String query = "select \"employee_id\" "
+ "from \"foodmart\".\"employee\" "
+ "where 10 = cast('10' as int) and \"birth_date\" = cast('1914-02-02' as date) or "
+ "\"hire_date\" = cast('1996-01-01 '||'00:00:00' as timestamp)";
final String expected = "SELECT \"employee_id\"\n"
+ "FROM \"foodmart\".\"employee\"\n"
+ "WHERE 10 = '10' AND \"birth_date\" = '1914-02-02' OR \"hire_date\" = '1996-01-01 ' || "
+ "'00:00:00'";
final String expectedBiqquery = "SELECT employee_id\n"
+ "FROM foodmart.employee\n"
+ "WHERE 10 = CAST('10' AS INT64) AND birth_date = '1914-02-02' OR hire_date = CAST"
+ "(CONCAT('1996-01-01 ', '00:00:00') AS DATETIME)";
final String mssql = "SELECT [employee_id]\n"
+ "FROM [foodmart].[employee]\n"
+ "WHERE 10 = '10' AND [birth_date] = '1914-02-02' OR [hire_date] = CONCAT('1996-01-01 ', '00:00:00')";
sql(query)
.ok(expected)
.withBigQuery()
.ok(expectedBiqquery)
.withMssql()
.ok(mssql);
}
  /** Tests REGEXP_SUBSTR with two arguments (source, pattern) on BigQuery. */
  @Test public void testRegexSubstrFunction2Args() {
    final String query = "select regexp_substr('choco chico chipo', '.*cho*p*c*?.*')"
        + "from \"foodmart\".\"product\"";
    final String expected = "SELECT REGEXP_SUBSTR('choco chico chipo', '.*cho*p*c*?.*')\n"
        + "FROM foodmart.product";
    sql(query)
        .withBigQuery()
        .ok(expected);
  }

  /** Tests REGEXP_SUBSTR with a start-position argument on BigQuery. */
  @Test public void testRegexSubstrFunction3Args() {
    final String query = "select \"product_id\", regexp_substr('choco chico chipo', "
        + "'.*cho*p*c*?.*', 7)\n"
        + "from \"foodmart\".\"product\" where \"product_id\" = 1";
    final String expected = "SELECT product_id, REGEXP_SUBSTR('choco chico chipo', "
        + "'.*cho*p*c*?.*', 7)\n"
        + "FROM foodmart.product\n"
        + "WHERE product_id = 1";
    sql(query)
        .withBigQuery()
        .ok(expected);
  }

  /** Tests REGEXP_SUBSTR with start-position and occurrence arguments; note
   * the IN list is expanded to an OR chain. */
  @Test public void testRegexSubstrFunction4Args() {
    final String query = "select \"product_id\", regexp_substr('chocolate chip cookies', 'c+.{2}',"
        + " 4, 2)\n"
        + "from \"foodmart\".\"product\" where \"product_id\" in (1, 2, 3)";
    final String expected = "SELECT product_id, REGEXP_SUBSTR('chocolate chip "
        + "cookies', 'c+.{2}', 4, 2)\n"
        + "FROM foodmart.product\n"
        + "WHERE product_id = 1 OR product_id = 2 OR product_id = 3";
    sql(query)
        .withBigQuery()
        .ok(expected);
  }

  /** Tests that the five-argument match-parameter form folds the 'i' flag
   * into the pattern as an inline (?i) modifier for BigQuery. */
  @Test public void testRegexSubstrFunction5Args() {
    final String query = "select regexp_substr('chocolate Chip cookies', 'c+.{2}',"
        + " 1, 2, 'i')\n"
        + "from \"foodmart\".\"product\" where \"product_id\" in (1, 2, 3, 4)";
    final String expected = "SELECT "
        + "REGEXP_SUBSTR('chocolate Chip cookies', '(?i)c+.{2}', 1, 2)\n"
        + "FROM foodmart.product\n"
        + "WHERE product_id = 1 OR product_id = 2 OR product_id = 3 OR product_id = 4";
    sql(query)
        .withBigQuery()
        .ok(expected);
  }

  /** Tests that backslashes in the pattern are doubled for BigQuery. */
  @Test public void testRegexSubstrFunction5ArgswithBackSlash() {
    final String query = "select regexp_substr('chocolate Chip cookies','[-\\_] V[0-9]+',"
        + "1,1,'i')\n"
        + "from \"foodmart\".\"product\" where \"product_id\" in (1, 2, 3, 4)";
    final String expected = "SELECT "
        + "REGEXP_SUBSTR('chocolate Chip cookies', '(?i)[-\\\\_] V[0-9]+', 1, 1)\n"
        + "FROM foodmart.product\n"
        + "WHERE product_id = 1 OR product_id = 2 OR product_id = 3 OR product_id = 4";
    sql(query)
        .withBigQuery()
        .ok(expected);
  }
@Test public void testTimestampFunctionRelToSql() {
final RelBuilder builder = relBuilder();
final RexNode currentTimestampRexNode = builder.call(SqlLibraryOperators.CURRENT_TIMESTAMP,
builder.literal(6));
final RelNode root = builder
.scan("EMP")
.project(builder.alias(currentTimestampRexNode, "CT"))
.build();
final String expectedSql = "SELECT CURRENT_TIMESTAMP(6) AS \"CT\"\n"
+ "FROM \"scott\".\"EMP\"";
final String expectedBiqQuery = "SELECT CAST(FORMAT_TIMESTAMP('%F %H:%M:%E6S', "
+ "CURRENT_DATETIME()) AS DATETIME) AS CT\n"
+ "FROM scott.EMP";
final String expectedSpark = "SELECT CAST(DATE_FORMAT(CURRENT_TIMESTAMP, 'yyyy-MM-dd HH:mm:ss"
+ ".SSSSSS') AS TIMESTAMP) CT\nFROM scott.EMP";
final String expectedHive = "SELECT CAST(DATE_FORMAT(CURRENT_TIMESTAMP, 'yyyy-MM-dd HH:mm:ss"
+ ".ssssss') AS TIMESTAMP) CT\n"
+ "FROM scott.EMP";
assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
assertThat(toSql(root, DatabaseProduct.SPARK.getDialect()), isLinux(expectedSpark));
assertThat(toSql(root, DatabaseProduct.HIVE.getDialect()), isLinux(expectedHive));
}
  /** Tests unparsing of JSON_TYPE. */
  @Test void testJsonType() {
    String query = "select json_type(\"product_name\") from \"product\"";
    final String expected = "SELECT "
        + "JSON_TYPE(\"product_name\")\n"
        + "FROM \"foodmart\".\"product\"";
    sql(query).ok(expected);
  }

  /** Tests unparsing of JSON_DEPTH. */
  @Test void testJsonDepth() {
    String query = "select json_depth(\"product_name\") from \"product\"";
    final String expected = "SELECT "
        + "JSON_DEPTH(\"product_name\")\n"
        + "FROM \"foodmart\".\"product\"";
    sql(query).ok(expected);
  }

  /** Tests unparsing of JSON_LENGTH with and without a path argument. */
  @Test void testJsonLength() {
    String query = "select json_length(\"product_name\", 'lax $'), "
        + "json_length(\"product_name\") from \"product\"";
    final String expected = "SELECT JSON_LENGTH(\"product_name\", 'lax $'), "
        + "JSON_LENGTH(\"product_name\")\n"
        + "FROM \"foodmart\".\"product\"";
    sql(query).ok(expected);
  }

  /** Tests unparsing of JSON_KEYS. */
  @Test void testJsonKeys() {
    String query = "select json_keys(\"product_name\", 'lax $') from \"product\"";
    final String expected = "SELECT JSON_KEYS(\"product_name\", 'lax $')\n"
        + "FROM \"foodmart\".\"product\"";
    sql(query).ok(expected);
  }
  /** Tests date minus a negative MONTH interval: Hive uses ADD_MONTHS with a
   * negated count, BigQuery uses DATE_SUB, Spark keeps interval syntax. */
  @Test public void testDateSubIntervalMonthFunction() {
    String query = "select \"birth_date\" - INTERVAL -'1' MONTH from \"employee\"";
    final String expectedHive = "SELECT ADD_MONTHS(birth_date, -1)\n"
        + "FROM foodmart.employee";
    final String expectedSpark = "SELECT birth_date - INTERVAL '1' MONTH\nFROM foodmart.employee";
    final String expectedBigQuery = "SELECT DATE_SUB(birth_date, INTERVAL -1 MONTH)\n"
        + "FROM foodmart.employee";
    sql(query)
        .withHive()
        .ok(expectedHive)
        .withBigQuery()
        .ok(expectedBigQuery)
        .withSpark()
        .ok(expectedSpark);
  }

  /** Tests date plus a MONTH interval scaled by a negative literal. */
  @Test public void testDatePlusIntervalMonthFunctionWithArthOps() {
    String query = "select \"birth_date\" + -10 * INTERVAL '1' MONTH from \"employee\"";
    final String expectedHive = "SELECT ADD_MONTHS(birth_date, -10)\n"
        + "FROM foodmart.employee";
    final String expectedSpark = "SELECT birth_date + -10 * INTERVAL '1' MONTH\nFROM foodmart"
        + ".employee";
    final String expectedBigQuery = "SELECT DATE_ADD(birth_date, INTERVAL -10 MONTH)\n"
        + "FROM foodmart.employee";
    sql(query)
        .withHive()
        .ok(expectedHive)
        .withBigQuery()
        .ok(expectedBigQuery)
        .withSpark()
        .ok(expectedSpark);
  }

  /** Tests timestamp plus a scaled MONTH interval on BigQuery, which needs
   * DATETIME_ADD wrapped in casts. */
  @Test public void testTimestampPlusIntervalMonthFunctionWithArthOps() {
    String query = "select \"hire_date\" + -10 * INTERVAL '1' MONTH from \"employee\"";
    final String expectedBigQuery = "SELECT CAST(DATETIME_ADD(CAST(hire_date AS DATETIME), "
        + "INTERVAL "
        + "-10 MONTH) AS DATETIME)\n"
        + "FROM foodmart.employee";
    sql(query)
        .withBigQuery()
        .ok(expectedBigQuery);
  }

  /** Tests date plus a MONTH interval scaled by a column reference. */
  @Test public void testDatePlusIntervalMonthFunctionWithCol() {
    String query = "select \"birth_date\" + \"store_id\" * INTERVAL '10' MONTH from \"employee\"";
    final String expectedHive = "SELECT ADD_MONTHS(birth_date, store_id * 10)\n"
        + "FROM foodmart.employee";
    final String expectedSpark = "SELECT birth_date + store_id * INTERVAL '10' MONTH\nFROM "
        + "foodmart.employee";
    final String expectedBigQuery = "SELECT DATE_ADD(birth_date, INTERVAL store_id * 10 MONTH)\n"
        + "FROM foodmart.employee";
    sql(query)
        .withHive()
        .ok(expectedHive)
        .withBigQuery()
        .ok(expectedBigQuery)
        .withSpark()
        .ok(expectedSpark);
  }
  /** Tests date plus a MONTH interval scaled by a literal product (10 * 2). */
  @Test public void testDatePlusIntervalMonthFunctionWithArithOp() {
    String query = "select \"birth_date\" + 10 * INTERVAL '2' MONTH from \"employee\"";
    final String expectedHive = "SELECT ADD_MONTHS(birth_date, 10 * 2)\n"
        + "FROM foodmart.employee";
    final String expectedSpark = "SELECT birth_date + 10 * INTERVAL '2' MONTH\nFROM foodmart"
        + ".employee";
    final String expectedBigQuery = "SELECT DATE_ADD(birth_date, INTERVAL 10 * 2 MONTH)\n"
        + "FROM foodmart.employee";
    sql(query)
        .withHive()
        .ok(expectedHive)
        .withBigQuery()
        .ok(expectedBigQuery)
        .withSpark()
        .ok(expectedSpark);
  }

  /** Tests date plus a one-DAY interval: Hive DATE_ADD with a cast back to
   * DATE, BigQuery DATE_ADD, Snowflake DATEADD. */
  @Test public void testDatePlusColumnFunction() {
    String query = "select \"birth_date\" + INTERVAL '1' DAY from \"employee\"";
    final String expectedHive = "SELECT CAST(DATE_ADD(birth_date, 1) AS DATE)\n"
        + "FROM foodmart.employee";
    final String expectedSpark = "SELECT birth_date + INTERVAL '1' DAY\nFROM foodmart.employee";
    final String expectedBigQuery = "SELECT DATE_ADD(birth_date, INTERVAL 1 DAY)\n"
        + "FROM foodmart.employee";
    final String expectedSnowflake = "SELECT DATEADD(DAY, 1, \"birth_date\")\n"
        + "FROM \"foodmart\".\"employee\"";
    sql(query)
        .withHive()
        .ok(expectedHive)
        .withBigQuery()
        .ok(expectedBigQuery)
        .withSpark()
        .ok(expectedSpark)
        .withSnowflake()
        .ok(expectedSnowflake);
  }

  /** Tests date minus a one-DAY interval; Snowflake negates the count in
   * DATEADD. */
  @Test public void testDateSubColumnFunction() {
    String query = "select \"birth_date\" - INTERVAL '1' DAY from \"employee\"";
    final String expectedHive = "SELECT CAST(DATE_SUB(birth_date, 1) AS DATE)\n"
        + "FROM foodmart.employee";
    final String expectedSpark = "SELECT birth_date - INTERVAL '1' DAY\nFROM foodmart.employee";
    final String expectedBigQuery = "SELECT DATE_SUB(birth_date, INTERVAL 1 DAY)\n"
        + "FROM foodmart.employee";
    final String expectedSnowflake = "SELECT DATEADD(DAY, -1, \"birth_date\")\n"
        + "FROM \"foodmart\".\"employee\"";
    sql(query)
        .withHive()
        .ok(expectedHive)
        .withBigQuery()
        .ok(expectedBigQuery)
        .withSpark()
        .ok(expectedSpark)
        .withSnowflake()
        .ok(expectedSnowflake);
  }

  /** Tests a DATE literal plus a one-DAY interval. */
  @Test public void testDateValuePlusColumnFunction() {
    String query = "select DATE'2018-01-01' + INTERVAL '1' DAY from \"employee\"";
    final String expectedHive = "SELECT CAST(DATE_ADD(DATE '2018-01-01', 1) AS DATE)\n"
        + "FROM foodmart.employee";
    final String expectedSpark = "SELECT DATE '2018-01-01' + INTERVAL '1' DAY\nFROM foodmart"
        + ".employee";
    final String expectedBigQuery = "SELECT DATE_ADD(DATE '2018-01-01', INTERVAL 1 DAY)\n"
        + "FROM foodmart.employee";
    final String expectedSnowflake = "SELECT DATEADD(DAY, 1, DATE '2018-01-01')\n"
        + "FROM \"foodmart\".\"employee\"";
    sql(query)
        .withHive()
        .ok(expectedHive)
        .withBigQuery()
        .ok(expectedBigQuery)
        .withSpark()
        .ok(expectedSpark)
        .withSnowflake()
        .ok(expectedSnowflake);
  }

  /** Tests a DATE literal minus a one-DAY interval. */
  @Test public void testDateValueSubColumnFunction() {
    String query = "select DATE'2018-01-01' - INTERVAL '1' DAY from \"employee\"";
    final String expectedHive = "SELECT CAST(DATE_SUB(DATE '2018-01-01', 1) AS DATE)\n"
        + "FROM foodmart.employee";
    final String expectedSpark = "SELECT DATE '2018-01-01' - INTERVAL '1' DAY\n"
        + "FROM foodmart.employee";
    final String expectedBigQuery = "SELECT DATE_SUB(DATE '2018-01-01', INTERVAL 1 DAY)\n"
        + "FROM foodmart.employee";
    final String expectedSnowflake = "SELECT DATEADD(DAY, -1, DATE '2018-01-01')\n"
        + "FROM \"foodmart\".\"employee\"";
    sql(query)
        .withHive()
        .ok(expectedHive)
        .withBigQuery()
        .ok(expectedBigQuery)
        .withSpark()
        .ok(expectedSpark)
        .withSnowflake()
        .ok(expectedSnowflake);
  }
  /** Tests date plus a two-DAY interval. */
  @Test public void testDateIntColumnFunction() {
    String query = "select \"birth_date\" + INTERVAL '2' day from \"employee\"";
    final String expectedHive = "SELECT CAST(DATE_ADD(birth_date, 2) AS DATE)\n"
        + "FROM foodmart.employee";
    final String expectedSpark = "SELECT birth_date + INTERVAL '2' DAY\nFROM foodmart.employee";
    final String expectedBigQuery = "SELECT DATE_ADD(birth_date, INTERVAL 2 DAY)\n"
        + "FROM foodmart.employee";
    final String expectedSnowflake = "SELECT DATEADD(DAY, 2, \"birth_date\")\n"
        + "FROM \"foodmart\".\"employee\"";
    sql(query)
        .withHive()
        .ok(expectedHive)
        .withBigQuery()
        .ok(expectedBigQuery)
        .withSpark()
        .ok(expectedSpark)
        .withSnowflake()
        .ok(expectedSnowflake);
  }

  /** Tests timestamp plus a MINUTE interval on BigQuery (TIMESTAMP_ADD). */
  @Test public void testIntervalMinute() {
    String query = "select cast(\"birth_date\" as timestamp) + INTERVAL\n"
        + "'2' minute from \"employee\"";
    final String expectedBigQuery = "SELECT TIMESTAMP_ADD(CAST(birth_date AS "
        + "DATETIME), INTERVAL 2 MINUTE)\n"
        + "FROM foodmart.employee";
    sql(query)
        .withBigQuery()
        .ok(expectedBigQuery);
  }

  /** Tests timestamp plus an HOUR interval on BigQuery (TIMESTAMP_ADD). */
  @Test public void testIntervalHour() {
    String query = "select cast(\"birth_date\" as timestamp) + INTERVAL\n"
        + "'2' hour from \"employee\"";
    final String expectedBigQuery = "SELECT TIMESTAMP_ADD(CAST(birth_date AS "
        + "DATETIME), INTERVAL 2 HOUR)\n"
        + "FROM foodmart.employee";
    sql(query)
        .withBigQuery()
        .ok(expectedBigQuery);
  }

  /** Tests timestamp plus a SECOND interval on BigQuery (TIMESTAMP_ADD). */
  @Test public void testIntervalSecond() {
    String query = "select cast(\"birth_date\" as timestamp) + INTERVAL '2'\n"
        + "second from \"employee\"";
    final String expectedBigQuery = "SELECT TIMESTAMP_ADD(CAST(birth_date AS"
        + " DATETIME), INTERVAL 2 SECOND)\n"
        + "FROM foodmart.employee";
    sql(query)
        .withBigQuery()
        .ok(expectedBigQuery);
  }

  /** Tests date minus a two-DAY interval across dialects. */
  @Test public void testDateSubInterFunction() {
    String query = "select \"birth_date\" - INTERVAL '2' day from \"employee\"";
    final String expectedHive = "SELECT CAST(DATE_SUB(birth_date, 2) AS DATE)\n"
        + "FROM foodmart.employee";
    final String expectedSpark = "SELECT birth_date - INTERVAL '2' DAY"
        + "\nFROM foodmart.employee";
    final String expectedBigQuery = "SELECT DATE_SUB(birth_date, INTERVAL 2 DAY)\n"
        + "FROM foodmart.employee";
    final String expectedSnowflake = "SELECT DATEADD(DAY, -2, \"birth_date\")\n"
        + "FROM \"foodmart\".\"employee\"";
    sql(query)
        .withHive()
        .ok(expectedHive)
        .withBigQuery()
        .ok(expectedBigQuery)
        .withSpark()
        .ok(expectedSpark)
        .withSnowflake()
        .ok(expectedSnowflake);
  }
  /** Tests date plus a DAY interval scaled by a column; Snowflake collapses
   * the 1-day multiplier to plain date arithmetic. */
  @Test public void testDatePlusColumnVariFunction() {
    String query = "select \"birth_date\" + \"store_id\" * INTERVAL '1' DAY from \"employee\"";
    final String expectedHive = "SELECT CAST(DATE_ADD(birth_date, store_id) AS DATE)\n"
        + "FROM foodmart.employee";
    final String expectedSpark = "SELECT birth_date + store_id * INTERVAL '1' DAY"
        + "\nFROM foodmart.employee";
    final String expectedBigQuery = "SELECT DATE_ADD(birth_date, INTERVAL store_id DAY)\n"
        + "FROM foodmart.employee";
    final String expectedSnowflake = "SELECT (\"birth_date\" + \"store_id\")\n"
        + "FROM \"foodmart\".\"employee\"";
    sql(query)
        .withHive()
        .ok(expectedHive)
        .withBigQuery()
        .ok(expectedBigQuery)
        .withSpark()
        .ok(expectedSpark)
        .withSnowflake()
        .ok(expectedSnowflake);
  }

  /** Tests date plus (interval * column), the reversed operand order. */
  @Test public void testDatePlusIntervalColumnFunction() {
    String query = "select \"birth_date\" + INTERVAL '1' DAY * \"store_id\" from \"employee\"";
    final String expectedHive = "SELECT CAST(DATE_ADD(birth_date, store_id) AS DATE)\n"
        + "FROM foodmart.employee";
    final String expectedSpark = "SELECT birth_date + INTERVAL '1' DAY * store_id\nFROM foodmart"
        + ".employee";
    final String expectedBigQuery = "SELECT DATE_ADD(birth_date, INTERVAL store_id DAY)\n"
        + "FROM foodmart.employee";
    final String expectedSnowflake = "SELECT DATEADD(DAY, '1' * \"store_id\", \"birth_date\")\n"
        + "FROM \"foodmart\".\"employee\"";
    sql(query)
        .withHive()
        .ok(expectedHive)
        .withBigQuery()
        .ok(expectedBigQuery)
        .withSpark()
        .ok(expectedSpark)
        .withSnowflake()
        .ok(expectedSnowflake);
  }

  /** Tests date plus (interval * integer literal). */
  @Test public void testDatePlusIntervalIntFunction() {
    String query = "select \"birth_date\" + INTERVAL '1' DAY * 10 from \"employee\"";
    final String expectedHive = "SELECT CAST(DATE_ADD(birth_date, 10) AS DATE)\n"
        + "FROM foodmart.employee";
    final String expectedSpark = "SELECT birth_date + INTERVAL '1' DAY * 10\n"
        + "FROM foodmart.employee";
    final String expectedBigQuery = "SELECT DATE_ADD(birth_date, INTERVAL 10 DAY)\n"
        + "FROM foodmart.employee";
    final String expectedSnowflake = "SELECT DATEADD(DAY, '1' * 10, \"birth_date\")\n"
        + "FROM \"foodmart\".\"employee\"";
    sql(query)
        .withHive()
        .ok(expectedHive)
        .withBigQuery()
        .ok(expectedBigQuery)
        .withSpark()
        .ok(expectedSpark)
        .withSnowflake()
        .ok(expectedSnowflake);
  }

  /** Tests date minus a DAY interval scaled by a column. */
  @Test public void testDateSubColumnVariFunction() {
    String query = "select \"birth_date\" - \"store_id\" * INTERVAL '1' DAY from \"employee\"";
    final String expectedHive = "SELECT CAST(DATE_SUB(birth_date, store_id) AS DATE)\n"
        + "FROM foodmart.employee";
    final String expectedSpark = "SELECT birth_date - store_id * INTERVAL '1' DAY"
        + "\nFROM foodmart.employee";
    final String expectedBigQuery = "SELECT DATE_SUB(birth_date, INTERVAL store_id DAY)\n"
        + "FROM foodmart.employee";
    final String expectedSnowflake = "SELECT (\"birth_date\" - \"store_id\")\n"
        + "FROM \"foodmart\".\"employee\"";
    sql(query)
        .withHive()
        .ok(expectedHive)
        .withBigQuery()
        .ok(expectedBigQuery)
        .withSpark()
        .ok(expectedSpark)
        .withSnowflake()
        .ok(expectedSnowflake);
  }
@Test public void testDateValuePlusColumnVariFunction() {
String query = "select DATE'2018-01-01' + \"store_id\" * INTERVAL '1' DAY from \"employee\"";
final String expectedHive = "SELECT CAST(DATE_ADD(DATE '2018-01-01', store_id) AS DATE)\n"
+ "FROM foodmart.employee";
final String expectedSpark = "SELECT DATE '2018-01-01' + store_id * INTERVAL '1' DAY\nFROM "
+ "foodmart.employee";
final String expectedBigQuery = "SELECT DATE_ADD(DATE '2018-01-01', INTERVAL store_id DAY)\n"
+ "FROM foodmart.employee";
final String expectedSnowflake = "SELECT (DATE '2018-01-01' + \"store_id\")\n"
+ "FROM \"foodmart\".\"employee\"";
sql(query)
.withHive()
.ok(expectedHive)
.withBigQuery()
.ok(expectedBigQuery)
.withSpark()
.ok(expectedSpark)
.withSnowflake()
.ok(expectedSnowflake);
}
  /** Tests DATE plus {@code column * 11 * INTERVAL '1' DAY}; the arithmetic factor is
   * kept inside the dialect-specific date-add expression. */
  @Test public void testDatePlusColumnFunctionWithArithOp() {
    String query = "select \"birth_date\" + \"store_id\" *11 * INTERVAL '1' DAY from \"employee\"";
    final String expectedHive = "SELECT CAST(DATE_ADD(birth_date, store_id * 11) AS DATE)\n"
        + "FROM foodmart.employee";
    final String expectedSpark = "SELECT birth_date + store_id * 11 * INTERVAL '1' DAY\nFROM "
        + "foodmart.employee";
    final String expectedBigQuery = "SELECT DATE_ADD(birth_date, INTERVAL store_id * 11 DAY)\n"
        + "FROM foodmart.employee";
    final String expectedSnowflake = "SELECT (\"birth_date\" + \"store_id\" * 11)\n"
        + "FROM \"foodmart\".\"employee\"";
    sql(query)
        .withHive()
        .ok(expectedHive)
        .withBigQuery()
        .ok(expectedBigQuery)
        .withSpark()
        .ok(expectedSpark)
        .withSnowflake()
        .ok(expectedSnowflake);
  }
  /** Tests DATE plus {@code column * INTERVAL '11' DAY}; the multi-day interval is
   * rewritten as {@code column * 11} in dialects that use date-add functions. */
  @Test public void testDatePlusColumnFunctionVariWithArithOp() {
    String query = "select \"birth_date\" + \"store_id\" * INTERVAL '11' DAY from \"employee\"";
    final String expectedHive = "SELECT CAST(DATE_ADD(birth_date, store_id * 11) AS DATE)\n"
        + "FROM foodmart.employee";
    final String expectedSpark = "SELECT birth_date + store_id * INTERVAL '11' DAY\nFROM "
        + "foodmart.employee";
    final String expectedBigQuery = "SELECT DATE_ADD(birth_date, INTERVAL store_id * 11 DAY)\n"
        + "FROM foodmart.employee";
    final String expectedSnowflake = "SELECT (\"birth_date\" + \"store_id\" * 11)\n"
        + "FROM \"foodmart\".\"employee\"";
    sql(query)
        .withHive()
        .ok(expectedHive)
        .withBigQuery()
        .ok(expectedBigQuery)
        .withSpark()
        .ok(expectedSpark)
        .withSnowflake()
        .ok(expectedSnowflake);
  }
  /** Tests DATE minus {@code column * INTERVAL '11' DAY}; mirror of the plus case,
   * emitting DATE_SUB in Hive/BigQuery. */
  @Test public void testDateSubColumnFunctionVariWithArithOp() {
    String query = "select \"birth_date\" - \"store_id\" * INTERVAL '11' DAY from \"employee\"";
    final String expectedHive = "SELECT CAST(DATE_SUB(birth_date, store_id * 11) AS DATE)\n"
        + "FROM foodmart.employee";
    final String expectedSpark = "SELECT birth_date - store_id * INTERVAL '11' DAY\nFROM "
        + "foodmart.employee";
    final String expectedBigQuery = "SELECT DATE_SUB(birth_date, INTERVAL store_id * 11 DAY)\n"
        + "FROM foodmart.employee";
    final String expectedSnowflake = "SELECT (\"birth_date\" - \"store_id\" * 11)\n"
        + "FROM \"foodmart\".\"employee\"";
    sql(query)
        .withHive()
        .ok(expectedHive)
        .withBigQuery()
        .ok(expectedBigQuery)
        .withSpark()
        .ok(expectedSpark)
        .withSnowflake()
        .ok(expectedSnowflake);
  }
  /** Tests DATE plus {@code 10 * INTERVAL '2' DAY}; the constant product {@code 10 * 2}
   * is preserved inside the dialect-specific add expression. */
  @Test public void testDatePlusIntervalDayFunctionWithArithOp() {
    String query = "select \"birth_date\" + 10 * INTERVAL '2' DAY from \"employee\"";
    final String expectedHive = "SELECT CAST(DATE_ADD(birth_date, 10 * 2) AS DATE)\n"
        + "FROM foodmart.employee";
    final String expectedSpark = "SELECT birth_date + 10 * INTERVAL '2' DAY\n"
        + "FROM foodmart.employee";
    final String expectedBigQuery = "SELECT DATE_ADD(birth_date, INTERVAL 10 * 2 DAY)\n"
        + "FROM foodmart.employee";
    final String expectedSnowflake = "SELECT (\"birth_date\" + 10 * 2)\n"
        + "FROM \"foodmart\".\"employee\"";
    sql(query)
        .withHive()
        .ok(expectedHive)
        .withBigQuery()
        .ok(expectedBigQuery)
        .withSpark()
        .ok(expectedSpark)
        .withSnowflake()
        .ok(expectedSnowflake);
  }
  /** Tests {@code INTERVAL '1' DAY + date_column}; operands are normalized so the date
   * column comes first in every dialect's output. */
  @Test public void testIntervalDayPlusDateFunction() {
    String query = "select INTERVAL '1' DAY + \"birth_date\" from \"employee\"";
    final String expectedHive = "SELECT CAST(DATE_ADD(birth_date, 1) AS DATE)\n"
        + "FROM foodmart.employee";
    final String expectedSpark = "SELECT birth_date + INTERVAL '1' DAY\n"
        + "FROM foodmart.employee";
    final String expectedBigQuery = "SELECT DATE_ADD(birth_date, INTERVAL 1 DAY)\n"
        + "FROM foodmart.employee";
    final String expectedSnowflake = "SELECT DATEADD(DAY, 1, \"birth_date\")\n"
        + "FROM \"foodmart\".\"employee\"";
    sql(query)
        .withHive()
        .ok(expectedHive)
        .withBigQuery()
        .ok(expectedBigQuery)
        .withSpark()
        .ok(expectedSpark)
        .withSnowflake()
        .ok(expectedSnowflake);
  }
  /** Tests that an {@code INTERVAL '06:10:30' HOUR TO SECOND} is folded into whole
   * seconds (22230) for BigQuery's TIMESTAMP_ADD/TIMESTAMP_SUB. */
  @Test public void testIntervalHourToSecond() {
    String query = "SELECT CURRENT_TIMESTAMP + INTERVAL '06:10:30' HOUR TO SECOND,"
        + "CURRENT_TIMESTAMP - INTERVAL '06:10:30' HOUR TO SECOND "
        + "FROM \"employee\"";
    final String expectedBQ = "SELECT TIMESTAMP_ADD(CURRENT_DATETIME(), INTERVAL 22230 SECOND), "
        + "TIMESTAMP_SUB(CURRENT_DATETIME(), INTERVAL 22230 SECOND)\n"
        + "FROM foodmart.employee";
    sql(query)
        .withBigQuery()
        .ok(expectedBQ);
  }
  /** Builds a MINUS RexCall on a TIMESTAMP WITH LOCAL TIME ZONE literal and a
   * 1000-microsecond interval, and checks it unparses as TIMESTAMP_SUB in BigQuery. */
  @Test public void testUnparseMinusCallWithReturnTypeOfTimestampWithZoneToTimestampSub() {
    final RelBuilder relBuilder = relBuilder();
    final RexBuilder rexBuilder = relBuilder.getRexBuilder();
    // 2022-02-18 08:23:45 with local time zone, precision 0
    final RexLiteral literalTimestampLTZ =
        rexBuilder.makeTimestampWithLocalTimeZoneLiteral(
            new TimestampString(2022, 2, 18, 8, 23, 45), 0);
    final RexLiteral intervalLiteral = rexBuilder.makeIntervalLiteral(new BigDecimal(1000),
        new SqlIntervalQualifier(MICROSECOND, null, SqlParserPos.ZERO));
    final RexNode minusCall =
        relBuilder.call(SqlStdOperatorTable.MINUS, literalTimestampLTZ, intervalLiteral);
    final RelNode root = relBuilder
        .values(new String[] {"c"}, 1)
        .project(minusCall)
        .build();
    final String expectedBigQuery = "SELECT TIMESTAMP_SUB(TIMESTAMP '2022-02-18 08:23:45'"
        + ", INTERVAL 1 MICROSECOND) AS `$f0`";
    assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBigQuery));
  }
  /** Builds a PLUS RexCall on a TIMESTAMP WITH LOCAL TIME ZONE literal and a
   * 1000-microsecond interval, and checks it unparses as TIMESTAMP_ADD in BigQuery. */
  @Test public void testUnparsePlusCallWithReturnTypeOfTimestampWithZoneToTimestampAdd() {
    final RelBuilder relBuilder = relBuilder();
    final RexBuilder rexBuilder = relBuilder.getRexBuilder();
    // 2022-02-18 08:23:45 with local time zone, precision 0
    final RexLiteral literalTimestampLTZ =
        rexBuilder.makeTimestampWithLocalTimeZoneLiteral(
            new TimestampString(2022, 2, 18, 8, 23, 45), 0);
    final RexLiteral intervalLiteral = rexBuilder.makeIntervalLiteral(new BigDecimal(1000),
        new SqlIntervalQualifier(MICROSECOND, null, SqlParserPos.ZERO));
    final RexNode plusCall =
        relBuilder.call(SqlStdOperatorTable.PLUS, literalTimestampLTZ, intervalLiteral);
    final RelNode root = relBuilder
        .values(new String[] {"c"}, 1)
        .project(plusCall)
        .build();
    final String expectedBigQuery = "SELECT TIMESTAMP_ADD(TIMESTAMP '2022-02-18 08:23:45',"
        + " INTERVAL 1 MICROSECOND) AS `$f0`";
    assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBigQuery));
  }
@Test public void truncateFunctionEmulationForBigQuery() {
String query = "select truncate(2.30259, 3) from \"employee\"";
final String expectedBigQuery = "SELECT TRUNC(2.30259, 3)\n"
+ "FROM foodmart.employee";
sql(query)
.withBigQuery().ok(expectedBigQuery);
}
@Test public void truncateFunctionWithSingleOperandEmulationForBigQuery() {
String query = "select truncate(2.30259) from \"employee\"";
final String expectedBigQuery = "SELECT TRUNC(2.30259)\n"
+ "FROM foodmart.employee";
sql(query)
.withBigQuery().ok(expectedBigQuery);
}
  /** Tests EXTRACT(YEAR ...): YEAR() in Hive/Spark/MSSQL, native EXTRACT in BigQuery. */
  @Test public void extractFunctionEmulation() {
    String query = "select extract(year from \"hire_date\") from \"employee\"";
    final String expectedHive = "SELECT YEAR(hire_date)\n"
        + "FROM foodmart.employee";
    final String expectedSpark = "SELECT YEAR(hire_date)\n"
        + "FROM foodmart.employee";
    final String expectedBigQuery = "SELECT EXTRACT(YEAR FROM hire_date)\n"
        + "FROM foodmart.employee";
    final String expectedMsSql = "SELECT YEAR([hire_date])\n"
        + "FROM [foodmart].[employee]";
    sql(query)
        .withHive()
        .ok(expectedHive)
        .withSpark()
        .ok(expectedSpark)
        .withBigQuery()
        .ok(expectedBigQuery)
        .withMssql()
        .ok(expectedMsSql);
  }
@Test public void extractMinuteFunctionEmulation() {
String query = "select extract(minute from \"hire_date\") from \"employee\"";
final String expectedBigQuery = "SELECT EXTRACT(MINUTE FROM hire_date)\n"
+ "FROM foodmart.employee";
final String expectedMsSql = "SELECT DATEPART(MINUTE, [hire_date])\n"
+ "FROM [foodmart].[employee]";
sql(query)
.withBigQuery()
.ok(expectedBigQuery)
.withMssql()
.ok(expectedMsSql);
}
@Test public void extractSecondFunctionEmulation() {
String query = "select extract(second from \"hire_date\") from \"employee\"";
final String expectedBigQuery = "SELECT EXTRACT(SECOND FROM hire_date)\n"
+ "FROM foodmart.employee";
final String expectedMsSql = "SELECT DATEPART(SECOND, [hire_date])\n"
+ "FROM [foodmart].[employee]";
sql(query)
.withBigQuery()
.ok(expectedBigQuery)
.withMssql()
.ok(expectedMsSql);
}
@Test public void selectWithoutFromEmulationForHiveAndSparkAndBigquery() {
String query = "select 2 + 2";
final String expected = "SELECT 2 + 2";
sql(query)
.withHive()
.ok(expected)
.withSpark()
.ok(expected)
.withBigQuery()
.ok(expected);
}
  /** Tests CURRENT_TIMESTAMP: backtick-quoted alias in Hive/Spark, and
   * {@code CURRENT_DATETIME()} in BigQuery. */
  @Test public void currentTimestampFunctionForHiveAndSparkAndBigquery() {
    String query = "select current_timestamp";
    final String expectedHiveQuery = "SELECT CURRENT_TIMESTAMP `CURRENT_TIMESTAMP`";
    final String expectedSparkQuery = "SELECT CURRENT_TIMESTAMP `CURRENT_TIMESTAMP`";
    final String expectedBigQuery = "SELECT CURRENT_DATETIME() AS CURRENT_TIMESTAMP";
    sql(query)
        .withHiveIdentifierQuoteString()
        .ok(expectedHiveQuery)
        .withSparkIdentifierQuoteString()
        .ok(expectedSparkQuery)
        .withBigQuery()
        .ok(expectedBigQuery);
  }
  /** Tests the {@code ||} operator: CONCAT in Hive/BigQuery/MSSQL, kept as {@code ||}
   * in Spark. */
  @Test public void concatFunctionEmulationForHiveAndSparkAndBigQuery() {
    String query = "select 'foo' || 'bar' from \"employee\"";
    final String expected = "SELECT CONCAT('foo', 'bar')\n"
        + "FROM foodmart.employee";
    final String mssql = "SELECT CONCAT('foo', 'bar')\n"
        + "FROM [foodmart].[employee]";
    final String expectedSpark = "SELECT 'foo' || 'bar'\n"
        + "FROM foodmart.employee";
    sql(query)
        .withHive()
        .ok(expected)
        .withSpark()
        .ok(expectedSpark)
        .withBigQuery()
        .ok(expected)
        .withMssql()
        .ok(mssql);
  }
@Test void testJsonRemove() {
String query = "select json_remove(\"product_name\", '$[0]') from \"product\"";
final String expected = "SELECT JSON_REMOVE(\"product_name\", '$[0]')\n"
+ "FROM \"foodmart\".\"product\"";
sql(query).ok(expected);
}
/*
@Test void testUnionAllWithNoOperandsUsingOracleDialect() {
String query = "select A.\"department_id\" "
+ "from \"foodmart\".\"employee\" A "
+ " where A.\"department_id\" = ( select min( A.\"department_id\") from \"foodmart\""
+ ".\"department\" B where 1=2 )";
final String expected = "SELECT \"employee\".\"department_id\"\n"
+ "FROM \"foodmart\".\"employee\"\n"
+ "INNER JOIN (SELECT \"t1\".\"department_id\" \"department_id0\", MIN(\"t1\""
+ ".\"department_id\") \"EXPR$0\"\n"
+ "FROM (SELECT NULL \"department_id\", NULL \"department_description\"\nFROM "
+ "\"DUAL\"\nWHERE 1 = 0) \"t\",\n"
+ "(SELECT \"department_id\"\nFROM \"foodmart\".\"employee\"\nGROUP BY \"department_id\")"
+ " \"t1\"\n"
+ "GROUP BY \"t1\".\"department_id\") \"t3\" ON \"employee\".\"department_id\" = \"t3\""
+ ".\"department_id0\""
+ " AND \"employee\".\"department_id\" = \"t3\".\"EXPR$0\"";
sql(query).withOracle().ok(expected);
}*/
/*@Test void testUnionAllWithNoOperands() {
String query = "select A.\"department_id\" "
+ "from \"foodmart\".\"employee\" A "
+ " where A.\"department_id\" = ( select min( A.\"department_id\") from \"foodmart\""
+ ".\"department\" B where 1=2 )";
final String expected = "SELECT \"employee\".\"department_id\"\n"
+ "FROM \"foodmart\".\"employee\"\n"
+ "INNER JOIN (SELECT \"t1\".\"department_id\" AS \"department_id0\","
+ " MIN(\"t1\".\"department_id\") AS \"EXPR$0\"\n"
+ "FROM (SELECT *\nFROM (VALUES (NULL, NULL))"
+ " AS \"t\" (\"department_id\", \"department_description\")"
+ "\nWHERE 1 = 0) AS \"t\","
+ "\n(SELECT \"department_id\"\nFROM \"foodmart\".\"employee\""
+ "\nGROUP BY \"department_id\") AS \"t1\""
+ "\nGROUP BY \"t1\".\"department_id\") AS \"t3\" "
+ "ON \"employee\".\"department_id\" = \"t3\".\"department_id0\""
+ " AND \"employee\".\"department_id\" = \"t3\".\"EXPR$0\"";
sql(query).ok(expected);
}*/
@Test void testSmallintOracle() {
String query = "SELECT CAST(\"department_id\" AS SMALLINT) FROM \"employee\"";
String expected = "SELECT CAST(\"department_id\" AS NUMBER(5))\n"
+ "FROM \"foodmart\".\"employee\"";
sql(query)
.withOracle()
.ok(expected);
}
@Test void testBigintOracle() {
String query = "SELECT CAST(\"department_id\" AS BIGINT) FROM \"employee\"";
String expected = "SELECT CAST(\"department_id\" AS NUMBER(19))\n"
+ "FROM \"foodmart\".\"employee\"";
sql(query)
.withOracle()
.ok(expected);
}
@Test void testDoubleOracle() {
String query = "SELECT CAST(\"department_id\" AS DOUBLE) FROM \"employee\"";
String expected = "SELECT CAST(\"department_id\" AS DOUBLE PRECISION)\n"
+ "FROM \"foodmart\".\"employee\"";
sql(query)
.withOracle()
.ok(expected);
}
@Test void testDateLiteralOracle() {
String query = "SELECT DATE '1978-05-02' FROM \"employee\"";
String expected = "SELECT TO_DATE('1978-05-02', 'YYYY-MM-DD')\n"
+ "FROM \"foodmart\".\"employee\"";
sql(query)
.withOracle()
.ok(expected);
}
@Test void testTimestampLiteralOracle() {
String query = "SELECT TIMESTAMP '1978-05-02 12:34:56.78' FROM \"employee\"";
String expected = "SELECT TO_TIMESTAMP('1978-05-02 12:34:56.78',"
+ " 'YYYY-MM-DD HH24:MI:SS.FF')\n"
+ "FROM \"foodmart\".\"employee\"";
sql(query)
.withOracle()
.ok(expected);
}
@Test void testTimeLiteralOracle() {
String query = "SELECT TIME '12:34:56.78' FROM \"employee\"";
String expected = "SELECT TO_TIME('12:34:56.78', 'HH24:MI:SS.FF')\n"
+ "FROM \"foodmart\".\"employee\"";
sql(query)
.withOracle()
.ok(expected);
}
  /** Tests that grouping on a join column absent from the projection produces a
   * subquery with an aliased duplicate column in BigQuery. */
  @Test public void testSelectWithGroupByOnColumnNotPresentInProjection() {
    String query = "select \"t1\".\"department_id\" from\n"
        + "\"foodmart\".\"employee\" as \"t1\" inner join \"foodmart\".\"department\" as \"t2\"\n"
        + "on \"t1\".\"department_id\" = \"t2\".\"department_id\"\n"
        + "group by \"t2\".\"department_id\", \"t1\".\"department_id\"";
    final String expected = "SELECT t0.department_id\n"
        + "FROM (SELECT department.department_id AS department_id0, employee.department_id\n"
        + "FROM foodmart.employee\n"
        + "INNER JOIN foodmart.department ON employee.department_id = department.department_id\n"
        + "GROUP BY department_id0, employee.department_id) AS t0";
    sql(query).withBigQuery().ok(expected);
  }
@Test void testSupportsDataType() {
final RelDataTypeFactory typeFactory =
new SqlTypeFactoryImpl(RelDataTypeSystem.DEFAULT);
final RelDataType booleanDataType = typeFactory.createSqlType(SqlTypeName.BOOLEAN);
final RelDataType integerDataType = typeFactory.createSqlType(SqlTypeName.INTEGER);
final SqlDialect oracleDialect = SqlDialect.DatabaseProduct.ORACLE.getDialect();
assertFalse(oracleDialect.supportsDataType(booleanDataType));
assertTrue(oracleDialect.supportsDataType(integerDataType));
final SqlDialect postgresqlDialect = SqlDialect.DatabaseProduct.POSTGRESQL.getDialect();
assertTrue(postgresqlDialect.supportsDataType(booleanDataType));
assertTrue(postgresqlDialect.supportsDataType(integerDataType));
}
/** Test case for
* <a href="https://issues.apache.org/jira/browse/CALCITE-4150">[CALCITE-4150]
* JDBC adapter throws UnsupportedOperationException when generating SQL
* for untyped NULL literal</a>. */
@Test void testSelectRawNull() {
final String query = "SELECT NULL FROM \"product\"";
final String expected = "SELECT NULL\n"
+ "FROM \"foodmart\".\"product\"";
sql(query).ok(expected);
}
@Test void testSelectRawNullWithAlias() {
final String query = "SELECT NULL AS DUMMY FROM \"product\"";
final String expected = "SELECT NULL AS \"DUMMY\"\n"
+ "FROM \"foodmart\".\"product\"";
sql(query).ok(expected);
}
@Test void testSelectNullWithCast() {
final String query = "SELECT CAST(NULL AS INT)";
final String expected = "SELECT *\n"
+ "FROM (VALUES (NULL)) AS \"t\" (\"EXPR$0\")";
sql(query).ok(expected);
// validate
sql(expected).exec();
}
@Test void testSelectNullWithCount() {
final String query = "SELECT COUNT(CAST(NULL AS INT))";
final String expected = "SELECT COUNT(\"$f0\")\n"
+ "FROM (VALUES (NULL)) AS \"t\" (\"$f0\")";
sql(query).ok(expected);
// validate
sql(expected).exec();
}
  /** Tests COUNT of a typed NULL grouped by a typed NULL; both become VALUES columns. */
  @Test void testSelectNullWithGroupByNull() {
    final String query = "SELECT COUNT(CAST(NULL AS INT))\n"
        + "FROM (VALUES  (0))AS \"t\"\n"
        + "GROUP BY CAST(NULL AS VARCHAR CHARACTER SET \"ISO-8859-1\")";
    final String expected = "SELECT COUNT(\"$f1\")\n"
        + "FROM (VALUES (NULL, NULL)) AS \"t\" (\"$f0\", \"$f1\")\n"
        + "GROUP BY \"$f0\"";
    sql(query).ok(expected);
    // validate
    sql(expected).exec();
  }
  /** Tests COUNT of a typed NULL grouped by a real column; the cast is preserved. */
  @Test void testSelectNullWithGroupByVar() {
    final String query = "SELECT COUNT(CAST(NULL AS INT))\n"
        + "FROM \"account\" AS \"t\"\n"
        + "GROUP BY \"account_type\"";
    final String expected = "SELECT COUNT(CAST(NULL AS INTEGER))\n"
        + "FROM \"foodmart\".\"account\"\n"
        + "GROUP BY \"account_type\"";
    sql(query).ok(expected);
    // validate
    sql(expected).exec();
  }
  /** Tests INSERT with a typed NULL and casts in the source SELECT; unspecified
   * target columns are filled with typed NULL casts. */
  @Test void testSelectNullWithInsert() {
    final String query = "insert into\n"
        + "\"account\"(\"account_id\",\"account_parent\",\"account_type\",\"account_rollup\")\n"
        + "select 1, cast(NULL AS INT), cast(123 as varchar), cast(123 as varchar)";
    final String expected = "INSERT INTO \"foodmart\".\"account\" ("
        + "\"account_id\", \"account_parent\", \"account_description\", "
        + "\"account_type\", \"account_rollup\", \"Custom_Members\")\n"
        + "(SELECT \"EXPR$0\" AS \"account_id\","
        + " \"EXPR$1\" AS \"account_parent\","
        + " CAST(NULL AS VARCHAR(30) CHARACTER SET \"ISO-8859-1\") "
        + "AS \"account_description\","
        + " \"EXPR$2\" AS \"account_type\", "
        + "\"EXPR$3\" AS \"account_rollup\","
        + " CAST(NULL AS VARCHAR(255) CHARACTER SET \"ISO-8859-1\") "
        + "AS \"Custom_Members\"\n"
        + "FROM (VALUES (1, NULL, '123', '123')) "
        + "AS \"t\" (\"EXPR$0\", \"EXPR$1\", \"EXPR$2\", \"EXPR$3\"))";
    sql(query).ok(expected);
    // validate
    sql(expected).exec();
  }
  /** Tests INSERT sourced from an inner join, with typed NULLs and varchar casts in
   * the projected columns. */
  @Test void testSelectNullWithInsertFromJoin() {
    final String query = "insert into\n"
        + "\"account\"(\"account_id\",\"account_parent\",\n"
        + "\"account_type\",\"account_rollup\")\n"
        + "select \"product\".\"product_id\",\n"
        + "cast(NULL AS INT),\n"
        + "cast(\"product\".\"product_id\" as varchar),\n"
        + "cast(\"sales_fact_1997\".\"store_id\" as varchar)\n"
        + "from \"product\"\n"
        + "inner join \"sales_fact_1997\"\n"
        + "on \"product\".\"product_id\" = \"sales_fact_1997\".\"product_id\"";
    final String expected = "INSERT INTO \"foodmart\".\"account\" "
        + "(\"account_id\", \"account_parent\", \"account_description\", "
        + "\"account_type\", \"account_rollup\", \"Custom_Members\")\n"
        + "(SELECT \"product\".\"product_id\" AS \"account_id\", "
        + "CAST(NULL AS INTEGER) AS \"account_parent\", CAST(NULL AS VARCHAR"
        + "(30) CHARACTER SET \"ISO-8859-1\") AS \"account_description\", "
        + "CAST(\"product\".\"product_id\" AS VARCHAR CHARACTER SET "
        + "\"ISO-8859-1\") AS \"account_type\", "
        + "CAST(\"sales_fact_1997\".\"store_id\" AS VARCHAR CHARACTER SET \"ISO-8859-1\") AS "
        + "\"account_rollup\", "
        + "CAST(NULL AS VARCHAR(255) CHARACTER SET \"ISO-8859-1\") AS \"Custom_Members\"\n"
        + "FROM \"foodmart\".\"product\"\n"
        + "INNER JOIN \"foodmart\".\"sales_fact_1997\" "
        + "ON \"product\".\"product_id\" = \"sales_fact_1997\".\"product_id\")";
    sql(query).ok(expected);
    // validate
    sql(expected).exec();
  }
  /** Tests DECIMAL casts: precision above the type-system maximum is clamped to
   * DECIMAL(19, 6), while an in-range precision/scale is kept as written. */
  @Test void testCastDecimalOverflow() {
    final String query =
        "SELECT CAST('11111111111111111111111111111111.111111' AS DECIMAL(38,6)) AS \"num\" from \"product\"";
    final String expected =
        "SELECT CAST('11111111111111111111111111111111.111111' AS DECIMAL(19, 6)) AS \"num\"\n"
            + "FROM \"foodmart\".\"product\"";
    sql(query).ok(expected);
    final String query2 =
        "SELECT CAST(1111111 AS DECIMAL(5,2)) AS \"num\" from \"product\"";
    final String expected2 =
        "SELECT CAST(1111111 AS DECIMAL(5, 2)) AS \"num\"\nFROM \"foodmart\".\"product\"";
    sql(query2).ok(expected2);
  }
@Test void testCastInStringIntegerComparison() {
final String query = "select \"employee_id\" "
+ "from \"foodmart\".\"employee\" "
+ "where 10 = cast('10' as int) and \"birth_date\" = cast('1914-02-02' as date) or "
+ "\"hire_date\" = cast('1996-01-01 '||'00:00:00' as timestamp)";
final String expected = "SELECT \"employee_id\"\n"
+ "FROM \"foodmart\".\"employee\"\n"
+ "WHERE 10 = '10' AND \"birth_date\" = '1914-02-02' OR \"hire_date\" = '1996-01-01 ' || "
+ "'00:00:00'";
final String expectedBiqquery = "SELECT employee_id\n"
+ "FROM foodmart.employee\n"
+ "WHERE 10 = CAST('10' AS INT64) AND birth_date = '1914-02-02' OR hire_date = "
+ "CAST(CONCAT('1996-01-01 ', '00:00:00') AS DATETIME)";
sql(query)
.ok(expected)
.withBigQuery()
.ok(expectedBiqquery);
}
  /** Tests quote/unquote of string literals per dialect: BigQuery escapes an embedded
   * quote with a backslash, all other dialects double it. */
  @Test void testDialectQuoteStringLiteral() {
    dialects().forEach((dialect, databaseProduct) -> {
      assertThat(dialect.quoteStringLiteral(""), is("''"));
      assertThat(dialect.quoteStringLiteral("can't run"),
          databaseProduct == DatabaseProduct.BIG_QUERY
              ? is("'can\\'t run'")
              : is("'can''t run'"));
      assertThat(dialect.unquoteStringLiteral("''"), is(""));
      if (databaseProduct == DatabaseProduct.BIG_QUERY) {
        assertThat(dialect.unquoteStringLiteral("'can\\'t run'"),
            is("can't run"));
      } else {
        assertThat(dialect.unquoteStringLiteral("'can't run'"),
            is("can't run"));
      }
    });
  }
@Test public void testToNumberFunctionHandlingHexaToInt() {
String query = "select TO_NUMBER('03ea02653f6938ba','XXXXXXXXXXXXXXXX')";
final String expected = "SELECT CAST(CONV('03ea02653f6938ba', 16, 10) AS BIGINT)";
final String expectedBigQuery = "SELECT CAST(CONCAT('0x', '03ea02653f6938ba') AS INT64)";
final String expectedSnowFlake = "SELECT TO_NUMBER('03ea02653f6938ba', 'XXXXXXXXXXXXXXXX')";
sql(query)
.withHive()
.ok(expected)
.withSpark()
.ok(expected)
.withBigQuery()
.ok(expectedBigQuery)
.withSnowflake()
.ok(expectedSnowFlake);
}
@Test public void testToNumberFunctionHandlingFloatingPoint() {
String query = "select TO_NUMBER('-1.7892','9.9999')";
final String expected = "SELECT CAST('-1.7892' AS FLOAT)";
final String expectedBigQuery = "SELECT CAST('-1.7892' AS FLOAT64)";
final String expectedSnowFlake = "SELECT TO_NUMBER('-1.7892', 38, 4)";
sql(query)
.withHive()
.ok(expected)
.withSpark()
.ok(expected)
.withBigQuery()
.ok(expectedBigQuery)
.withSnowflake()
.ok(expectedSnowFlake);
}
@Test public void testToNumberFunctionWithColumns() {
String query = "SELECT TO_NUMBER(\"first_name\", '000') FROM \"foodmart\""
+ ".\"employee\"";
final String expectedBigQuery = "SELECT CAST(first_name AS INT64)\n"
+ "FROM foodmart.employee";
sql(query)
.withBigQuery()
.ok(expectedBigQuery);
}
  /** Tests SELECT DISTINCT with a windowed MAX: Hive/Spark repeat the window in
   * GROUP BY, BigQuery wraps it in a subquery, Snowflake/MSSQL add an ORDER BY with
   * a ROWS frame. */
  @Test public void testOver() {
    String query = "SELECT distinct \"product_id\", MAX(\"product_id\") \n"
        + "OVER(PARTITION BY \"product_id\") AS abc\n"
        + "FROM \"product\"";
    final String expected = "SELECT product_id, MAX(product_id) OVER "
        + "(PARTITION BY product_id RANGE BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) ABC\n"
        + "FROM foodmart.product\n"
        + "GROUP BY product_id, MAX(product_id) OVER (PARTITION BY product_id "
        + "RANGE BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING)";
    final String expectedBQ = "SELECT product_id, ABC\n"
        + "FROM (SELECT product_id, MAX(product_id) OVER "
        + "(PARTITION BY product_id RANGE BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) AS ABC\n"
        + "FROM foodmart.product) AS t\n"
        + "GROUP BY product_id, ABC";
    final String expectedSnowFlake = "SELECT \"product_id\", MAX(\"product_id\") OVER "
        + "(PARTITION BY \"product_id\" ORDER BY \"product_id\" ROWS "
        + "BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) AS \"ABC\"\n"
        + "FROM \"foodmart\".\"product\"\n"
        + "GROUP BY \"product_id\", MAX(\"product_id\") OVER (PARTITION BY \"product_id\" "
        + "ORDER BY \"product_id\" ROWS BETWEEN UNBOUNDED PRECEDING AND "
        + "UNBOUNDED FOLLOWING)";
    final String mssql = "SELECT [product_id], MAX([product_id]) OVER (PARTITION "
        + "BY [product_id] ORDER BY [product_id] ROWS BETWEEN UNBOUNDED PRECEDING AND "
        + "UNBOUNDED FOLLOWING) AS [ABC]\n"
        + "FROM [foodmart].[product]\n"
        + "GROUP BY [product_id], MAX([product_id]) OVER (PARTITION BY [product_id] "
        + "ORDER BY [product_id] ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING)";
    sql(query)
        .withHive()
        .ok(expected)
        .withSpark()
        .ok(expected)
        .withBigQuery()
        .ok(expectedBQ)
        .withSnowflake()
        .ok(expectedSnowFlake)
        .withMssql()
        .ok(mssql);
  }
  /** Tests NTILE with window ORDER BY; BigQuery adds an IS NULL sort key for
   * null ordering. */
  @Test public void testNtileFunction() {
    String query = "SELECT ntile(2)\n"
        + "OVER(order BY \"product_id\") AS abc\n"
        + "FROM \"product\"";
    final String expectedBQ = "SELECT NTILE(2) OVER (ORDER BY product_id IS NULL, product_id) "
        + "AS ABC\n"
        + "FROM foodmart.product";
    sql(query)
        .withBigQuery()
        .ok(expectedBQ);
  }
  /** Tests COUNT(*) OVER (): Hive/Spark/BigQuery keep the RANGE frame, Snowflake adds
   * ORDER BY 0 with a ROWS frame, MSSQL emits an empty OVER clause. */
  @Test public void testCountWithWindowFunction() {
    String query = "Select count(*) over() from \"product\"";
    String expected = "SELECT COUNT(*) OVER (RANGE BETWEEN UNBOUNDED PRECEDING "
        + "AND UNBOUNDED FOLLOWING)\n"
        + "FROM foodmart.product";
    String expectedBQ = "SELECT COUNT(*) OVER (RANGE BETWEEN UNBOUNDED PRECEDING "
        + "AND UNBOUNDED FOLLOWING)\n"
        + "FROM foodmart.product";
    final String expectedSnowFlake = "SELECT COUNT(*) OVER (ORDER BY 0 "
        + "ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING)\n"
        + "FROM \"foodmart\".\"product\"";
    final String mssql = "SELECT COUNT(*) OVER ()\n"
        + "FROM [foodmart].[product]";
    sql(query)
        .withHive()
        .ok(expected)
        .withSpark()
        .ok(expected)
        .withBigQuery()
        .ok(expectedBQ)
        .withSnowflake()
        .ok(expectedSnowFlake)
        .withMssql()
        .ok(mssql);
  }
  /** Tests ROW_NUMBER and windowed SUM ordered by a grouped column; each dialect
   * expresses null ordering differently (IS NULL key, NULLS LAST, CASE WHEN, or
   * native default). */
  @Test public void testOrderByInWindowFunction() {
    String query = "select \"first_name\", COUNT(\"department_id\") as "
        + "\"department_id_number\", ROW_NUMBER() OVER (ORDER BY "
        + "\"department_id\" ASC), SUM(\"department_id\") OVER "
        + "(ORDER BY \"department_id\" ASC) \n"
        + "from \"foodmart\".\"employee\" \n"
        + "GROUP by \"first_name\", \"department_id\"";
    final String expected = "SELECT first_name, department_id_number, ROW_NUMBER() "
        + "OVER (ORDER BY department_id IS NULL, department_id), SUM(department_id) "
        + "OVER (ORDER BY department_id IS NULL, department_id "
        + "RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW)\n"
        + "FROM (SELECT first_name, department_id, COUNT(*) department_id_number\n"
        + "FROM foodmart.employee\n"
        + "GROUP BY first_name, department_id) t0";
    final String expectedSpark = "SELECT first_name, department_id_number, ROW_NUMBER() "
        + "OVER (ORDER BY department_id NULLS LAST), SUM(department_id) "
        + "OVER (ORDER BY department_id NULLS LAST "
        + "RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW)\n"
        + "FROM (SELECT first_name, department_id, COUNT(*) department_id_number\n"
        + "FROM foodmart.employee\n"
        + "GROUP BY first_name, department_id) t0";
    final String expectedBQ = "SELECT first_name, department_id_number, "
        + "ROW_NUMBER() OVER (ORDER BY department_id IS NULL, department_id), SUM(department_id) "
        + "OVER (ORDER BY department_id IS NULL, department_id "
        + "RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW)\n"
        + "FROM (SELECT first_name, department_id, COUNT(*) AS department_id_number\n"
        + "FROM foodmart.employee\n"
        + "GROUP BY first_name, department_id) AS t0";
    final String expectedSnowFlake = "SELECT \"first_name\", \"department_id_number\", "
        + "ROW_NUMBER() OVER (ORDER BY \"department_id\"), SUM(\"department_id\") "
        + "OVER (ORDER BY \"department_id\" RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW)\n"
        + "FROM (SELECT \"first_name\", \"department_id\", COUNT(*) AS \"department_id_number\"\n"
        + "FROM \"foodmart\".\"employee\"\n"
        + "GROUP BY \"first_name\", \"department_id\") AS \"t0\"";
    final String mssql = "SELECT [first_name], [department_id_number], ROW_NUMBER()"
        + " OVER (ORDER BY CASE WHEN [department_id] IS NULL THEN 1 ELSE 0 END,"
        + " [department_id]), SUM([department_id]) OVER (ORDER BY CASE WHEN [department_id] IS NULL"
        + " THEN 1 ELSE 0 END, [department_id] RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW)\n"
        + "FROM (SELECT [first_name], [department_id], COUNT(*) AS [department_id_number]\n"
        + "FROM [foodmart].[employee]\n"
        + "GROUP BY [first_name], [department_id]) AS [t0]";
    sql(query)
        .withHive()
        .ok(expected)
        .withSpark()
        .ok(expectedSpark)
        .withBigQuery()
        .ok(expectedBQ)
        .withSnowflake()
        .ok(expectedSnowFlake)
        .withMssql()
        .ok(mssql);
  }
@Test public void testToNumberFunctionHandlingFloatingPointWithD() {
String query = "select TO_NUMBER('1.789','9D999')";
final String expected = "SELECT CAST('1.789' AS FLOAT)";
final String expectedBigQuery = "SELECT CAST('1.789' AS FLOAT64)";
final String expectedSnowFlake = "SELECT TO_NUMBER('1.789', 38, 3)";
sql(query)
.withHive()
.ok(expected)
.withSpark()
.ok(expected)
.withBigQuery()
.ok(expectedBigQuery)
.withSnowflake()
.ok(expectedSnowFlake);
}
@Test public void testToNumberFunctionHandlingWithSingleFloatingPoint() {
String query = "select TO_NUMBER('1.789')";
final String expected = "SELECT CAST('1.789' AS FLOAT)";
final String expectedBigQuery = "SELECT CAST('1.789' AS FLOAT64)";
final String expectedSnowFlake = "SELECT TO_NUMBER('1.789', 38, 3)";
sql(query)
.withHive()
.ok(expected)
.withSpark()
.ok(expected)
.withBigQuery()
.ok(expectedBigQuery)
.withSnowflake()
.ok(expectedSnowFlake);
}
@Test public void testToNumberFunctionHandlingWithComma() {
String query = "SELECT TO_NUMBER ('1,789', '9,999')";
final String expected = "SELECT CAST('1789' AS BIGINT)";
final String expectedBigQuery = "SELECT CAST('1789' AS INT64)";
final String expectedSnowFlake = "SELECT TO_NUMBER('1,789', '9,999')";
sql(query)
.withHive()
.ok(expected)
.withSpark()
.ok(expected)
.withBigQuery()
.ok(expectedBigQuery)
.withSnowflake()
.ok(expectedSnowFlake);
}
@Test public void testToNumberFunctionHandlingWithCurrency() {
String query = "SELECT TO_NUMBER ('$1789', '$9999')";
final String expected = "SELECT CAST('1789' AS BIGINT)";
final String expectedBigQuery = "SELECT CAST('1789' AS INT64)";
final String expectedSnowFlake = "SELECT TO_NUMBER('$1789', '$9999')";
sql(query)
.withHive()
.ok(expected)
.withSpark()
.ok(expected)
.withBigQuery()
.ok(expectedBigQuery)
.withSnowflake()
.ok(expectedSnowFlake);
}
@Test public void testToNumberFunctionHandlingWithCurrencyAndL() {
String query = "SELECT TO_NUMBER ('$1789', 'L9999')";
final String expected = "SELECT CAST('1789' AS BIGINT)";
final String expectedBigQuery = "SELECT CAST('1789' AS INT64)";
final String expectedSnowFlake = "SELECT TO_NUMBER('$1789', '$9999')";
sql(query)
.withHive()
.ok(expected)
.withSpark()
.ok(expected)
.withBigQuery()
.ok(expectedBigQuery)
.withSnowflake()
.ok(expectedSnowFlake);
}
@Test public void testToNumberFunctionHandlingWithMinus() {
String query = "SELECT TO_NUMBER ('-12334', 'S99999')";
final String expected = "SELECT CAST('-12334' AS BIGINT)";
final String expectedBigQuery = "SELECT CAST('-12334' AS INT64)";
final String expectedSnowFlake = "SELECT TO_NUMBER('-12334', 'S99999')";
sql(query)
.withHive()
.ok(expected)
.withSpark()
.ok(expected)
.withBigQuery()
.ok(expectedBigQuery)
.withSnowflake()
.ok(expectedSnowFlake);
}
@Test public void testToNumberFunctionHandlingWithMinusLast() {
String query = "SELECT TO_NUMBER ('12334-', '99999S')";
final String expected = "SELECT CAST('-12334' AS BIGINT)";
final String expectedBigQuery = "SELECT CAST('-12334' AS INT64)";
final String expectedSnowFlake = "SELECT TO_NUMBER('12334-', '99999S')";
sql(query)
.withHive()
.ok(expected)
.withSpark()
.ok(expected)
.withBigQuery()
.ok(expectedBigQuery)
.withSnowflake()
.ok(expectedSnowFlake);
}
  // TO_NUMBER with an exponent mask ('EEEE'): Hive/Spark cast to DECIMAL(19, 0),
  // BigQuery to NUMERIC; Snowflake supports the mask natively.
  @Test public void testToNumberFunctionHandlingWithE() {
    String query = "SELECT TO_NUMBER ('12E3', '99EEEE')";
    final String expected = "SELECT CAST('12E3' AS DECIMAL(19, 0))";
    final String expectedBigQuery = "SELECT CAST('12E3' AS NUMERIC)";
    final String expectedSnowFlake = "SELECT TO_NUMBER('12E3', '99EEEE')";
    sql(query)
        .withHive()
        .ok(expected)
        .withSpark()
        .ok(expected)
        .withBigQuery()
        .ok(expectedBigQuery)
        .withSnowflake()
        .ok(expectedSnowFlake);
  }
  // Currency-prefix mask ('L') with NLS_CURRENCY: the currency token is stripped
  // before the cast; Snowflake keeps only the cleaned numeric literal.
  @Test public void testToNumberFunctionHandlingWithCurrencyName() {
    String query = "SELECT TO_NUMBER('dollar1234','L9999','NLS_CURRENCY=''dollar''')";
    final String expected = "SELECT CAST('1234' AS BIGINT)";
    final String expectedBigQuery = "SELECT CAST('1234' AS INT64)";
    final String expectedSnowFlake = "SELECT TO_NUMBER('1234')";
    sql(query)
        .withHive()
        .ok(expected)
        .withSpark()
        .ok(expected)
        .withBigQuery()
        .ok(expectedBigQuery)
        .withSnowflake()
        .ok(expectedSnowFlake);
  }
  // Currency prefix plus decimal mask ('D'): result is treated as floating point;
  // Snowflake uses TO_NUMBER(value, precision, scale).
  @Test public void testToNumberFunctionHandlingWithCurrencyNameFloat() {
    String query = "SELECT TO_NUMBER('dollar12.34','L99D99','NLS_CURRENCY=''dollar''')";
    final String expected = "SELECT CAST('12.34' AS FLOAT)";
    final String expectedBigQuery = "SELECT CAST('12.34' AS FLOAT64)";
    final String expectedSnowFlake = "SELECT TO_NUMBER('12.34', 38, 2)";
    sql(query)
        .withHive()
        .ok(expected)
        .withSpark()
        .ok(expected)
        .withBigQuery()
        .ok(expectedBigQuery)
        .withSnowflake()
        .ok(expectedSnowFlake);
  }
  // A NULL third argument collapses the whole call to a NULL cast.
  @Test public void testToNumberFunctionHandlingWithCurrencyNameNull() {
    String query = "SELECT TO_NUMBER('dollar12.34','L99D99',null)";
    final String expected = "SELECT CAST(NULL AS INT)";
    final String expectedBigQuery = "SELECT CAST(NULL AS INT64)";
    final String expectedSnowFlake = "SELECT TO_NUMBER(NULL)";
    sql(query)
        .withHive()
        .ok(expected)
        .withSpark()
        .ok(expected)
        .withBigQuery()
        .ok(expectedBigQuery)
        .withSnowflake()
        .ok(expectedSnowFlake);
  }
  // Leading minus with currency prefix: sign is preserved on the stripped value.
  @Test public void testToNumberFunctionHandlingWithCurrencyNameMinus() {
    String query = "SELECT TO_NUMBER('-dollar1234','L9999','NLS_CURRENCY=''dollar''')";
    final String expected = "SELECT CAST('-1234' AS BIGINT)";
    final String expectedBigQuery = "SELECT CAST('-1234' AS INT64)";
    final String expectedSnowFlake = "SELECT TO_NUMBER('-1234')";
    sql(query)
        .withHive()
        .ok(expected)
        .withSpark()
        .ok(expected)
        .withBigQuery()
        .ok(expectedBigQuery)
        .withSnowflake()
        .ok(expectedSnowFlake);
  }
  // Group-separator mask ('G'): separators are removed for cast-based dialects,
  // while Snowflake keeps the original value/mask pair.
  @Test public void testToNumberFunctionHandlingWithG() {
    String query = "SELECT TO_NUMBER ('1,2345', '9G9999')";
    final String expected = "SELECT CAST('12345' AS BIGINT)";
    final String expectedBigQuery = "SELECT CAST('12345' AS INT64)";
    final String expectedSnowFlake = "SELECT TO_NUMBER('1,2345', '9G9999')";
    sql(query)
        .withHive()
        .ok(expected)
        .withSpark()
        .ok(expected)
        .withBigQuery()
        .ok(expectedBigQuery)
        .withSnowflake()
        .ok(expectedSnowFlake);
  }
  // Unicode-currency mask ('U'): the '$' symbol is stripped for cast-based
  // dialects; Snowflake rewrites the mask to use a literal '$'.
  @Test public void testToNumberFunctionHandlingWithU() {
    String query = "SELECT TO_NUMBER ('$1234', 'U9999')";
    final String expected = "SELECT CAST('1234' AS BIGINT)";
    final String expectedBigQuery = "SELECT CAST('1234' AS INT64)";
    final String expectedSnowFlake = "SELECT TO_NUMBER('$1234', '$9999')";
    sql(query)
        .withHive()
        .ok(expected)
        .withSpark()
        .ok(expected)
        .withBigQuery()
        .ok(expectedBigQuery)
        .withSnowflake()
        .ok(expectedSnowFlake);
  }
  // 'PR' (angle-bracket negative) mask with surrounding blanks: whitespace is
  // trimmed and the plain value is kept.
  @Test public void testToNumberFunctionHandlingWithPR() {
    String query = "SELECT TO_NUMBER (' 123 ', '999PR')";
    final String expected = "SELECT CAST('123' AS BIGINT)";
    final String expectedBigQuery = "SELECT CAST('123' AS INT64)";
    final String expectedSnowFlake = "SELECT TO_NUMBER('123')";
    sql(query)
        .withHive()
        .ok(expected)
        .withSpark()
        .ok(expected)
        .withBigQuery()
        .ok(expectedBigQuery)
        .withSnowflake()
        .ok(expectedSnowFlake);
  }
  // 'MI' (trailing-minus) mask on an integer: sign moves to the front for casts.
  @Test public void testToNumberFunctionHandlingWithMI() {
    String query = "SELECT TO_NUMBER ('1234-', '9999MI')";
    final String expected = "SELECT CAST('-1234' AS BIGINT)";
    final String expectedBigQuery = "SELECT CAST('-1234' AS INT64)";
    final String expectedSnowFlake = "SELECT TO_NUMBER('1234-', '9999MI')";
    sql(query)
        .withHive()
        .ok(expected)
        .withSpark()
        .ok(expected)
        .withBigQuery()
        .ok(expectedBigQuery)
        .withSnowflake()
        .ok(expectedSnowFlake);
  }
  // 'MI' mask on a decimal value: floating-point cast, Snowflake precision/scale form.
  @Test public void testToNumberFunctionHandlingWithMIDecimal() {
    String query = "SELECT TO_NUMBER ('1.234-', '9.999MI')";
    final String expected = "SELECT CAST('-1.234' AS FLOAT)";
    final String expectedBigQuery = "SELECT CAST('-1.234' AS FLOAT64)";
    final String expectedSnowFlake = "SELECT TO_NUMBER('-1.234', 38, 3)";
    sql(query)
        .withHive()
        .ok(expected)
        .withSpark()
        .ok(expected)
        .withBigQuery()
        .ok(expectedBigQuery)
        .withSnowflake()
        .ok(expectedSnowFlake);
  }
  // Zero-padding mask ('0'): leading zero is preserved in the cast literal.
  @Test public void testToNumberFunctionHandlingWithZero() {
    String query = "select TO_NUMBER('01234','09999')";
    final String expected = "SELECT CAST('01234' AS BIGINT)";
    final String expectedBigQuery = "SELECT CAST('01234' AS INT64)";
    final String expectedSnowFlake = "SELECT TO_NUMBER('01234', '09999')";
    sql(query)
        .withHive()
        .ok(expected)
        .withSpark()
        .ok(expected)
        .withBigQuery()
        .ok(expectedBigQuery)
        .withSnowflake()
        .ok(expectedSnowFlake);
  }
  // Blank-for-zero mask ('B'): no effect on the numeric value itself.
  @Test public void testToNumberFunctionHandlingWithB() {
    String query = "select TO_NUMBER('1234','B9999')";
    final String expected = "SELECT CAST('1234' AS BIGINT)";
    final String expectedBigQuery = "SELECT CAST('1234' AS INT64)";
    final String expectedSnowFlake = "SELECT TO_NUMBER('1234', 'B9999')";
    sql(query)
        .withHive()
        .ok(expected)
        .withSpark()
        .ok(expected)
        .withBigQuery()
        .ok(expectedBigQuery)
        .withSnowflake()
        .ok(expectedSnowFlake);
  }
  // ISO-currency mask ('C'): the 'USD' prefix is stripped before the cast.
  @Test public void testToNumberFunctionHandlingWithC() {
    String query = "select TO_NUMBER('USD1234','C9999')";
    final String expected = "SELECT CAST('1234' AS BIGINT)";
    final String expectedBigQuery = "SELECT CAST('1234' AS INT64)";
    final String expectedSnowFlake = "SELECT TO_NUMBER('1234')";
    sql(query)
        .withHive()
        .ok(expected)
        .withSpark()
        .ok(expected)
        .withBigQuery()
        .ok(expectedBigQuery)
        .withSnowflake()
        .ok(expectedSnowFlake);
  }
  // Plain two-argument TO_NUMBER with a digits-only mask.
  @Test public void testToNumberFunctionHandling() {
    final String query = "SELECT TO_NUMBER ('1234', '9999')";
    final String expected = "SELECT CAST('1234' AS BIGINT)";
    final String expectedBigQuery = "SELECT CAST('1234' AS INT64)";
    final String expectedSnowFlake = "SELECT TO_NUMBER('1234', '9999')";
    sql(query)
        .withHive()
        .ok(expected)
        .withSpark()
        .ok(expected)
        .withBigQuery()
        .ok(expectedBigQuery)
        .withSnowflake()
        .ok(expectedSnowFlake);
  }
  // Single-argument form with an integer literal: cast to the dialect's bigint type.
  @Test public void testToNumberFunctionHandlingSingleArgumentInt() {
    final String query = "SELECT TO_NUMBER ('1234')";
    final String expected = "SELECT CAST('1234' AS BIGINT)";
    final String expectedBigQuery = "SELECT CAST('1234' AS INT64)";
    final String expectedSnowFlake = "SELECT TO_NUMBER('1234')";
    sql(query)
        .withHive()
        .ok(expected)
        .withSpark()
        .ok(expected)
        .withBigQuery()
        .ok(expectedBigQuery)
        .withSnowflake()
        .ok(expectedSnowFlake);
  }
  // Single-argument form with a negative decimal: float cast; Snowflake keeps
  // the scale (3 fractional digits) in its precision/scale form.
  @Test public void testToNumberFunctionHandlingSingleArgumentFloat() {
    final String query = "SELECT TO_NUMBER ('-1.234')";
    final String expected = "SELECT CAST('-1.234' AS FLOAT)";
    final String expectedBigQuery = "SELECT CAST('-1.234' AS FLOAT64)";
    final String expectedSnowFlake = "SELECT TO_NUMBER('-1.234', 38, 3)";
    sql(query)
        .withHive()
        .ok(expected)
        .withSpark()
        .ok(expected)
        .withBigQuery()
        .ok(expectedBigQuery)
        .withSnowflake()
        .ok(expectedSnowFlake);
  }
  // NULL format argument: the whole expression degenerates to a NULL cast.
  @Test public void testToNumberFunctionHandlingNull() {
    final String query = "SELECT TO_NUMBER ('-1.234',null)";
    final String expected = "SELECT CAST(NULL AS INT)";
    final String expectedBigQuery = "SELECT CAST(NULL AS INT64)";
    final String expectedSnowFlake = "SELECT TO_NUMBER(NULL)";
    sql(query)
        .withHive()
        .ok(expected)
        .withSpark()
        .ok(expected)
        .withBigQuery()
        .ok(expectedBigQuery)
        .withSnowflake()
        .ok(expectedSnowFlake);
  }
  // NULL operand: same NULL-cast degeneration as a NULL format argument.
  @Test public void testToNumberFunctionHandlingNullOperand() {
    final String query = "SELECT TO_NUMBER (null)";
    final String expected = "SELECT CAST(NULL AS INT)";
    final String expectedBigQuery = "SELECT CAST(NULL AS INT64)";
    final String expectedSnowFlake = "SELECT TO_NUMBER(NULL)";
    sql(query)
        .withHive()
        .ok(expected)
        .withSpark()
        .ok(expected)
        .withBigQuery()
        .ok(expectedBigQuery)
        .withSnowflake()
        .ok(expectedSnowFlake);
  }
  // NULL first argument with a non-null mask: still a NULL cast.
  @Test public void testToNumberFunctionHandlingSecoNull() {
    final String query = "SELECT TO_NUMBER(null,'9D99')";
    final String expected = "SELECT CAST(NULL AS INT)";
    final String expectedBigQuery = "SELECT CAST(NULL AS INT64)";
    final String expectedSnowFlake = "SELECT TO_NUMBER(NULL)";
    sql(query)
        .withHive()
        .ok(expected)
        .withSpark()
        .ok(expected)
        .withBigQuery()
        .ok(expectedBigQuery)
        .withSnowflake()
        .ok(expectedSnowFlake);
  }
@Test public void testToNumberFunctionHandlingFunctionAsArgument() {
final String query = "SELECT TO_NUMBER(SUBSTRING('12345',2))";
final String expected = "SELECT CAST(SUBSTRING('12345', 2) AS BIGINT)";
final String expectedSpark = "SELECT CAST(SUBSTRING('12345', 2) AS BIGINT)";
final String expectedBigQuery = "SELECT CAST(SUBSTR('12345', 2) AS INT64)";
final String expectedSnowFlake = "SELECT TO_NUMBER(SUBSTR('12345', 2))";
sql(query)
.withHive()
.ok(expected)
.withSpark()
.ok(expectedSpark)
.withBigQuery()
.ok(expectedBigQuery)
.withSnowflake()
.ok(expectedSnowFlake);
}
  // NULL operand (duplicate coverage of testToNumberFunctionHandlingNullOperand):
  // TO_NUMBER(null) becomes a NULL cast in cast-based dialects.
  @Test public void testToNumberFunctionHandlingWithNullArgument() {
    final String query = "SELECT TO_NUMBER (null)";
    final String expected = "SELECT CAST(NULL AS INT)";
    final String expectedBigQuery = "SELECT CAST(NULL AS INT64)";
    final String expectedSnowFlake = "SELECT TO_NUMBER(NULL)";
    sql(query)
        .withHive()
        .ok(expected)
        .withSpark()
        .ok(expected)
        .withBigQuery()
        .ok(expectedBigQuery)
        .withSnowflake()
        .ok(expectedSnowFlake);
  }
@Test public void testToNumberFunctionHandlingCaseWhenThen() {
final String query = "select case when TO_NUMBER('12.77') is not null then "
+ "'is_numeric' else 'is not numeric' end";
final String expected = "SELECT CASE WHEN CAST('12.77' AS FLOAT) IS NOT NULL THEN "
+ "'is_numeric ' ELSE 'is not numeric' END";
final String expectedBigQuery = "SELECT CASE WHEN CAST('12.77' AS FLOAT64) IS NOT NULL THEN "
+ "'is_numeric ' ELSE 'is not numeric' END";
final String expectedSnowFlake = "SELECT CASE WHEN TO_NUMBER('12.77', 38, 2) IS NOT NULL THEN"
+ " 'is_numeric ' ELSE 'is not numeric' END";
sql(query)
.withHive()
.ok(expected)
.withSpark()
.ok(expected)
.withBigQuery()
.ok(expectedBigQuery)
.withSnowflake()
.ok(expectedSnowFlake);
}
  // Combined group ('G'), decimal ('D') and sign ('S') masks: separators removed,
  // sign moved to the front, float cast. Also checks MSSQL, which here shares the
  // Hive-style CAST output.
  @Test public void testToNumberFunctionHandlingWithGDS() {
    String query = "SELECT TO_NUMBER ('12,454.8-', '99G999D9S')";
    final String expected = "SELECT CAST('-12454.8' AS FLOAT)";
    final String expectedBigQuery = "SELECT CAST('-12454.8' AS FLOAT64)";
    final String expectedSnowFlake = "SELECT TO_NUMBER('-12454.8', 38, 1)";
    sql(query)
        .withHive()
        .ok(expected)
        .withSpark()
        .ok(expected)
        .withBigQuery()
        .ok(expectedBigQuery)
        .withSnowflake()
        .ok(expectedSnowFlake)
        .withMssql()
        .ok(expected);
  }
@Test public void testAscii() {
String query = "SELECT ASCII ('ABC')";
final String expected = "SELECT ASCII('ABC')";
final String expectedBigQuery = "SELECT TO_CODE_POINTS('ABC') [OFFSET(0)]";
sql(query)
.withBigQuery()
.ok(expectedBigQuery)
.withHive()
.ok(expected)
.withSpark()
.ok(expected);
}
@Test public void testAsciiMethodArgument() {
String query = "SELECT ASCII (SUBSTRING('ABC',1,1))";
final String expected = "SELECT ASCII(SUBSTRING('ABC', 1, 1))";
final String expectedSpark = "SELECT ASCII(SUBSTRING('ABC', 1, 1))";
final String expectedBigQuery = "SELECT TO_CODE_POINTS(SUBSTR('ABC', 1, 1)) [OFFSET(0)]";
sql(query)
.withBigQuery()
.ok(expectedBigQuery)
.withHive()
.ok(expected)
.withSpark()
.ok(expectedSpark);
}
@Test public void testAsciiColumnArgument() {
final String query = "select ASCII(\"product_name\") from \"product\" ";
final String bigQueryExpected = "SELECT TO_CODE_POINTS(product_name) [OFFSET(0)]\n"
+ "FROM foodmart.product";
final String hiveExpected = "SELECT ASCII(product_name)\n"
+ "FROM foodmart.product";
sql(query)
.withBigQuery()
.ok(bigQueryExpected)
.withHive()
.ok(hiveExpected);
}
  // NULLIF built through RelBuilder: most dialects keep NULLIF, but Hive has no
  // such function and rewrites it as IF(a = b, NULL, a).
  @Test public void testNullIfFunctionRelToSql() {
    final RelBuilder builder = relBuilder();
    // NULLIF(EMPNO, 20) — field(0) of EMP is EMPNO.
    final RexNode nullifRexNode = builder.call(SqlStdOperatorTable.NULLIF,
        builder.scan("EMP").field(0), builder.literal(20));
    final RelNode root = builder
        .scan("EMP")
        .project(builder.alias(nullifRexNode, "NI"))
        .build();
    final String expectedSql = "SELECT NULLIF(\"EMPNO\", 20) AS \"NI\"\n"
        + "FROM \"scott\".\"EMP\"";
    final String expectedBiqQuery = "SELECT NULLIF(EMPNO, 20) AS NI\n"
        + "FROM scott.EMP";
    final String expectedSpark = "SELECT NULLIF(EMPNO, 20) NI\n"
        + "FROM scott.EMP";
    final String expectedHive = "SELECT IF(EMPNO = 20, NULL, EMPNO) NI\n"
        + "FROM scott.EMP";
    assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
    assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
    assertThat(toSql(root, DatabaseProduct.SPARK.getDialect()), isLinux(expectedSpark));
    assertThat(toSql(root, DatabaseProduct.HIVE.getDialect()), isLinux(expectedHive));
  }
@Test public void testCurrentUser() {
String query = "select CURRENT_USER";
final String expectedSql = "SELECT CURRENT_USER() CURRENT_USER";
final String expectedSqlBQ = "SELECT SESSION_USER() AS CURRENT_USER";
sql(query)
.withHive()
.ok(expectedSql)
.withBigQuery()
.ok(expectedSqlBQ);
}
  // CURRENT_USER with a user-supplied alias: Hive emits the alias bare, BigQuery
  // uses the AS keyword.
  @Test public void testCurrentUserWithAlias() {
    String query = "select CURRENT_USER myuser from \"product\" where \"product_id\" = 1";
    final String expectedSql = "SELECT CURRENT_USER() MYUSER\n"
        + "FROM foodmart.product\n"
        + "WHERE product_id = 1";
    final String expected = "SELECT SESSION_USER() AS MYUSER\n"
        + "FROM foodmart.product\n"
        + "WHERE product_id = 1";
    sql(query)
        .withHive()
        .ok(expectedSql)
        .withBigQuery()
        .ok(expected);
  }
@Test void testSelectCountStar() {
final String query = "select count(*) from \"product\"";
final String expected = "SELECT COUNT(*)\n"
+ "FROM \"foodmart\".\"product\"";
Sql sql = sql(query);
sql.ok(expected);
}
  // Multi-row INSERT ... VALUES with explicit ROW constructors. Each dialect is
  // checked twice: once with value simplification on (plain VALUES list) and once
  // with withSimplifyValues(false), which expands the rows into a UNION ALL of
  // single-row SELECTs (the "...X" expectations).
  @Test void testRowValueExpression() {
    String sql = "insert into \"DEPT\"\n"
        + "values ROW(1,'Fred', 'San Francisco'),\n"
        + " ROW(2, 'Eric', 'Washington')";
    final String expectedDefault = "INSERT INTO \"SCOTT\".\"DEPT\""
        + " (\"DEPTNO\", \"DNAME\", \"LOC\")\n"
        + "VALUES (1, 'Fred', 'San Francisco'),\n"
        + "(2, 'Eric', 'Washington')";
    final String expectedDefaultX = "INSERT INTO \"SCOTT\".\"DEPT\""
        + " (\"DEPTNO\", \"DNAME\", \"LOC\")\n"
        + "SELECT 1, 'Fred', 'San Francisco'\n"
        + "FROM (VALUES (0)) AS \"t\" (\"ZERO\")\n"
        + "UNION ALL\n"
        + "SELECT 2, 'Eric', 'Washington'\n"
        + "FROM (VALUES (0)) AS \"t\" (\"ZERO\")"; // Calcite needs a dummy FROM
    final String expectedHive = "INSERT INTO SCOTT.DEPT (DEPTNO, DNAME, LOC)\n"
        + "VALUES (1, 'Fred', 'San Francisco'),\n"
        + "(2, 'Eric', 'Washington')";
    final String expectedHiveX = "INSERT INTO SCOTT.DEPT (DEPTNO, DNAME, LOC)\n"
        + "SELECT 1, 'Fred', 'San Francisco'\n"
        + "UNION ALL\n"
        + "SELECT 2, 'Eric', 'Washington'"; // Hive allows SELECT without FROM
    final String expectedMysql = "INSERT INTO `SCOTT`.`DEPT`"
        + " (`DEPTNO`, `DNAME`, `LOC`)\n"
        + "VALUES (1, 'Fred', 'San Francisco'),\n"
        + "(2, 'Eric', 'Washington')";
    final String expectedMysqlX = "INSERT INTO `SCOTT`.`DEPT`"
        + " (`DEPTNO`, `DNAME`, `LOC`)\nSELECT 1, 'Fred', 'San Francisco'\n"
        + "UNION ALL\n"
        + "SELECT 2, 'Eric', 'Washington'"
    final String expectedOracle = "INSERT INTO \"SCOTT\".\"DEPT\""
        + " (\"DEPTNO\", \"DNAME\", \"LOC\")\n"
        + "VALUES (1, 'Fred', 'San Francisco'),\n"
        + "(2, 'Eric', 'Washington')";
    final String expectedOracleX = "INSERT INTO \"SCOTT\".\"DEPT\""
        + " (\"DEPTNO\", \"DNAME\", \"LOC\")\n"
        + "SELECT 1, 'Fred', 'San Francisco'\n"
        + "FROM \"DUAL\"\n"
        + "UNION ALL\n"
        + "SELECT 2, 'Eric', 'Washington'\n"
        + "FROM \"DUAL\""; // Oracle requires FROM DUAL for table-less SELECT
    final String expectedMssql = "INSERT INTO [SCOTT].[DEPT]"
        + " ([DEPTNO], [DNAME], [LOC])\n"
        + "VALUES (1, 'Fred', 'San Francisco'),\n"
        + "(2, 'Eric', 'Washington')";
    final String expectedMssqlX = "INSERT INTO [SCOTT].[DEPT]"
        + " ([DEPTNO], [DNAME], [LOC])\n"
        + "SELECT 1, 'Fred', 'San Francisco'\n"
        + "UNION ALL\n"
        + "SELECT 2, 'Eric', 'Washington'"
    final String expectedCalcite = "INSERT INTO \"SCOTT\".\"DEPT\""
        + " (\"DEPTNO\", \"DNAME\", \"LOC\")\n"
        + "VALUES (1, 'Fred', 'San Francisco'),\n"
        + "(2, 'Eric', 'Washington')";
    final String expectedCalciteX = "INSERT INTO \"SCOTT\".\"DEPT\""
        + " (\"DEPTNO\", \"DNAME\", \"LOC\")\n"
        + "SELECT 1, 'Fred', 'San Francisco'\n"
        + "FROM (VALUES (0)) AS \"t\" (\"ZERO\")\n"
        + "UNION ALL\n"
        + "SELECT 2, 'Eric', 'Washington'\n"
        + "FROM (VALUES (0)) AS \"t\" (\"ZERO\")";
    sql(sql)
        .schema(CalciteAssert.SchemaSpec.JDBC_SCOTT)
        .ok(expectedDefault)
        .withHive().ok(expectedHive)
        .withMysql().ok(expectedMysql)
        .withOracle().ok(expectedOracle)
        .withMssql().ok(expectedMssql)
        .withCalcite().ok(expectedCalcite)
        .withConfig(c ->
            c.withRelBuilderConfigTransform(b ->
                b.withSimplifyValues(false)))
        .withCalcite().ok(expectedDefaultX)
        .withHive().ok(expectedHiveX)
        .withMysql().ok(expectedMysqlX)
        .withOracle().ok(expectedOracleX)
        .withMssql().ok(expectedMssqlX)
        .withCalcite().ok(expectedCalciteX);
  }
@Test void testInsertValuesWithDynamicParams() {
final String sql = "insert into \"DEPT\" values (?,?,?), (?,?,?)";
final String expected = ""
+ "INSERT INTO \"SCOTT\".\"DEPT\" (\"DEPTNO\", \"DNAME\", \"LOC\")\n"
+ "SELECT ?, ?, ?\n"
+ "FROM (VALUES (0)) AS \"t\" (\"ZERO\")\n"
+ "UNION ALL\n"
+ "SELECT ?, ?, ?\n"
+ "FROM (VALUES (0)) AS \"t\" (\"ZERO\")";
sql(sql)
.schema(CalciteAssert.SchemaSpec.JDBC_SCOTT)
.ok(expected);
}
  // Same as testInsertValuesWithDynamicParams, but with the target column list
  // spelled out explicitly in the input SQL.
  @Test void testInsertValuesWithExplicitColumnsAndDynamicParams() {
    final String sql = ""
        + "insert into \"DEPT\" (\"DEPTNO\", \"DNAME\", \"LOC\")\n"
        + "values (?,?,?), (?,?,?)";
    final String expected = ""
        + "INSERT INTO \"SCOTT\".\"DEPT\" (\"DEPTNO\", \"DNAME\", \"LOC\")\n"
        + "SELECT ?, ?, ?\n"
        + "FROM (VALUES (0)) AS \"t\" (\"ZERO\")\n"
        + "UNION ALL\n"
        + "SELECT ?, ?, ?\n"
        + "FROM (VALUES (0)) AS \"t\" (\"ZERO\")"
    sql(sql)
        .schema(CalciteAssert.SchemaSpec.JDBC_SCOTT)
        .ok(expected);
  }
  // Table-function scans: CURSOR sub-queries gain an extra level of parentheses
  // on the way out; a no-cursor call (RAMP) round-trips unchanged.
  @Test void testTableFunctionScan() {
    final String query = "SELECT *\n"
        + "FROM TABLE(DEDUP(CURSOR(select \"product_id\", \"product_name\"\n"
        + "from \"product\"), CURSOR(select \"employee_id\", \"full_name\"\n"
        + "from \"employee\"), 'NAME'))";
    final String expected = "SELECT *\n"
        + "FROM TABLE(DEDUP(CURSOR ((SELECT \"product_id\", \"product_name\"\n"
        + "FROM \"foodmart\".\"product\")), CURSOR ((SELECT \"employee_id\", \"full_name\"\n"
        + "FROM \"foodmart\".\"employee\")), 'NAME'))";
    sql(query).ok(expected);
    final String query2 = "select * from table(ramp(3))";
    sql(query2).ok("SELECT *\n"
        + "FROM TABLE(RAMP(3))");
  }
  // Table-function scan whose CURSOR inputs carry WHERE and GROUP BY clauses;
  // the inner queries must be preserved inside the parenthesized cursors.
  @Test void testTableFunctionScanWithComplexQuery() {
    final String query = "SELECT *\n"
        + "FROM TABLE(DEDUP(CURSOR(select \"product_id\", \"product_name\"\n"
        + "from \"product\"\n"
        + "where \"net_weight\" > 100 and \"product_name\" = 'Hello World')\n"
        + ",CURSOR(select \"employee_id\", \"full_name\"\n"
        + "from \"employee\"\n"
        + "group by \"employee_id\", \"full_name\"), 'NAME'))";
    final String expected = "SELECT *\n"
        + "FROM TABLE(DEDUP(CURSOR ((SELECT \"product_id\", \"product_name\"\n"
        + "FROM \"foodmart\".\"product\"\n"
        + "WHERE \"net_weight\" > 100 AND \"product_name\" = 'Hello World')), "
        + "CURSOR ((SELECT \"employee_id\", \"full_name\"\n"
        + "FROM \"foodmart\".\"employee\"\n"
        + "GROUP BY \"employee_id\", \"full_name\")), 'NAME'))";
    sql(query).ok(expected);
  }
  /** Test case for
   * <a href="https://issues.apache.org/jira/browse/CALCITE-3593">[CALCITE-3593]
   * RelToSqlConverter changes target of ambiguous HAVING clause with a Project
   * on Filter on Aggregate</a>.
   *
   * <p>NOTE(review): the test below is currently disabled (commented out);
   * document the reason or re-enable it once the expected BigQuery HAVING-alias
   * behavior is supported. */
  /*@Test void testBigQueryHaving() {
final String sql = ""
+ "SELECT \"DEPTNO\" - 10 \"DEPT\"\n"
+ "FROM \"EMP\"\n"
+ "GROUP BY \"DEPTNO\"\n"
+ "HAVING \"DEPTNO\" > 0";
final String expected = ""
+ "SELECT DEPTNO - 10 AS DEPTNO\n"
+ "FROM (SELECT DEPTNO\n"
+ "FROM SCOTT.EMP\n"
+ "GROUP BY DEPTNO\n"
+ "HAVING DEPTNO > 0) AS t1";
// Parse the input SQL with PostgreSQL dialect,
// in which "isHavingAlias" is false.
final SqlParser.Config parserConfig =
PostgresqlSqlDialect.DEFAULT.configureParser(SqlParser.config());
// Convert rel node to SQL with BigQuery dialect,
// in which "isHavingAlias" is true.
sql(sql)
.parserConfig(parserConfig)
.schema(CalciteAssert.SchemaSpec.JDBC_SCOTT)
.withBigQuery()
.ok(expected);
}
*/
@Test public void testCastToTimestamp() {
String query = "SELECT cast(\"birth_date\" as TIMESTAMP) "
+ "FROM \"foodmart\".\"employee\"";
final String expected = "SELECT CAST(birth_date AS TIMESTAMP)\n"
+ "FROM foodmart.employee";
final String expectedBigQuery = "SELECT CAST(birth_date AS DATETIME)\n"
+ "FROM foodmart.employee";
sql(query)
.withHive()
.ok(expected)
.withSpark()
.ok(expected)
.withBigQuery()
.ok(expectedBigQuery);
}
  // TIMESTAMP(3): precision is emulated by reformatting through DATE_FORMAT /
  // FORMAT_TIMESTAMP and casting back.
  // NOTE(review): the Hive pattern uses lowercase 'sss' while Spark uses 'SSS'
  // (milliseconds in SimpleDateFormat) — presumably this mirrors the actual
  // HiveSqlDialect output; confirm the lowercase form is intentional.
  @Test public void testCastToTimestampWithPrecision() {
    String query = "SELECT cast(\"birth_date\" as TIMESTAMP(3)) "
        + "FROM \"foodmart\".\"employee\"";
    final String expectedHive = "SELECT CAST(DATE_FORMAT(CAST(birth_date AS TIMESTAMP), "
        + "'yyyy-MM-dd HH:mm:ss.sss') AS TIMESTAMP)\n"
        + "FROM foodmart.employee";
    final String expectedSpark = "SELECT CAST(DATE_FORMAT(CAST(birth_date AS TIMESTAMP), "
        + "'yyyy-MM-dd HH:mm:ss.SSS') AS TIMESTAMP)\nFROM foodmart.employee";
    final String expectedBigQuery = "SELECT CAST(FORMAT_TIMESTAMP('%F %H:%M:%E3S', CAST"
        + "(birth_date AS DATETIME)) AS DATETIME)\n"
        + "FROM foodmart.employee";
    sql(query)
        .withHive()
        .ok(expectedHive)
        .withSpark()
        .ok(expectedSpark)
        .withBigQuery()
        .ok(expectedBigQuery);
  }
  // TIME cast: Hive/Spark have no TIME type, so the time-of-day portion is
  // extracted by formatting and splitting on the space; BigQuery casts natively.
  @Test public void testCastToTime() {
    String query = "SELECT cast(\"hire_date\" as TIME) "
        + "FROM \"foodmart\".\"employee\"";
    final String expected = "SELECT SPLIT(DATE_FORMAT(hire_date, 'yyyy-MM-dd HH:mm:ss'), ' ')[1]\n"
        + "FROM foodmart.employee";
    final String expectedBigQuery = "SELECT CAST(hire_date AS TIME)\n"
        + "FROM foodmart.employee";
    sql(query)
        .withHive()
        .ok(expected)
        .withSpark()
        .ok(expected)
        .withBigQuery()
        .ok(expectedBigQuery);
  }
  // TIME(5): same split-based emulation with fractional seconds in the pattern;
  // BigQuery uses FORMAT_TIME. Note the output carries millisecond precision
  // (%E3S / .SSS) rather than the requested 5 digits.
  @Test public void testCastToTimeWithPrecision() {
    String query = "SELECT cast(\"hire_date\" as TIME(5)) "
        + "FROM \"foodmart\".\"employee\"";
    final String expectedHive = "SELECT SPLIT(DATE_FORMAT(hire_date, 'yyyy-MM-dd HH:mm:ss.sss'), "
        + "' ')[1]\n"
        + "FROM foodmart.employee";
    final String expectedSpark = "SELECT SPLIT(DATE_FORMAT(hire_date, 'yyyy-MM-dd HH:mm:ss.SSS'),"
        + " ' ')[1]\nFROM foodmart.employee";
    final String expectedBigQuery = "SELECT CAST(FORMAT_TIME('%H:%M:%E3S', CAST(hire_date AS TIME))"
        + " AS TIME)\n"
        + "FROM foodmart.employee";
    sql(query)
        .withHive()
        .ok(expectedHive)
        .withSpark()
        .ok(expectedSpark)
        .withBigQuery()
        .ok(expectedBigQuery);
  }
  // TIME(5) cast over a string concatenation: Hive/Spark drop the cast entirely
  // and keep only the concatenation; BigQuery and MSSQL keep an explicit cast.
  @Test public void testCastToTimeWithPrecisionWithStringInput() {
    String query = "SELECT cast('12:00'||':05' as TIME(5)) "
        + "FROM \"foodmart\".\"employee\"";
    final String expectedHive = "SELECT CONCAT('12:00', ':05')\n"
        + "FROM foodmart.employee";
    final String expectedSpark = "SELECT '12:00' || ':05'\n"
        + "FROM foodmart.employee";
    final String expectedBigQuery = "SELECT CAST(FORMAT_TIME('%H:%M:%E3S', CAST(CONCAT('12:00', "
        + "':05') AS TIME)) AS TIME)\n"
        + "FROM foodmart.employee";
    final String mssql = "SELECT CAST(CONCAT('12:00', ':05') AS TIME(3))\n"
        + "FROM [foodmart].[employee]";
    sql(query)
        .withHive()
        .ok(expectedHive)
        .withSpark()
        .ok(expectedSpark)
        .withBigQuery()
        .ok(expectedBigQuery)
        .withMssql()
        .ok(mssql);
  }
  // TIME(3) cast of a constant string: Hive/Spark reduce it to the bare literal;
  // BigQuery folds it to a typed TIME literal with millisecond precision.
  @Test public void testCastToTimeWithPrecisionWithStringLiteral() {
    String query = "SELECT cast('12:00:05' as TIME(3)) "
        + "FROM \"foodmart\".\"employee\"";
    final String expectedHive = "SELECT '12:00:05'\n"
        + "FROM foodmart.employee";
    final String expectedSpark = expectedHive;
    final String expectedBigQuery = "SELECT TIME '12:00:05.000'\n"
        + "FROM foodmart.employee";
    sql(query)
        .withHive()
        .ok(expectedHive)
        .withSpark()
        .ok(expectedSpark)
        .withBigQuery()
        .ok(expectedBigQuery);
  }
  // FORMAT_DATE built via RelBuilder: each dialect translates the Teradata-style
  // 'YYYY-MM-DD' mask to its own format syntax (%F for BigQuery, 'yyyy-MM-dd'
  // for Hive/Spark, TO_VARCHAR for Snowflake).
  @Test public void testFormatDateRelToSql() {
    final RelBuilder builder = relBuilder();
    // field(4) of EMP is HIREDATE.
    final RexNode formatDateRexNode = builder.call(SqlLibraryOperators.FORMAT_DATE,
        builder.literal("YYYY-MM-DD"), builder.scan("EMP").field(4));
    final RelNode root = builder
        .scan("EMP")
        .project(builder.alias(formatDateRexNode, "FD"))
        .build();
    final String expectedSql = "SELECT FORMAT_DATE('YYYY-MM-DD', \"HIREDATE\") AS \"FD\"\n"
        + "FROM \"scott\".\"EMP\"";
    final String expectedBiqQuery = "SELECT FORMAT_DATE('%F', HIREDATE) AS FD\n"
        + "FROM scott.EMP";
    final String expectedHive = "SELECT DATE_FORMAT(HIREDATE, 'yyyy-MM-dd') FD\n"
        + "FROM scott.EMP";
    final String expectedSnowFlake = "SELECT TO_VARCHAR(\"HIREDATE\", 'YYYY-MM-DD') AS \"FD\"\n"
        + "FROM \"scott\".\"EMP\"";
    final String expectedSpark = expectedHive;
    assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
    assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
    assertThat(toSql(root, DatabaseProduct.HIVE.getDialect()), isLinux(expectedHive));
    assertThat(toSql(root, DatabaseProduct.SPARK.getDialect()), isLinux(expectedSpark));
    assertThat(toSql(root, DatabaseProduct.SNOWFLAKE.getDialect()), isLinux(expectedSnowFlake));
  }
  // FORMAT_DATE with 'W' and 'WW' masks (presumably week-of-month and
  // week-of-year): BigQuery has no direct equivalent, so they are emulated as
  // CEIL(EXTRACT(DAY ...) / 7) and CEIL(EXTRACT(DAYOFYEAR ...) / 7) cast to STRING.
  @Test public void testDOMAndDOY() {
    final RelBuilder builder = relBuilder();
    final RexNode dayOfMonthRexNode = builder.call(SqlLibraryOperators.FORMAT_DATE,
        builder.literal("W"), builder.scan("EMP").field(4));
    final RexNode dayOfYearRexNode = builder.call(SqlLibraryOperators.FORMAT_DATE,
        builder.literal("WW"), builder.scan("EMP").field(4));
    final RelNode domRoot = builder
        .scan("EMP")
        .project(builder.alias(dayOfMonthRexNode, "FD"))
        .build();
    final RelNode doyRoot = builder
        .scan("EMP")
        .project(builder.alias(dayOfYearRexNode, "FD"))
        .build();
    final String expectedDOMBiqQuery = "SELECT CAST(CEIL(EXTRACT(DAY "
        + "FROM HIREDATE) / 7) AS STRING) AS FD\n"
        + "FROM scott.EMP";
    final String expectedDOYBiqQuery = "SELECT CAST(CEIL(EXTRACT(DAYOFYEAR "
        + "FROM HIREDATE) / 7) AS STRING) AS FD\n"
        + "FROM scott.EMP";
    assertThat(toSql(doyRoot, DatabaseProduct.BIG_QUERY.getDialect()),
        isLinux(expectedDOYBiqQuery));
    assertThat(toSql(domRoot, DatabaseProduct.BIG_QUERY.getDialect()),
        isLinux(expectedDOMBiqQuery));
  }
  // FORMAT_TIMESTAMP with a fractional-seconds mask 'S(5)': BigQuery maps it to
  // %E5S, Spark to 'SSSSS'.
  // NOTE(review): the Hive pattern uses lowercase 'sssss' (seconds padding in
  // SimpleDateFormat, not fractional seconds) — presumably this mirrors the
  // actual HiveSqlDialect output; confirm it is intentional.
  @Test public void testFormatTimestampRelToSql() {
    final RelBuilder builder = relBuilder();
    final RexNode formatTimestampRexNode = builder.call(SqlLibraryOperators.FORMAT_TIMESTAMP,
        builder.literal("YYYY-MM-DD HH:MI:SS.S(5)"), builder.scan("EMP").field(4));
    final RelNode root = builder
        .scan("EMP")
        .project(builder.alias(formatTimestampRexNode, "FD"))
        .build();
    final String expectedSql = "SELECT FORMAT_TIMESTAMP('YYYY-MM-DD HH:MI:SS.S(5)', \"HIREDATE\") "
        + "AS \"FD\"\n"
        + "FROM \"scott\".\"EMP\"";
    final String expectedSpark = "SELECT DATE_FORMAT(HIREDATE, 'yyyy-MM-dd hh:mm:ss.SSSSS') FD\n"
        + "FROM scott.EMP";
    final String expectedBiqQuery = "SELECT FORMAT_TIMESTAMP('%F %I:%M:%E5S', HIREDATE) AS FD\n"
        + "FROM scott.EMP";
    final String expectedHive = "SELECT DATE_FORMAT(HIREDATE, 'yyyy-MM-dd hh:mm:ss.sssss') FD\n"
        + "FROM scott.EMP";
    assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
    assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
    assertThat(toSql(root, DatabaseProduct.HIVE.getDialect()), isLinux(expectedHive));
    assertThat(toSql(root, DatabaseProduct.SPARK.getDialect()), isLinux(expectedSpark));
  }
  // Batch of FORMAT_TIMESTAMP mask translations to BigQuery: HH24 -> %H,
  // HH -> %I, MI -> %M, SS -> %S, YYYYMMDD -> %Y%m%d, MS -> %E.
  @Test public void testFormatTimestampFormatsRelToSql() {
    final RelBuilder builder = relBuilder();
    final RexNode formatTimestampRexNode2 = builder.call(SqlLibraryOperators.FORMAT_TIMESTAMP,
        builder.literal("HH24MI"), builder.scan("EMP").field(4));
    final RexNode formatTimestampRexNode3 = builder.call(SqlLibraryOperators.FORMAT_TIMESTAMP,
        builder.literal("HH24MISS"), builder.scan("EMP").field(4));
    final RexNode formatTimestampRexNode4 = builder.call(SqlLibraryOperators.FORMAT_TIMESTAMP,
        builder.literal("YYYYMMDDHH24MISS"), builder.scan("EMP").field(4));
    final RexNode formatTimestampRexNode5 = builder.call(SqlLibraryOperators.FORMAT_TIMESTAMP,
        builder.literal("YYYYMMDDHHMISS"), builder.scan("EMP").field(4));
    final RexNode formatTimestampRexNode6 = builder.call(SqlLibraryOperators.FORMAT_TIMESTAMP,
        builder.literal("YYYYMMDDHH24MI"), builder.scan("EMP").field(4));
    final RexNode formatTimestampRexNode7 = builder.call(SqlLibraryOperators.FORMAT_TIMESTAMP,
        builder.literal("YYYYMMDDHH24"), builder.scan("EMP").field(4));
    final RexNode formatTimestampRexNode8 = builder.call(SqlLibraryOperators.FORMAT_TIMESTAMP,
        builder.literal("MS"), builder.scan("EMP").field(4))
    final RelNode root = builder
        .scan("EMP")
        .project(builder.alias(formatTimestampRexNode2, "FD2"),
            builder.alias(formatTimestampRexNode3, "FD3"),
            builder.alias(formatTimestampRexNode4, "FD4"),
            builder.alias(formatTimestampRexNode5, "FD5"),
            builder.alias(formatTimestampRexNode6, "FD6"),
            builder.alias(formatTimestampRexNode7, "FD7"),
            builder.alias(formatTimestampRexNode8, "FD8"))
        .build();
    final String expectedSql = "SELECT FORMAT_TIMESTAMP('HH24MI', \"HIREDATE\") AS \"FD2\", "
        + "FORMAT_TIMESTAMP('HH24MISS', \"HIREDATE\") AS \"FD3\", "
        + "FORMAT_TIMESTAMP('YYYYMMDDHH24MISS', \"HIREDATE\") AS \"FD4\", "
        + "FORMAT_TIMESTAMP('YYYYMMDDHHMISS', \"HIREDATE\") AS \"FD5\", FORMAT_TIMESTAMP"
        + "('YYYYMMDDHH24MI', \"HIREDATE\") AS \"FD6\", FORMAT_TIMESTAMP('YYYYMMDDHH24', "
        + "\"HIREDATE\") AS \"FD7\", FORMAT_TIMESTAMP('MS', \"HIREDATE\") AS \"FD8\"\n"
        + "FROM \"scott\".\"EMP\"";
    final String expectedBiqQuery = "SELECT FORMAT_TIMESTAMP('%H%M', HIREDATE) AS FD2, "
        + "FORMAT_TIMESTAMP('%H%M%S', HIREDATE) AS FD3, FORMAT_TIMESTAMP('%Y%m%d%H%M%S', "
        + "HIREDATE) AS FD4, FORMAT_TIMESTAMP('%Y%m%d%I%M%S', HIREDATE) AS FD5, FORMAT_TIMESTAMP"
        + "('%Y%m%d%H%M', HIREDATE) AS FD6, FORMAT_TIMESTAMP('%Y%m%d%H', HIREDATE) AS FD7, "
        + "FORMAT_TIMESTAMP('%E', HIREDATE) AS FD8\n"
        + "FROM scott.EMP";
    assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
    assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
  }
  // FORMAT_TIME with an 'HH:MI:SS' mask: BigQuery uses %I (12-hour) for HH,
  // Hive/Spark use the 'hh:mm:ss' DATE_FORMAT pattern.
  @Test public void testFormatTimeRelToSql() {
    final RelBuilder builder = relBuilder();
    final RexNode formatTimeRexNode = builder.call(SqlLibraryOperators.FORMAT_TIME,
        builder.literal("HH:MI:SS"), builder.scan("EMP").field(4));
    final RelNode root = builder
        .scan("EMP")
        .project(builder.alias(formatTimeRexNode, "FD"))
        .build();
    final String expectedSql = "SELECT FORMAT_TIME('HH:MI:SS', \"HIREDATE\") AS \"FD\"\n"
        + "FROM \"scott\".\"EMP\"";
    final String expectedBiqQuery = "SELECT FORMAT_TIME('%I:%M:%S', HIREDATE) AS FD\n"
        + "FROM scott.EMP";
    final String expectedHive = "SELECT DATE_FORMAT(HIREDATE, 'hh:mm:ss') FD\n"
        + "FROM scott.EMP";
    final String expectedSpark = expectedHive;
    assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
    assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
    assertThat(toSql(root, DatabaseProduct.HIVE.getDialect()), isLinux(expectedHive));
    assertThat(toSql(root, DatabaseProduct.SPARK.getDialect()), isLinux(expectedSpark));
  }
  // STR_TO_DATE translation: BigQuery uses PARSE_DATE with % masks, Hive/Spark
  // go through UNIX_TIMESTAMP/FROM_UNIXTIME and a DATE cast, Snowflake uses
  // TO_DATE with the original mask.
  @Test public void testStrToDateRelToSql() {
    final RelBuilder builder = relBuilder();
    final RexNode strToDateNode1 = builder.call(SqlLibraryOperators.STR_TO_DATE,
        builder.literal("20181106"), builder.literal("YYYYMMDD"));
    final RexNode strToDateNode2 = builder.call(SqlLibraryOperators.STR_TO_DATE,
        builder.literal("2018/11/06"), builder.literal("YYYY/MM/DD"));
    final RelNode root = builder
        .scan("EMP")
        .project(builder.alias(strToDateNode1, "date1"), builder.alias(strToDateNode2, "date2"))
        .build();
    final String expectedSql = "SELECT STR_TO_DATE('20181106', 'YYYYMMDD') AS \"date1\", "
        + "STR_TO_DATE('2018/11/06', 'YYYY/MM/DD') AS \"date2\"\n"
        + "FROM \"scott\".\"EMP\"";
    final String expectedBiqQuery = "SELECT PARSE_DATE('%Y%m%d', '20181106') AS date1, "
        + "PARSE_DATE('%Y/%m/%d', '2018/11/06') AS date2\n"
        + "FROM scott.EMP";
    final String expectedHive = "SELECT CAST(FROM_UNIXTIME("
        + "UNIX_TIMESTAMP('20181106', 'yyyyMMdd'), 'yyyy-MM-dd') AS DATE) date1, "
        + "CAST(FROM_UNIXTIME(UNIX_TIMESTAMP('2018/11/06', 'yyyy/MM/dd'), 'yyyy-MM-dd') AS DATE) date2\n"
        + "FROM scott.EMP";
    final String expectedSpark = expectedHive;
    final String expectedSnowflake =
        "SELECT TO_DATE('20181106', 'YYYYMMDD') AS \"date1\", "
        + "TO_DATE('2018/11/06', 'YYYY/MM/DD') AS \"date2\"\n"
        + "FROM \"scott\".\"EMP\"";
    assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
    assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
    assertThat(toSql(root, DatabaseProduct.HIVE.getDialect()), isLinux(expectedHive));
    assertThat(toSql(root, DatabaseProduct.SPARK.getDialect()), isLinux(expectedSpark));
    assertThat(toSql(root, DatabaseProduct.SNOWFLAKE.getDialect()), isLinux(expectedSnowflake));
  }
// Verifies FORMAT_DATETIME(format, timestamp) translation: Calcite keeps the
// call as-is, BigQuery rewrites the format string to %-tokens, and Spark maps
// to DATE_FORMAT with swapped argument order and Java-style pattern letters.
@Test public void testFormatDatetimeRelToSql() {
final RelBuilder builder = relBuilder();
final RexNode formatDateNode1 = builder.call(SqlLibraryOperators.FORMAT_DATETIME,
builder.literal("DDMMYY"), builder.literal("2008-12-25 15:30:00"));
final RexNode formatDateNode2 = builder.call(SqlLibraryOperators.FORMAT_DATETIME,
builder.literal("YY/MM/DD"), builder.literal("2012-12-25 12:50:10"));
final RexNode formatDateNode3 = builder.call(SqlLibraryOperators.FORMAT_DATETIME,
builder.literal("YY-MM-01"), builder.literal("2012-12-25 12:50:10"));
final RexNode formatDateNode4 = builder.call(SqlLibraryOperators.FORMAT_DATETIME,
builder.literal("YY-MM-DD 00:00:00"), builder.literal("2012-12-25 12:50:10"))
final RelNode root = builder
.scan("EMP")
.project(builder.alias(formatDateNode1, "date1"),
builder.alias(formatDateNode2, "date2"),
builder.alias(formatDateNode3, "date3"),
builder.alias(formatDateNode4, "date4"))
.build();
final String expectedSql = "SELECT FORMAT_DATETIME('DDMMYY', '2008-12-25 15:30:00') AS "
+ "\"date1\", FORMAT_DATETIME('YY/MM/DD', '2012-12-25 12:50:10') AS \"date2\", "
+ "FORMAT_DATETIME('YY-MM-01', '2012-12-25 12:50:10') AS \"date3\", FORMAT_DATETIME"
+ "('YY-MM-DD 00:00:00', '2012-12-25 12:50:10') AS \"date4\"\n"
+ "FROM \"scott\".\"EMP\"";
final String expectedBiqQuery = "SELECT FORMAT_DATETIME('%d%m%y', '2008-12-25 15:30:00') "
+ "AS date1, FORMAT_DATETIME('%y/%m/%d', '2012-12-25 12:50:10') AS date2,"
+ " FORMAT_DATETIME('%y-%m-01', '2012-12-25 12:50:10') AS date3,"
+ " FORMAT_DATETIME('%y-%m-%d 00:00:00', '2012-12-25 12:50:10') AS date4\n"
+ "FROM scott.EMP";
final String expectedSpark = "SELECT DATE_FORMAT('2008-12-25 15:30:00', 'ddMMyy') date1, "
+ "DATE_FORMAT('2012-12-25 12:50:10', 'yy/MM/dd') date2,"
+ " DATE_FORMAT('2012-12-25 12:50:10', 'yy-MM-01') date3,"
+ " DATE_FORMAT('2012-12-25 12:50:10', 'yy-MM-dd 00:00:00') date4\n"
+ "FROM scott.EMP";
assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
assertThat(toSql(root, DatabaseProduct.SPARK.getDialect()), isLinux(expectedSpark));
}
// Exercises PARSE_TIMESTAMP over a wide range of format patterns (separators,
// 12h/24h hours, quoted 'T', fractional seconds, time zones). Calcite keeps
// PARSE_TIMESTAMP; BigQuery rewrites each format to PARSE_DATETIME %-tokens.
@Test public void testParseTimestampFunctionFormat() {
final RelBuilder builder = relBuilder();
final RexNode parseTSNode1 = builder.call(SqlLibraryOperators.PARSE_TIMESTAMP,
builder.literal("YYYY-MM-dd HH24:MI:SS"), builder.literal("2009-03-20 12:25:50"));
final RexNode parseTSNode2 = builder.call(SqlLibraryOperators.PARSE_TIMESTAMP,
builder.literal("MI dd-YYYY-MM SS HH24"), builder.literal("25 20-2009-03 50 12"));
final RexNode parseTSNode3 = builder.call(SqlLibraryOperators.PARSE_TIMESTAMP,
builder.literal("yyyy@MM@dd@hh@mm@ss"), builder.literal("20200903020211"));
final RexNode parseTSNode4 = builder.call(SqlLibraryOperators.PARSE_TIMESTAMP,
builder.literal("yyyy@MM@dd@HH@mm@ss"), builder.literal("20200903210211"));
final RexNode parseTSNode5 = builder.call(SqlLibraryOperators.PARSE_TIMESTAMP,
builder.literal("HH@mm@ss"), builder.literal("215313"));
final RexNode parseTSNode6 = builder.call(SqlLibraryOperators.PARSE_TIMESTAMP,
builder.literal("MM@dd@yy"), builder.literal("090415"));
final RexNode parseTSNode7 = builder.call(SqlLibraryOperators.PARSE_TIMESTAMP,
builder.literal("MM@dd@yy"), builder.literal("Jun1215"));
final RexNode parseTSNode8 = builder.call(SqlLibraryOperators.PARSE_TIMESTAMP,
builder.literal("yyyy@MM@dd@HH"), builder.literal("2015061221"));
final RexNode parseTSNode9 = builder.call(SqlLibraryOperators.PARSE_TIMESTAMP,
builder.literal("yyyy@dd@mm"), builder.literal("20150653"));
final RexNode parseTSNode10 = builder.call(SqlLibraryOperators.PARSE_TIMESTAMP,
builder.literal("yyyy@mm@dd"), builder.literal("20155308"));
final RexNode parseTSNode11 = builder.call(SqlLibraryOperators.PARSE_TIMESTAMP,
builder.literal("YYYY-MM-dd@HH:mm:ss"), builder.literal("2009-03-2021:25:50"));
final RexNode parseTSNode12 = builder.call(SqlLibraryOperators.PARSE_TIMESTAMP,
builder.literal("YYYY-MM-dd@hh:mm:ss"), builder.literal("2009-03-2007:25:50"));
final RexNode parseTSNode13 = builder.call(SqlLibraryOperators.PARSE_TIMESTAMP,
builder.literal("YYYY-MM-dd@hh:mm:ss z"), builder.literal("2009-03-20 12:25:50.222"));
final RexNode parseTSNode14 = builder.call(SqlLibraryOperators.PARSE_TIMESTAMP,
builder.literal("YYYY-MM-dd'T'hh:mm:ss"), builder.literal("2012-05-09T04:12:12"));
final RexNode parseTSNode15 = builder.call(SqlLibraryOperators.PARSE_TIMESTAMP,
builder.literal("yyyy- MM-dd HH: -mm:ss"), builder.literal("2015- 09-11 09: -07:23"));
final RexNode parseTSNode16 = builder.call(SqlLibraryOperators.PARSE_TIMESTAMP,
builder.literal("yyyy- MM-dd@HH: -mm:ss"), builder.literal("2015- 09-1109: -07:23"));
final RexNode parseTSNode17 = builder.call(SqlLibraryOperators.PARSE_TIMESTAMP,
builder.literal("yyyy-MM-dd-HH:mm:ss.S(3)@ZZ"), builder.literal("2015-09-11-09:07:23"));
final RelNode root = builder
.scan("EMP")
// NOTE(review): aliases "date1"/"date2" are reused below; the expected
// Calcite output shows them deduplicated to "date10"/"date20" — confirm
// that the duplication is intentional.
.project(builder.alias(parseTSNode1, "date1"), builder.alias(parseTSNode2, "date2"),
builder.alias(parseTSNode3, "timestamp1"), builder.alias(parseTSNode4, "timestamp2"),
builder.alias(parseTSNode5, "time1"), builder.alias(parseTSNode6, "date1"),
builder.alias(parseTSNode7, "date2"), builder.alias(parseTSNode8, "date3"),
builder.alias(parseTSNode9, "date5"),
builder.alias(parseTSNode10, "date6"), builder.alias(parseTSNode11, "timestamp3"),
builder.alias(parseTSNode12, "timestamp4"), builder.alias(parseTSNode13, "timestamp5"),
builder.alias(parseTSNode14, "timestamp6"), builder.alias(parseTSNode15, "timestamp7"),
builder.alias(parseTSNode16, "timestamp8"), builder.alias(parseTSNode17, "timestamp9"))
.build();
final String expectedSql =
"SELECT PARSE_TIMESTAMP('YYYY-MM-dd HH24:MI:SS', '2009-03-20 12:25:50') AS \"date1\","
+ " PARSE_TIMESTAMP('MI dd-YYYY-MM SS HH24', '25 20-2009-03 50 12') AS \"date2\","
+ " PARSE_TIMESTAMP('yyyy@MM@dd@hh@mm@ss', '20200903020211') AS \"timestamp1\","
+ " PARSE_TIMESTAMP('yyyy@MM@dd@HH@mm@ss', '20200903210211') AS \"timestamp2\","
+ " PARSE_TIMESTAMP('HH@mm@ss', '215313') AS \"time1\", "
+ "PARSE_TIMESTAMP('MM@dd@yy', '090415') AS \"date10\", "
+ "PARSE_TIMESTAMP('MM@dd@yy', 'Jun1215') AS \"date20\", "
+ "PARSE_TIMESTAMP('yyyy@MM@dd@HH', '2015061221') AS \"date3\", "
+ "PARSE_TIMESTAMP('yyyy@dd@mm', '20150653') AS \"date5\", "
+ "PARSE_TIMESTAMP('yyyy@mm@dd', '20155308') AS \"date6\", "
+ "PARSE_TIMESTAMP('YYYY-MM-dd@HH:mm:ss', '2009-03-2021:25:50') AS \"timestamp3\", "
+ "PARSE_TIMESTAMP('YYYY-MM-dd@hh:mm:ss', '2009-03-2007:25:50') AS \"timestamp4\", "
+ "PARSE_TIMESTAMP('YYYY-MM-dd@hh:mm:ss z', '2009-03-20 12:25:50.222') AS \"timestamp5\", "
+ "PARSE_TIMESTAMP('YYYY-MM-dd''T''hh:mm:ss', '2012-05-09T04:12:12') AS \"timestamp6\""
+ ", PARSE_TIMESTAMP('yyyy- MM-dd HH: -mm:ss', '2015- 09-11 09: -07:23') AS \"timestamp7\""
+ ", PARSE_TIMESTAMP('yyyy- MM-dd@HH: -mm:ss', '2015- 09-1109: -07:23') AS \"timestamp8\""
+ ", PARSE_TIMESTAMP('yyyy-MM-dd-HH:mm:ss.S(3)@ZZ', '2015-09-11-09:07:23') AS \"timestamp9\"\n"
+ "FROM \"scott\".\"EMP\"";
final String expectedBiqQuery =
"SELECT PARSE_DATETIME('%F %H:%M:%S', '2009-03-20 12:25:50') AS date1,"
+ " PARSE_DATETIME('%M %d-%Y-%m %S %H', '25 20-2009-03 50 12') AS date2,"
+ " PARSE_DATETIME('%Y%m%d%I%m%S', '20200903020211') AS timestamp1,"
+ " PARSE_DATETIME('%Y%m%d%I%m%S', '20200903210211') AS timestamp2,"
+ " PARSE_DATETIME('%I%m%S', '215313') AS time1,"
+ " PARSE_DATETIME('%m%d%y', '090415') AS date10,"
+ " PARSE_DATETIME('%m%d%y', 'Jun1215') AS date20,"
+ " PARSE_DATETIME('%Y%m%d%I', '2015061221') AS date3,"
+ " PARSE_DATETIME('%Y%d%m', '20150653') AS date5,"
+ " PARSE_DATETIME('%Y%m%d', '20155308') AS date6,"
+ " PARSE_DATETIME('%F%I:%m:%S', '2009-03-2021:25:50') AS timestamp3,"
+ " PARSE_DATETIME('%F%I:%m:%S', '2009-03-2007:25:50') AS timestamp4, "
+ "PARSE_DATETIME('%F%I:%m:%S %Z', '2009-03-20 12:25:50.222') AS timestamp5, "
+ "PARSE_DATETIME('%FT%I:%m:%S', '2012-05-09T04:12:12') AS timestamp6,"
+ " PARSE_DATETIME('%Y- %m-%d %I: -%m:%S', '2015- 09-11 09: -07:23') AS timestamp7,"
+ " PARSE_DATETIME('%Y- %m-%d%I: -%m:%S', '2015- 09-1109: -07:23') AS timestamp8,"
+ " PARSE_DATETIME('%F-%I:%m:%E3S%Ez', '2015-09-11-09:07:23') AS timestamp9\n"
+ "FROM scott.EMP";
assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
}
// TO_TIMESTAMP(value, format): Calcite keeps the call; BigQuery converts it to
// PARSE_DATETIME with the format rewritten to %-tokens.
@Test public void testToTimestampFunction() {
final RelBuilder builder = relBuilder();
final RexNode parseTSNode1 = builder.call(SqlLibraryOperators.TO_TIMESTAMP,
builder.literal("2009-03-20 12:25:50"), builder.literal("yyyy-MM-dd HH24:MI:SS"));
final RelNode root = builder
.scan("EMP")
.project(builder.alias(parseTSNode1, "timestamp_value"))
.build();
final String expectedSql =
"SELECT TO_TIMESTAMP('2009-03-20 12:25:50', 'yyyy-MM-dd HH24:MI:SS') AS "
+ "\"timestamp_value\"\nFROM \"scott\".\"EMP\"";
final String expectedBiqQuery =
"SELECT PARSE_DATETIME('%F %H:%M:%S', '2009-03-20 12:25:50') AS timestamp_value\n"
+ "FROM scott.EMP";
assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
}
// TO_TIMESTAMP with a month-name pattern: Snowflake keeps TO_TIMESTAMP but
// rewrites the format to its own tokens (MMM -> MON, upper-cased date parts).
// NOTE(review): method name lacks the usual "test" prefix; @Test still runs it.
@Test public void toTimestampFunction() {
final RelBuilder builder = relBuilder();
final RexNode parseTSNode1 = builder.call(SqlLibraryOperators.TO_TIMESTAMP,
builder.literal("Jan 15, 1989, 11:00:06 AM"), builder.literal("MMM dd, YYYY,HH:MI:SS AM"));
final RelNode root = builder
.scan("EMP")
.project(builder.alias(parseTSNode1, "timestamp_value"))
.build();
final String expectedSql =
"SELECT TO_TIMESTAMP('Jan 15, 1989, 11:00:06 AM', 'MMM dd, YYYY,HH:MI:SS AM') AS "
+ "\"timestamp_value\"\nFROM \"scott\".\"EMP\"";
final String expectedSF =
"SELECT TO_TIMESTAMP('Jan 15, 1989, 11:00:06 AM' , 'MON DD, YYYY,HH:MI:SS AM') AS "
+ "\"timestamp_value\"\nFROM \"scott\".\"EMP\"";
assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
assertThat(toSql(root, DatabaseProduct.SNOWFLAKE.getDialect()), isLinux(expectedSF));
}
// Two-operand DATE_DIFF passes through unchanged on both Calcite and BigQuery
// (only identifier quoting differs).
@Test public void datediffFunctionWithTwoOperands() {
final RelBuilder builder = relBuilder();
final RexNode parseTSNode1 = builder.call(SqlLibraryOperators.DATE_DIFF,
builder.literal("1994-07-21"), builder.literal("1993-07-21"));
final RelNode root = builder
.scan("EMP")
.project(builder.alias(parseTSNode1, "date_diff_value"))
.build();
final String expectedSql =
"SELECT DATE_DIFF('1994-07-21', '1993-07-21') AS \"date_diff_value\"\n"
+ "FROM \"scott\".\"EMP\"";
final String expectedBQ =
"SELECT DATE_DIFF('1994-07-21', '1993-07-21') AS date_diff_value\n"
+ "FROM scott.EMP";
assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBQ));
}
// Three-operand DATE_DIFF(date1, date2, unit).
// NOTE(review): the BigQuery expectation emits the unit without quotes
// (… , Month) while Calcite keeps it quoted — confirm that is intended.
@Test public void datediffFunctionWithThreeOperands() {
final RelBuilder builder = relBuilder();
final RexNode parseTSNode1 = builder.call(SqlLibraryOperators.DATE_DIFF,
builder.literal("1994-07-21"), builder.literal("1993-07-21"), builder.literal("Month"));
final RelNode root = builder
.scan("EMP")
.project(builder.alias(parseTSNode1, "date_diff_value"))
.build();
final String expectedSql =
"SELECT DATE_DIFF('1994-07-21', '1993-07-21', 'Month') AS \"date_diff_value\"\n"
+ "FROM \"scott\".\"EMP\"";
final String expectedBQ =
"SELECT DATE_DIFF('1994-07-21', '1993-07-21', Month) AS date_diff_value\n"
+ "FROM scott.EMP";
assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBQ));
}
// TO_DATE(value, format): BigQuery wraps PARSE_DATETIME in DATE(...) to get a
// date rather than a datetime.
@Test public void testToDateFunction() {
final RelBuilder builder = relBuilder();
final RexNode parseTSNode1 = builder.call(SqlLibraryOperators.TO_DATE,
builder.literal("2009/03/20"), builder.literal("yyyy/MM/dd"));
final RelNode root = builder
.scan("EMP")
.project(builder.alias(parseTSNode1, "date_value"))
.build();
final String expectedSql =
"SELECT TO_DATE('2009/03/20', 'yyyy/MM/dd') AS \"date_value\"\n"
+ "FROM \"scott\".\"EMP\"";
final String expectedBiqQuery =
"SELECT DATE(PARSE_DATETIME('%Y/%m/%d', '2009/03/20')) AS date_value\n"
+ "FROM scott.EMP";
assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
}
/** Fluid interface to run tests. */
static class Sql {
// Schema the query is resolved against.
private final SchemaPlus schema;
// SQL text to convert; ignored when {@link #relFn} is supplied.
private final String sql;
// Target dialect for unparsing.
private final SqlDialect dialect;
// Optional direct RelNode builder; when non-null it bypasses parse/validate.
private final Function<RelBuilder, RelNode> relFn;
// Post-conversion RelNode transforms, applied in order by {@link #exec}.
private final List<Function<RelNode, RelNode>> transforms;
private final SqlParser.Config parserConfig;
private final UnaryOperator<SqlToRelConverter.Config> config;
/** Creates a Sql whose schema is resolved from a {@link CalciteAssert.SchemaSpec}. */
Sql(CalciteAssert.SchemaSpec schemaSpec, String sql, SqlDialect dialect,
SqlParser.Config parserConfig,
UnaryOperator<SqlToRelConverter.Config> config,
Function<RelBuilder, RelNode> relFn,
List<Function<RelNode, RelNode>> transforms) {
final SchemaPlus rootSchema = Frameworks.createRootSchema(true);
this.schema = CalciteAssert.addSchema(rootSchema, schemaSpec);
this.sql = sql;
this.dialect = dialect;
this.relFn = relFn;
// Defensive snapshot; instances of Sql are immutable.
this.transforms = ImmutableList.copyOf(transforms);
this.parserConfig = parserConfig;
this.config = config;
}
/** Creates a Sql over an already-built schema. */
Sql(SchemaPlus schema, String sql, SqlDialect dialect,
SqlParser.Config parserConfig,
UnaryOperator<SqlToRelConverter.Config> config,
Function<RelBuilder, RelNode> relFn,
List<Function<RelNode, RelNode>> transforms) {
this.schema = schema;
this.sql = sql;
this.dialect = dialect;
this.relFn = relFn;
this.transforms = ImmutableList.copyOf(transforms);
this.parserConfig = parserConfig;
this.config = config;
}
/** Returns a copy of this Sql targeting the given dialect. */
Sql dialect(SqlDialect dialect) {
return new Sql(schema, sql, dialect, parserConfig, config, relFn,
transforms);
}
/** Returns a copy that builds its RelNode directly instead of parsing SQL. */
Sql relFn(Function<RelBuilder, RelNode> relFn) {
return new Sql(schema, sql, dialect, parserConfig, config, relFn,
transforms);
}
// ---- Dialect shortcuts; each returns a fresh copy via dialect(...). ----
Sql withCalcite() {
return dialect(SqlDialect.DatabaseProduct.CALCITE.getDialect());
}
Sql withClickHouse() {
return dialect(SqlDialect.DatabaseProduct.CLICKHOUSE.getDialect());
}
Sql withDb2() {
return dialect(SqlDialect.DatabaseProduct.DB2.getDialect());
}
Sql withHive() {
return dialect(SqlDialect.DatabaseProduct.HIVE.getDialect());
}
Sql withHive2() {
return dialect(
new HiveSqlDialect(HiveSqlDialect.DEFAULT_CONTEXT
.withDatabaseMajorVersion(2)
.withDatabaseMinorVersion(1)
.withNullCollation(NullCollation.LOW)));
}
Sql withHsqldb() {
return dialect(SqlDialect.DatabaseProduct.HSQLDB.getDialect());
}
Sql withMssql() {
return withMssql(14); // MSSQL 2008 = 10.0, 2012 = 11.0, 2017 = 14.0
}
Sql withMssql(int majorVersion) {
final SqlDialect mssqlDialect = DatabaseProduct.MSSQL.getDialect();
return dialect(
new MssqlSqlDialect(MssqlSqlDialect.DEFAULT_CONTEXT
.withDatabaseMajorVersion(majorVersion)
.withIdentifierQuoteString(mssqlDialect.quoteIdentifier("")
.substring(0, 1))
.withNullCollation(mssqlDialect.getNullCollation())));
}
Sql withMysql() {
return dialect(SqlDialect.DatabaseProduct.MYSQL.getDialect());
}
Sql withMysql8() {
final SqlDialect mysqlDialect = DatabaseProduct.MYSQL.getDialect();
return dialect(
new SqlDialect(MysqlSqlDialect.DEFAULT_CONTEXT
.withDatabaseMajorVersion(8)
.withIdentifierQuoteString(mysqlDialect.quoteIdentifier("")
.substring(0, 1))
.withNullCollation(mysqlDialect.getNullCollation())));
}
Sql withOracle() {
return dialect(SqlDialect.DatabaseProduct.ORACLE.getDialect());
}
Sql withPostgresql() {
return dialect(SqlDialect.DatabaseProduct.POSTGRESQL.getDialect());
}
Sql withPresto() {
return dialect(DatabaseProduct.PRESTO.getDialect());
}
Sql withRedshift() {
return dialect(DatabaseProduct.REDSHIFT.getDialect());
}
Sql withSnowflake() {
return dialect(DatabaseProduct.SNOWFLAKE.getDialect());
}
Sql withSybase() {
return dialect(DatabaseProduct.SYBASE.getDialect());
}
Sql withVertica() {
return dialect(SqlDialect.DatabaseProduct.VERTICA.getDialect());
}
Sql withBigQuery() {
return dialect(SqlDialect.DatabaseProduct.BIG_QUERY.getDialect());
}
Sql withSpark() {
return dialect(DatabaseProduct.SPARK.getDialect());
}
// Hive/Spark variants that quote identifiers with back-ticks.
Sql withHiveIdentifierQuoteString() {
final HiveSqlDialect hiveSqlDialect =
new HiveSqlDialect((SqlDialect.EMPTY_CONTEXT)
.withDatabaseProduct(DatabaseProduct.HIVE)
.withIdentifierQuoteString("`"));
return dialect(hiveSqlDialect);
}
Sql withSparkIdentifierQuoteString() {
final SparkSqlDialect sparkSqlDialect =
new SparkSqlDialect((SqlDialect.EMPTY_CONTEXT)
.withDatabaseProduct(DatabaseProduct.SPARK)
.withIdentifierQuoteString("`"));
return dialect(sparkSqlDialect);
}
Sql withPostgresqlModifiedTypeSystem() {
// Postgresql dialect with max length for varchar set to 256
final PostgresqlSqlDialect postgresqlSqlDialect =
new PostgresqlSqlDialect(PostgresqlSqlDialect.DEFAULT_CONTEXT
.withDataTypeSystem(new RelDataTypeSystemImpl() {
@Override public int getMaxPrecision(SqlTypeName typeName) {
switch (typeName) {
case VARCHAR:
return 256;
default:
return super.getMaxPrecision(typeName);
}
}
}));
return dialect(postgresqlSqlDialect);
}
Sql withOracleModifiedTypeSystem() {
// Oracle dialect with max length for varchar set to 512
final OracleSqlDialect oracleSqlDialect =
new OracleSqlDialect(OracleSqlDialect.DEFAULT_CONTEXT
.withDataTypeSystem(new RelDataTypeSystemImpl() {
@Override public int getMaxPrecision(SqlTypeName typeName) {
switch (typeName) {
case VARCHAR:
return 512;
default:
return super.getMaxPrecision(typeName);
}
}
}));
return dialect(oracleSqlDialect);
}
Sql parserConfig(SqlParser.Config parserConfig) {
return new Sql(schema, sql, dialect, parserConfig, config, relFn,
transforms);
}
Sql withConfig(UnaryOperator<SqlToRelConverter.Config> config) {
return new Sql(schema, sql, dialect, parserConfig, config, relFn,
transforms);
}
/** Appends an optimization pass (given rules, optional planner) to the
* transform pipeline applied after SQL-to-Rel conversion. */
Sql optimize(final RuleSet ruleSet, final RelOptPlanner relOptPlanner) {
return new Sql(schema, sql, dialect, parserConfig, config, relFn,
FlatLists.append(transforms, r -> {
Program program = Programs.of(ruleSet);
final RelOptPlanner p =
Util.first(relOptPlanner,
new HepPlanner(
new HepProgramBuilder().addRuleClass(RelOptRule.class)
.build()));
return program.run(p, r, r.getTraitSet(),
ImmutableList.of(), ImmutableList.of());
}));
}
/** Runs the conversion and asserts the generated SQL matches (after
* normalizing line endings). Returns this for chaining. */
Sql ok(String expectedQuery) {
assertThat(exec(), isLinux(expectedQuery));
return this;
}
/** Runs the conversion and asserts it fails with exactly this message. */
Sql throws_(String errorMessage) {
try {
final String s = exec();
throw new AssertionError("Expected exception with message `"
+ errorMessage + "` but nothing was thrown; got " + s);
} catch (Exception e) {
assertThat(e.getMessage(), is(errorMessage));
return this;
}
}
/** Converts (via relFn or parse/validate/rel), applies the transforms, and
* unparses to SQL in the configured dialect. */
String exec() {
try {
RelNode rel;
if (relFn != null) {
rel = relFn.apply(relBuilder());
} else {
final SqlToRelConverter.Config config = this.config.apply(SqlToRelConverter.config()
.withTrimUnusedFields(false));
final Planner planner =
getPlanner(null, parserConfig, schema, config);
SqlNode parse = planner.parse(sql);
SqlNode validate = planner.validate(parse);
rel = planner.rel(validate).rel;
}
for (Function<RelNode, RelNode> transform : transforms) {
rel = transform.apply(rel);
}
return toSql(rel, dialect);
} catch (Exception e) {
throw TestUtil.rethrow(e);
}
}
/** Returns a copy resolved against a different schema spec. */
public Sql schema(CalciteAssert.SchemaSpec schemaSpec) {
return new Sql(schemaSpec, sql, dialect, parserConfig, config, relFn,
transforms);
}
}
// BigQuery rewrites "x = y IS NOT TRUE" into the plain inequality x <> y.
@Test public void testIsNotTrueWithEqualCondition() {
  final String sqlQuery = "select \"product_name\" from \"product\" where "
      + "\"product_name\" = 'Hello World' is not true";
  final String expectedBigQuery = "SELECT product_name\n"
      + "FROM foodmart.product\n"
      + "WHERE product_name <> 'Hello World'";
  sql(sqlQuery).withBigQuery().ok(expectedBigQuery);
}
// COALESCE over two non-null CAST literals folds to the first value; Hive and
// Spark keep a TIMESTAMP literal, BigQuery emits CAST(... AS DATETIME).
// NOTE(review): "Coalsece" in the method name is a typo for "Coalesce".
@Test public void testCoalseceWithCast() {
final String query = "Select coalesce(cast('2099-12-31 00:00:00.123' as TIMESTAMP),\n"
+ "cast('2010-12-31 01:00:00.123' as TIMESTAMP))";
final String expectedHive = "SELECT TIMESTAMP '2099-12-31 00:00:00'";
final String expectedSpark = "SELECT TIMESTAMP '2099-12-31 00:00:00'";
final String bigQueryExpected = "SELECT CAST('2099-12-31 00:00:00' AS DATETIME)";
sql(query)
.withHive()
.ok(expectedHive)
.withSpark()
.ok(expectedSpark)
.withBigQuery()
.ok(bigQueryExpected);
}
// COALESCE over non-null literals constant-folds to the first operand; the
// result is identical on Hive, Spark and BigQuery.
@Test public void testCoalseceWithLiteral() {
  final String query = "Select coalesce('abc','xyz')";
  final String expected = "SELECT 'abc'";
  sql(query)
      .withHive().ok(expected)
      .withSpark().ok(expected)
      .withBigQuery().ok(expected);
}
// COALESCE(NULL, 'abc') constant-folds to 'abc' on every tested dialect.
@Test public void testCoalseceWithNull() {
  final String query = "Select coalesce(null, 'abc')";
  final String expected = "SELECT 'abc'";
  sql(query)
      .withHive().ok(expected)
      .withSpark().ok(expected)
      .withBigQuery().ok(expected);
}
// Snowflake has no LOG10; it is rendered as two-argument LOG(10, x).
@Test public void testLog10Function() {
  sql("SELECT LOG10(2) as dd")
      .withSnowflake()
      .ok("SELECT LOG(10, 2) AS \"DD\"");
}
// LOG10(1) is folded to the constant 0 before Snowflake emission.
@Test public void testLog10ForOne() {
  sql("SELECT LOG10(1) as dd")
      .withSnowflake()
      .ok("SELECT 0 AS \"DD\"");
}
// LOG10 over a column likewise becomes LOG(10, col) on Snowflake.
@Test public void testLog10ForColumn() {
  final String sqlText = "SELECT LOG10(\"product_id\") as dd from \"product\"";
  final String expectedSnowflake = "SELECT LOG(10, \"product_id\") AS \"DD\"\n"
      + "FROM \"foodmart\".\"product\"";
  sql(sqlText).withSnowflake().ok(expectedSnowflake);
}
// Integer division (DIVIDE_INTEGER): Calcite prints the /INT operator while
// Snowflake emulates it with FLOOR(a / b).
@Test public void testDivideIntegerSnowflake() {
final RelBuilder builder = relBuilder();
// NOTE(review): scan("EMP") is invoked twice just to reference fields 0
// (EMPNO) and 3 (MGR); confirm the extra scans on the builder stack are
// intentional.
final RexNode intdivideRexNode = builder.call(SqlStdOperatorTable.DIVIDE_INTEGER,
builder.scan("EMP").field(0), builder.scan("EMP").field(3));
final RelNode root = builder
.scan("EMP")
.project(builder.alias(intdivideRexNode, "a"))
.build();
final String expectedSql = "SELECT \"EMPNO\" /INT \"MGR\" AS \"a\"\n"
+ "FROM \"scott\".\"EMP\"";
final String expectedSF = "SELECT FLOOR(\"EMPNO\" / \"MGR\") AS \"a\"\n"
+ "FROM \"scott\".\"EMP\"";
assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
assertThat(toSql(root, DatabaseProduct.SNOWFLAKE.getDialect()), isLinux(expectedSF));
}
// ROUND with a column-valued scale: most dialects pass the column through;
// Snowflake clamps the scale to [-12, 38] with a CASE and wraps the result in
// TO_DECIMAL(..., 38, 4).
@Test public void testRoundFunctionWithColumnPlaceHandling() {
final String query = "SELECT ROUND(123.41445, \"product_id\") AS \"a\"\n"
+ "FROM \"foodmart\".\"product\"";
final String expectedBq = "SELECT ROUND(123.41445, product_id) AS a\nFROM foodmart.product";
final String expected = "SELECT ROUND(123.41445, product_id) a\n"
+ "FROM foodmart.product";
final String expectedSnowFlake = "SELECT TO_DECIMAL(ROUND(123.41445, "
+ "CASE WHEN \"product_id\" > 38 THEN 38 WHEN \"product_id\" < -12 "
+ "THEN -12 ELSE \"product_id\" END) ,38, 4) AS \"a\"\n"
+ "FROM \"foodmart\".\"product\"";
final String expectedMssql = "SELECT ROUND(123.41445, [product_id]) AS [a]\n"
+ "FROM [foodmart].[product]";
sql(query)
.withBigQuery()
.ok(expectedBq)
.withHive()
.ok(expected)
.withSpark()
.ok(expected)
.withSnowflake()
.ok(expectedSnowFlake)
.withMssql()
.ok(expectedMssql);
}
// MSSQL ROUND requires an explicit scale, so a 0 second argument is supplied.
@Test public void testRoundFunctionWithOneParameter() {
  final String sqlQuery = "SELECT ROUND(123.41445) AS \"a\"\n"
      + "FROM \"foodmart\".\"product\"";
  sql(sqlQuery)
      .withMssql()
      .ok("SELECT ROUND(123.41445, 0) AS [a]\nFROM [foodmart].[product]");
}
@Test public void testTruncateFunctionWithColumnPlaceHandling() {
String query = "select truncate(2.30259, \"employee_id\") from \"employee\"";
final String expectedBigQuery = "SELECT TRUNC(2.30259, employee_id)\n"
+ "FROM foodmart.employee";
final String expectedSnowFlake = "SELECT TRUNCATE(2.30259, CASE WHEN \"employee_id\" > 38"
+ " THEN 38 WHEN \"employee_id\" < -12 THEN -12 ELSE \"employee_id\" END)\n"
+ "FROM \"foodmart\".\"employee\"";
final String expectedMssql = "SELECT ROUND(2.30259, [employee_id])"
+ "\nFROM [foodmart].[employee]";
sql(query)
.withBigQuery()
.ok(expectedBigQuery)
.withSnowflake()
.ok(expectedSnowFlake)
.withMssql()
.ok(expectedMssql);
}
// One-argument TRUNCATE on MSSQL becomes ROUND with an explicit 0 scale.
@Test public void testTruncateFunctionWithOneParameter() {
  final String sqlQuery = "select truncate(2.30259) from \"employee\"";
  sql(sqlQuery)
      .withMssql()
      .ok("SELECT ROUND(2.30259, 0)\nFROM [foodmart].[employee]");
}
// COUNT(*) OVER () with no columns: Snowflake materializes an explicit
// ORDER BY 0 plus an unbounded ROWS frame; MSSQL keeps the empty OVER ().
@Test public void testWindowFunctionWithOrderByWithoutcolumn() {
String query = "Select count(*) over() from \"employee\"";
final String expectedSnowflake = "SELECT COUNT(*) OVER (ORDER BY 0 ROWS BETWEEN UNBOUNDED "
+ "PRECEDING AND UNBOUNDED FOLLOWING)\n"
+ "FROM \"foodmart\".\"employee\"";
final String mssql = "SELECT COUNT(*) OVER ()\n"
+ "FROM [foodmart].[employee]";
sql(query)
.withSnowflake()
.ok(expectedSnowflake)
.withMssql()
.ok(mssql);
}
// Snowflake rewrites an empty OVER () around COUNT(col) with an explicit
// ORDER BY on that column plus an unbounded ROWS frame.
@Test public void testWindowFunctionWithOrderByWithcolumn() {
  final String sqlQuery = "select count(\"employee_id\") over () as a from \"employee\"";
  final String expectedSnowflake = "SELECT COUNT(\"employee_id\") OVER (ORDER BY \"employee_id\" "
      + "ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) AS \"A\"\n"
      + "FROM \"foodmart\".\"employee\"";
  sql(sqlQuery).withSnowflake().ok(expectedSnowflake);
}
// Snowflake-only check of ROUND with a column-valued scale (clamped CASE +
// TO_DECIMAL wrapper).
// NOTE(review): duplicates the Snowflake branch of
// testRoundFunctionWithColumnPlaceHandling above; consider consolidating.
@Test public void testRoundFunction() {
final String query = "SELECT ROUND(123.41445, \"product_id\") AS \"a\"\n"
+ "FROM \"foodmart\".\"product\"";
final String expectedSnowFlake = "SELECT TO_DECIMAL(ROUND(123.41445, CASE "
+ "WHEN \"product_id\" > 38 THEN 38 WHEN \"product_id\" < -12 THEN -12 "
+ "ELSE \"product_id\" END) ,38, 4) AS \"a\"\n"
+ "FROM \"foodmart\".\"product\"";
sql(query)
.withSnowflake()
.ok(expectedSnowFlake);
}
@Test public void testRandomFunction() {
String query = "select rand_integer(1,3) from \"employee\"";
final String expectedSnowFlake = "SELECT UNIFORM(1, 3, RANDOM())\n"
+ "FROM \"foodmart\".\"employee\"";
final String expectedHive = "SELECT FLOOR(RAND() * (3 - 1 + 1)) + 1\n"
+ "FROM foodmart.employee";
final String expectedBQ = "SELECT FLOOR(RAND() * (3 - 1 + 1)) + 1\n"
+ "FROM foodmart.employee";
final String expectedSpark = "SELECT FLOOR(RAND() * (3 - 1 + 1)) + 1\n"
+ "FROM foodmart.employee";
sql(query)
.withHive()
.ok(expectedHive)
.withSpark()
.ok(expectedSpark)
.withBigQuery()
.ok(expectedBQ)
.withSnowflake()
.ok(expectedSnowFlake);
}
// FORMAT_DATE with pattern "E4" (full weekday name) on Snowflake: expands to
// a CASE that maps the abbreviated TO_VARCHAR(date, 'DY') value to the full
// English day name.
@Test public void testCaseExprForE4() {
final RelBuilder builder = relBuilder().scan("EMP");
final RexNode condition = builder.call(SqlLibraryOperators.FORMAT_DATE,
builder.literal("E4"), builder.field("HIREDATE"));
final RelNode root = relBuilder().scan("EMP").filter(condition).build();
final String expectedSF = "SELECT *\n"
+ "FROM \"scott\".\"EMP\"\n"
+ "WHERE CASE WHEN TO_VARCHAR(\"HIREDATE\", 'DY') = 'Sun' "
+ "THEN 'Sunday' WHEN TO_VARCHAR(\"HIREDATE\", 'DY') = 'Mon' "
+ "THEN 'Monday' WHEN TO_VARCHAR(\"HIREDATE\", 'DY') = 'Tue' "
+ "THEN 'Tuesday' WHEN TO_VARCHAR(\"HIREDATE\", 'DY') = 'Wed' "
+ "THEN 'Wednesday' WHEN TO_VARCHAR(\"HIREDATE\", 'DY') = 'Thu' "
+ "THEN 'Thursday' WHEN TO_VARCHAR(\"HIREDATE\", 'DY') = 'Fri' "
+ "THEN 'Friday' WHEN TO_VARCHAR(\"HIREDATE\", 'DY') = 'Sat' "
+ "THEN 'Saturday' END";
assertThat(toSql(root, DatabaseProduct.SNOWFLAKE.getDialect()), isLinux(expectedSF));
}
// FORMAT_DATE with pattern "EEEE": same full-weekday CASE expansion on
// Snowflake as "E4" (see testCaseExprForE4).
@Test public void testCaseExprForEEEE() {
final RelBuilder builder = relBuilder().scan("EMP");
final RexNode condition = builder.call(SqlLibraryOperators.FORMAT_DATE,
builder.literal("EEEE"), builder.field("HIREDATE"));
final RelNode root = relBuilder().scan("EMP").filter(condition).build();
final String expectedSF = "SELECT *\n"
+ "FROM \"scott\".\"EMP\"\n"
+ "WHERE CASE WHEN TO_VARCHAR(\"HIREDATE\", 'DY') = 'Sun' "
+ "THEN 'Sunday' WHEN TO_VARCHAR(\"HIREDATE\", 'DY') = 'Mon' "
+ "THEN 'Monday' WHEN TO_VARCHAR(\"HIREDATE\", 'DY') = 'Tue' "
+ "THEN 'Tuesday' WHEN TO_VARCHAR(\"HIREDATE\", 'DY') = 'Wed' "
+ "THEN 'Wednesday' WHEN TO_VARCHAR(\"HIREDATE\", 'DY') = 'Thu' "
+ "THEN 'Thursday' WHEN TO_VARCHAR(\"HIREDATE\", 'DY') = 'Fri' "
+ "THEN 'Friday' WHEN TO_VARCHAR(\"HIREDATE\", 'DY') = 'Sat' "
+ "THEN 'Saturday' END";
assertThat(toSql(root, DatabaseProduct.SNOWFLAKE.getDialect()), isLinux(expectedSF));
}
// FORMAT_DATE with pattern "E3" (abbreviated weekday): maps directly to
// TO_VARCHAR(date, 'DY') on Snowflake, no CASE needed.
@Test public void testCaseExprForE3() {
final RelBuilder builder = relBuilder().scan("EMP");
final RexNode condition = builder.call(SqlLibraryOperators.FORMAT_DATE,
builder.literal("E3"), builder.field("HIREDATE"));
final RelNode root = relBuilder().scan("EMP").filter(condition).build();
final String expectedSF = "SELECT *\n"
+ "FROM \"scott\".\"EMP\"\n"
+ "WHERE TO_VARCHAR(\"HIREDATE\", 'DY')";
assertThat(toSql(root, DatabaseProduct.SNOWFLAKE.getDialect()), isLinux(expectedSF));
}
// FORMAT_DATE with pattern "EEE": same direct TO_VARCHAR(date, 'DY') mapping
// on Snowflake as "E3".
@Test public void testCaseExprForEEE() {
final RelBuilder builder = relBuilder().scan("EMP");
final RexNode condition = builder.call(SqlLibraryOperators.FORMAT_DATE,
builder.literal("EEE"), builder.field("HIREDATE"));
final RelNode root = relBuilder().scan("EMP").filter(condition).build();
final String expectedSF = "SELECT *\n"
+ "FROM \"scott\".\"EMP\"\n"
+ "WHERE TO_VARCHAR(\"HIREDATE\", 'DY')";
assertThat(toSql(root, DatabaseProduct.SNOWFLAKE.getDialect()), isLinux(expectedSF));
}
// OCTET_LENGTH over a column is emitted unchanged on BigQuery.
@Test public void octetLength() {
  final RelBuilder b = relBuilder().scan("EMP");
  final RexNode lengthCall = b.call(SqlLibraryOperators.OCTET_LENGTH,
      b.field("ENAME"));
  final RelNode rel = relBuilder().scan("EMP").filter(lengthCall).build();
  final String expectedBigQuery = "SELECT *\n"
      + "FROM scott.EMP\n"
      + "WHERE OCTET_LENGTH(ENAME)";
  assertThat(toSql(rel, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBigQuery));
}
// OCTET_LENGTH over a string literal is emitted unchanged on BigQuery.
@Test public void octetLengthWithLiteral() {
  final RelBuilder b = relBuilder().scan("EMP");
  final RexNode lengthCall = b.call(SqlLibraryOperators.OCTET_LENGTH,
      b.literal("ENAME"));
  final RelNode rel = relBuilder().scan("EMP").filter(lengthCall).build();
  final String expectedBigQuery = "SELECT *\n"
      + "FROM scott.EMP\n"
      + "WHERE OCTET_LENGTH('ENAME')";
  assertThat(toSql(rel, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBigQuery));
}
// INT2SHR(value, shift, mask) renders as a masked right shift on BigQuery.
@Test public void testInt2Shr() {
  final RelBuilder b = relBuilder().scan("EMP");
  final RexNode shrCall = b.call(SqlLibraryOperators.INT2SHR,
      b.literal(3), b.literal(1), b.literal(6));
  final RelNode rel = relBuilder().scan("EMP").filter(shrCall).build();
  final String expectedBigQuery = "SELECT *\n"
      + "FROM scott.EMP\n"
      + "WHERE (3 & 6 ) >> 1";
  assertThat(toSql(rel, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBigQuery));
}
// BITWISE_XOR renders as the ^ operator on BigQuery.
@Test public void testInt8Xor() {
  final RelBuilder b = relBuilder().scan("EMP");
  final RexNode xorCall = b.call(SqlLibraryOperators.BITWISE_XOR,
      b.literal(3), b.literal(6));
  final RelNode rel = relBuilder().scan("EMP").filter(xorCall).build();
  final String expectedBigQuery = "SELECT *\n"
      + "FROM scott.EMP\n"
      + "WHERE 3 ^ 6";
  assertThat(toSql(rel, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBigQuery));
}
// INT2SHL(value, shift, mask) renders as a masked left shift on BigQuery.
@Test public void testInt2Shl() {
  final RelBuilder b = relBuilder().scan("EMP");
  final RexNode shlCall = b.call(SqlLibraryOperators.INT2SHL,
      b.literal(3), b.literal(1), b.literal(6));
  final RelNode rel = relBuilder().scan("EMP").filter(shlCall).build();
  final String expectedBigQuery = "SELECT *\n"
      + "FROM scott.EMP\n"
      + "WHERE (3 & 6 ) << 1";
  assertThat(toSql(rel, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBigQuery));
}
// BITWISE_AND renders as the & operator on BigQuery.
@Test public void testInt2And() {
  final RelBuilder b = relBuilder().scan("EMP");
  final RexNode andCall = b.call(SqlLibraryOperators.BITWISE_AND,
      b.literal(3), b.literal(6));
  final RelNode rel = relBuilder().scan("EMP").filter(andCall).build();
  final String expectedBigQuery = "SELECT *\n"
      + "FROM scott.EMP\n"
      + "WHERE 3 & 6";
  assertThat(toSql(rel, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBigQuery));
}
@Test public void testInt1Or() {
final RelBuilder builder = relBuilder().scan("EMP");
final RexNode condition = builder.call(SqlLibraryOperators.BITWISE_OR,
builder.literal(3), builder.literal(6));
final RelNode root = relBuilder().scan("EMP").filter(condition).build();
final String expectedBQ = "SELECT *\n"
+ "FROM scott.EMP\n"
+ "WHERE 3 | 6";
assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBQ));
}
@Test public void testCot() {
final String query = "SELECT COT(0.12)";
final String expectedBQ = "SELECT 1 / TAN(0.12)";
sql(query)
.withBigQuery()
.ok(expectedBQ);
}
@Test public void testTimestampLiteral() {
final String query = "SELECT Timestamp '1993-07-21 10:10:10'";
final String expectedBQ = "SELECT CAST('1993-07-21 10:10:10' AS DATETIME)";
sql(query)
.withBigQuery()
.ok(expectedBQ);
}
@Test public void testCaseForLnFunction() {
final String query = "SELECT LN(\"product_id\") as dd from \"product\"";
final String expectedMssql = "SELECT LOG([product_id]) AS [DD]"
+ "\nFROM [foodmart].[product]";
sql(query)
.withMssql()
.ok(expectedMssql);
}
@Test public void testCaseForCeilToCeilingMSSQL() {
final String query = "SELECT CEIL(12345) FROM \"product\"";
final String expected = "SELECT CEILING(12345)\n"
+ "FROM [foodmart].[product]";
sql(query)
.withMssql()
.ok(expected);
}
@Test public void testLastDayMSSQL() {
final String query = "SELECT LAST_DAY(DATE '2009-12-20')";
final String expected = "SELECT EOMONTH('2009-12-20')";
sql(query)
.withMssql()
.ok(expected);
}
@Test public void testCurrentDate() {
String query =
"select CURRENT_DATE from \"product\" where \"product_id\" < 10";
final String expected = "SELECT CAST(GETDATE() AS DATE) AS [CURRENT_DATE]\n"
+ "FROM [foodmart].[product]\n"
+ "WHERE [product_id] < 10";
sql(query).withMssql().ok(expected);
}
@Test public void testCurrentTime() {
String query =
"select CURRENT_TIME from \"product\" where \"product_id\" < 10";
final String expected = "SELECT CAST(GETDATE() AS TIME) AS [CURRENT_TIME]\n"
+ "FROM [foodmart].[product]\n"
+ "WHERE [product_id] < 10";
sql(query).withMssql().ok(expected);
}
@Test public void testCurrentTimestamp() {
String query =
"select CURRENT_TIMESTAMP from \"product\" where \"product_id\" < 10";
final String expected = "SELECT GETDATE() AS [CURRENT_TIMESTAMP]\n"
+ "FROM [foodmart].[product]\n"
+ "WHERE [product_id] < 10";
sql(query).withMssql().ok(expected);
}
@Test public void testDayOfMonth() {
String query = "select DAYOFMONTH( DATE '2008-08-29')";
final String expectedMssql = "SELECT DAY('2008-08-29')";
final String expectedBQ = "SELECT EXTRACT(DAY FROM DATE '2008-08-29')";
sql(query)
.withMssql()
.ok(expectedMssql)
.withBigQuery()
.ok(expectedBQ);
}
@Test public void testExtractDecade() {
String query = "SELECT EXTRACT(DECADE FROM DATE '2008-08-29')";
final String expectedBQ = "SELECT CAST(SUBSTR(CAST("
+ "EXTRACT(YEAR FROM DATE '2008-08-29') AS STRING), 0, 3) AS INTEGER)";
sql(query)
.withBigQuery()
.ok(expectedBQ);
}
@Test public void testExtractCentury() {
String query = "SELECT EXTRACT(CENTURY FROM DATE '2008-08-29')";
final String expectedBQ = "SELECT CAST(CEIL(EXTRACT(YEAR FROM DATE '2008-08-29') / 100) "
+ "AS INTEGER)";
sql(query)
.withBigQuery()
.ok(expectedBQ);
}
@Test public void testExtractDOY() {
String query = "SELECT EXTRACT(DOY FROM DATE '2008-08-29')";
final String expectedBQ = "SELECT EXTRACT(DAYOFYEAR FROM DATE '2008-08-29')";
sql(query)
.withBigQuery()
.ok(expectedBQ);
}
@Test public void testExtractDOW() {
String query = "SELECT EXTRACT(DOW FROM DATE '2008-08-29')";
final String expectedBQ = "SELECT EXTRACT(DAYOFWEEK FROM DATE '2008-08-29')";
sql(query)
.withBigQuery()
.ok(expectedBQ);
}
@Test public void testExtractEpoch() {
String query = "SELECT EXTRACT(EPOCH FROM DATE '2008-08-29')";
final String expectedBQ = "SELECT UNIX_SECONDS(DATE '2008-08-29')";
sql(query)
.withBigQuery()
.ok(expectedBQ);
}
@Test public void testExtractMillennium() {
String query = "SELECT EXTRACT(MILLENNIUM FROM DATE '2008-08-29')";
final String expectedBQ = "SELECT CAST(SUBSTR(CAST("
+ "EXTRACT(YEAR FROM DATE '2008-08-29') AS STRING), 0, 1) AS INTEGER)";
sql(query)
.withBigQuery()
.ok(expectedBQ);
}
  // FORMAT_TIMESTAMP with the SEC_FROM_MIDNIGHT token should be rewritten for
  // BigQuery as the DATE_DIFF in seconds from the start of HIREDATE's day,
  // cast to STRING; Calcite's own dialect keeps the FORMAT_TIMESTAMP spelling.
  @Test public void testSecFromMidnightFormatTimestamp() {
    final RelBuilder builder = relBuilder();
    // NOTE(review): builder.scan("EMP").field(4) pushes a scan frame onto the
    // builder before resolving HIREDATE; the later scan/project then operates on
    // the builder's stack -- presumably intentional fixture idiom, verify if edited.
    final RexNode formatTimestampRexNode = builder.call(SqlLibraryOperators.FORMAT_TIMESTAMP,
        builder.literal("SEC_FROM_MIDNIGHT"), builder.scan("EMP").field(4));
    final RelNode root = builder
        .scan("EMP")
        .project(builder.alias(formatTimestampRexNode, "FD"))
        .build();
    final String expectedSql = "SELECT FORMAT_TIMESTAMP('SEC_FROM_MIDNIGHT', \"HIREDATE\") AS"
        + " \"FD\"\n"
        + "FROM \"scott\".\"EMP\"";
    final String expectedBiqQuery = "SELECT CAST(DATE_DIFF(HIREDATE, CAST(CAST(HIREDATE AS DATE) "
        + "AS DATETIME), SECOND) AS STRING) AS FD\n"
        + "FROM scott.EMP";
    assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
    assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
  }
  // FORMAT_DATE with the 'QUARTER' token should map to BigQuery's '%Q' format element.
  @Test public void testGetQuarterFromDate() {
    final RelBuilder builder = relBuilder();
    // builder.scan("EMP").field(4) resolves HIREDATE by pushing a scan frame first.
    final RexNode formatDateRexNode = builder.call(SqlLibraryOperators.FORMAT_DATE,
        builder.literal("QUARTER"), builder.scan("EMP").field(4));
    final RelNode root = builder
        .scan("EMP")
        .project(builder.alias(formatDateRexNode, "FD"))
        .build();
    final String expectedBiqQuery = "SELECT FORMAT_DATE('%Q', HIREDATE) AS FD\n"
        + "FROM scott.EMP";
    assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
  }
@Test public void testExtractDay() {
String query = "SELECT EXTRACT(DAY FROM CURRENT_DATE), EXTRACT(DAY FROM CURRENT_TIMESTAMP)";
final String expectedSFSql = "SELECT DAY(CURRENT_DATE), DAY(CURRENT_TIMESTAMP)";
final String expectedBQSql = "SELECT EXTRACT(DAY FROM CURRENT_DATE), "
+ "EXTRACT(DAY FROM CURRENT_DATETIME())";
final String expectedMsSql = "SELECT DAY(CAST(GETDATE() AS DATE)), DAY(GETDATE())";
sql(query)
.withSnowflake()
.ok(expectedSFSql)
.withBigQuery()
.ok(expectedBQSql)
.withMssql()
.ok(expectedMsSql);
}
@Test public void testExtractMonth() {
String query = "SELECT EXTRACT(MONTH FROM CURRENT_DATE), EXTRACT(MONTH FROM CURRENT_TIMESTAMP)";
final String expectedSFSql = "SELECT MONTH(CURRENT_DATE), MONTH(CURRENT_TIMESTAMP)";
final String expectedBQSql = "SELECT EXTRACT(MONTH FROM CURRENT_DATE), "
+ "EXTRACT(MONTH FROM CURRENT_DATETIME())";
final String expectedMsSql = "SELECT MONTH(CAST(GETDATE() AS DATE)), MONTH(GETDATE())";
sql(query)
.withSnowflake()
.ok(expectedSFSql)
.withBigQuery()
.ok(expectedBQSql)
.withMssql()
.ok(expectedMsSql);
}
@Test public void testExtractYear() {
String query = "SELECT EXTRACT(YEAR FROM CURRENT_DATE), EXTRACT(YEAR FROM CURRENT_TIMESTAMP)";
final String expectedSFSql = "SELECT YEAR(CURRENT_DATE), YEAR(CURRENT_TIMESTAMP)";
final String expectedBQSql = "SELECT EXTRACT(YEAR FROM CURRENT_DATE), "
+ "EXTRACT(YEAR FROM CURRENT_DATETIME())";
final String expectedMsSql = "SELECT YEAR(CAST(GETDATE() AS DATE)), YEAR(GETDATE())";
sql(query)
.withSnowflake()
.ok(expectedSFSql)
.withBigQuery()
.ok(expectedBQSql)
.withMssql()
.ok(expectedMsSql);
}
@Test public void testIntervalMultiplyWithInteger() {
String query = "select \"hire_date\" + 10 * INTERVAL '00:01:00' HOUR "
+ "TO SECOND from \"employee\"";
final String expectedBQSql = "SELECT TIMESTAMP_ADD(hire_date, INTERVAL 10 * 60 SECOND)\n"
+ "FROM foodmart.employee";
sql(query)
.withBigQuery()
.ok(expectedBQSql);
}
  // FORMAT_TIMESTAMP pattern 'YYYYMMDD_HH24MISS' (underscore separator) should
  // translate to BigQuery's '%Y%m%d_%H%M%S'.
  @Test public void testDateUnderscoreSeparator() {
    final RelBuilder builder = relBuilder();
    // builder.scan("EMP").field(4) resolves HIREDATE by pushing a scan frame first.
    final RexNode formatTimestampRexNode = builder.call(SqlLibraryOperators.FORMAT_TIMESTAMP,
        builder.literal("YYYYMMDD_HH24MISS"), builder.scan("EMP").field(4));
    final RelNode root = builder
        .scan("EMP")
        .project(builder.alias(formatTimestampRexNode, "FD"))
        .build();
    final String expectedBiqQuery = "SELECT FORMAT_TIMESTAMP('%Y%m%d_%H%M%S', HIREDATE) AS FD\n"
        + "FROM scott.EMP";
    assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
  }
  // PARSE_TIMESTAMP with pattern 'YYYYMMDD_HH24MISS' should unparse on BigQuery
  // as PARSE_DATETIME with the translated '%Y%m%d_%H%M%S' format.
  @Test public void testParseDatetime() {
    final RelBuilder builder = relBuilder();
    // builder.scan("EMP").field(4) resolves HIREDATE by pushing a scan frame first.
    final RexNode parseDatetimeRexNode = builder.call(SqlLibraryOperators.PARSE_TIMESTAMP,
        builder.literal("YYYYMMDD_HH24MISS"), builder.scan("EMP").field(4));
    final RelNode root = builder
        .scan("EMP")
        .project(builder.alias(parseDatetimeRexNode, "FD"))
        .build();
    final String expectedBiqQuery = "SELECT PARSE_DATETIME('%Y%m%d_%H%M%S', HIREDATE) AS FD\n"
        + "FROM scott.EMP";
    assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
  }
  // UNIX_SECONDS / UNIX_MICROS / UNIX_MILLIS over HIREDATE should each wrap the
  // argument in CAST(... AS TIMESTAMP) when unparsed for BigQuery.
  @Test public void testUnixFunctions() {
    final RelBuilder builder = relBuilder();
    // Each builder.scan("EMP").field(4) pushes a scan frame to resolve HIREDATE.
    final RexNode unixSecondsRexNode = builder.call(SqlLibraryOperators.UNIX_SECONDS,
        builder.scan("EMP").field(4));
    final RexNode unixMicrosRexNode = builder.call(SqlLibraryOperators.UNIX_MICROS,
        builder.scan("EMP").field(4));
    final RexNode unixMillisRexNode = builder.call(SqlLibraryOperators.UNIX_MILLIS,
        builder.scan("EMP").field(4));
    final RelNode root = builder
        .scan("EMP")
        .project(builder.alias(unixSecondsRexNode, "US"),
            builder.alias(unixMicrosRexNode, "UM"),
            builder.alias(unixMillisRexNode, "UMI"))
        .build();
    final String expectedBiqQuery = "SELECT UNIX_SECONDS(CAST(HIREDATE AS TIMESTAMP)) AS US, "
        + "UNIX_MICROS(CAST(HIREDATE AS TIMESTAMP)) AS UM, UNIX_MILLIS(CAST(HIREDATE AS TIMESTAMP)) "
        + "AS UMI\n"
        + "FROM scott.EMP";
    assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
  }
  // TIMESTAMP_SECONDS / TIMESTAMP_MICROS / TIMESTAMP_MILLIS should each be
  // wrapped in CAST(... AS DATETIME) when unparsed for BigQuery.
  @Test public void testTimestampFunctions() {
    final RelBuilder builder = relBuilder();
    // Each builder.scan("EMP").field(4) pushes a scan frame to resolve HIREDATE.
    final RexNode unixSecondsRexNode = builder.call(SqlLibraryOperators.TIMESTAMP_SECONDS,
        builder.scan("EMP").field(4));
    final RexNode unixMicrosRexNode = builder.call(SqlLibraryOperators.TIMESTAMP_MICROS,
        builder.scan("EMP").field(4));
    final RexNode unixMillisRexNode = builder.call(SqlLibraryOperators.TIMESTAMP_MILLIS,
        builder.scan("EMP").field(4));
    final RelNode root = builder
        .scan("EMP")
        .project(builder.alias(unixSecondsRexNode, "TS"),
            builder.alias(unixMicrosRexNode, "TM"),
            builder.alias(unixMillisRexNode, "TMI"))
        .build();
    final String expectedBiqQuery = "SELECT CAST(TIMESTAMP_SECONDS(HIREDATE) AS DATETIME) AS TS, "
        + "CAST(TIMESTAMP_MICROS(HIREDATE) AS DATETIME) AS TM, CAST(TIMESTAMP_MILLIS(HIREDATE) AS "
        + "DATETIME) AS TMI\n"
        + "FROM scott.EMP";
    assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
  }
@Test public void testFormatTimestamp() {
final RelBuilder builder = relBuilder();
final RexNode formatTimestampRexNode = builder.call(SqlLibraryOperators.FORMAT_TIMESTAMP,
builder.literal("EEEE"),
builder.cast(builder.literal("1999-07-01 15:00:00-08:00"), SqlTypeName.TIMESTAMP));
final RelNode root = builder
.scan("EMP")
.project(builder.alias(formatTimestampRexNode, "FT"))
.build();
final String expectedBiqQuery =
"SELECT FORMAT_TIMESTAMP('%A', CAST('1999-07-01 15:00:00-08:00' AS TIMESTAMP)) AS FT\n"
+ "FROM scott.EMP";
assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
}
@Test public void testGroupingFunction() {
String query = "SELECT \"first_name\",\"last_name\", "
+ "grouping(\"first_name\")+ grouping(\"last_name\") "
+ "from \"foodmart\".\"employee\" group by \"first_name\",\"last_name\"";
final String expectedBQSql = "SELECT first_name, last_name, CASE WHEN first_name IS NULL THEN"
+ " 1 ELSE 0 END + CASE WHEN last_name IS NULL THEN 1 ELSE 0 END\n"
+ "FROM foodmart.employee\n"
+ "GROUP BY first_name, last_name";
sql(query)
.withBigQuery()
.ok(expectedBQSql);
}
@Test public void testDateMinus() {
String query = "SELECT \"birth_date\" - \"birth_date\" from \"foodmart\".\"employee\"";
final String expectedBQSql = "SELECT DATE_DIFF(birth_date, birth_date, DAY)\n"
+ "FROM foodmart.employee";
sql(query)
.withBigQuery()
.ok(expectedBQSql);
}
  // HASHBUCKET(HASHROW(x)) should collapse to a single FARM_FINGERPRINT(x) call
  // on BigQuery; Calcite's own dialect keeps the nested spelling.
  @Test public void testhashbucket() {
    final RelBuilder builder = relBuilder();
    // builder.scan("EMP").field(0) resolves EMPNO by pushing a scan frame first.
    final RexNode formatDateRexNode = builder.call(SqlLibraryOperators.HASHBUCKET,
        builder.call(SqlLibraryOperators.HASHROW, builder.scan("EMP").field(0)));
    final RelNode root = builder
        .scan("EMP")
        .project(builder.alias(formatDateRexNode, "FD"))
        .build();
    final String expectedSql = "SELECT HASHBUCKET(HASHROW(\"EMPNO\")) AS \"FD\"\n"
        + "FROM \"scott\".\"EMP\"";
    final String expectedBiqQuery = "SELECT FARM_FINGERPRINT(EMPNO) AS FD\n"
        + "FROM scott.EMP";
    assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
    assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
  }
@Test public void testdatetrunc() {
final RelBuilder builder = relBuilder();
final RexNode trunc = builder.call(SqlLibraryOperators.TRUNC,
builder.literal("2008-19-12"), builder.literal("DAY"));
final RelNode root = builder
.scan("EMP")
.project(builder.alias(trunc, "FD"))
.build();
final String expectedSql = "SELECT TRUNC('2008-19-12', 'DAY') AS \"FD\"\n"
+ "FROM \"scott\".\"EMP\"";
final String expectedBiqQuery = "SELECT DATE_TRUNC('2008-19-12', DAY) AS FD\n"
+ "FROM scott.EMP";
assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
}
@Test public void testdatetruncWithYear() {
final RelBuilder builder = relBuilder();
final RexNode trunc = builder.call(SqlLibraryOperators.TRUNC,
builder.literal("2008-19-12"), builder.literal("YEAR"));
final RelNode root = builder
.scan("EMP")
.project(builder.alias(trunc, "FD"))
.build();
final String expectedSql = "SELECT TRUNC('2008-19-12', 'YEAR') AS \"FD\"\n"
+ "FROM \"scott\".\"EMP\"";
final String expectedBiqQuery = "SELECT DATE_TRUNC('2008-19-12', YEAR) AS FD\n"
+ "FROM scott.EMP";
assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
}
@Test public void testdatetruncWithQuarter() {
final RelBuilder builder = relBuilder();
final RexNode trunc = builder.call(SqlLibraryOperators.TRUNC,
builder.literal("2008-19-12"), builder.literal("QUARTER"));
final RelNode root = builder
.scan("EMP")
.project(builder.alias(trunc, "FD"))
.build();
final String expectedSql = "SELECT TRUNC('2008-19-12', 'QUARTER') AS \"FD\"\n"
+ "FROM \"scott\".\"EMP\"";
final String expectedBiqQuery = "SELECT DATE_TRUNC('2008-19-12', QUARTER) AS FD\n"
+ "FROM scott.EMP";
assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
}
@Test public void testdatetruncWithMonth() {
final RelBuilder builder = relBuilder();
final RexNode trunc = builder.call(SqlLibraryOperators.TRUNC,
builder.literal("2008-19-12"), builder.literal("MONTH"));
final RelNode root = builder
.scan("EMP")
.project(builder.alias(trunc, "FD"))
.build();
final String expectedSql = "SELECT TRUNC('2008-19-12', 'MONTH') AS \"FD\"\n"
+ "FROM \"scott\".\"EMP\"";
final String expectedBiqQuery = "SELECT DATE_TRUNC('2008-19-12', MONTH) AS FD\n"
+ "FROM scott.EMP";
assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
}
@Test public void testdatetruncWithWeek() {
final RelBuilder builder = relBuilder();
final RexNode trunc = builder.call(SqlLibraryOperators.TRUNC,
builder.literal("2008-19-12"), builder.literal("WEEK"));
final RelNode root = builder
.scan("EMP")
.project(builder.alias(trunc, "FD"))
.build();
final String expectedSql = "SELECT TRUNC('2008-19-12', 'WEEK') AS \"FD\"\n"
+ "FROM \"scott\".\"EMP\"";
final String expectedBiqQuery = "SELECT DATE_TRUNC('2008-19-12', WEEK) AS FD\n"
+ "FROM scott.EMP";
assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
}
@Test public void testDateTimeTruncWithYear() {
final RelBuilder builder = relBuilder();
final RexNode trunc = builder.call(SqlLibraryOperators.TRUNC,
builder.cast(builder.literal("2017-02-14 20:38:40"), SqlTypeName.TIMESTAMP),
builder.literal("YEAR"));
final RelNode root = builder
.scan("EMP")
.project(builder.alias(trunc, "FD"))
.build();
final String expectedSql = "SELECT TRUNC(TIMESTAMP '2017-02-14 20:38:40', 'YEAR') AS \"FD\"\n"
+ "FROM \"scott\".\"EMP\"";
final String expectedBiqQuery = "SELECT DATETIME_TRUNC(CAST('2017-02-14 20:38:40' AS DATETIME),"
+ " YEAR) AS FD\nFROM scott.EMP";
assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
}
@Test public void testDateTimeTruncWithMonth() {
final RelBuilder builder = relBuilder();
final RexNode trunc = builder.call(SqlLibraryOperators.TRUNC,
builder.cast(builder.literal("2017-02-14 20:38:40"), SqlTypeName.TIMESTAMP),
builder.literal("MONTH"));
final RelNode root = builder
.scan("EMP")
.project(builder.alias(trunc, "FD"))
.build();
final String expectedSql = "SELECT TRUNC(TIMESTAMP '2017-02-14 20:38:40', 'MONTH') AS \"FD\"\n"
+ "FROM \"scott\".\"EMP\"";
final String expectedBiqQuery = "SELECT DATETIME_TRUNC(CAST('2017-02-14 20:38:40' AS DATETIME),"
+ " MONTH) AS FD\nFROM scott.EMP";
assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
}
@Test public void testDateTimeTruncWithQuarter() {
final RelBuilder builder = relBuilder();
final RexNode trunc = builder.call(SqlLibraryOperators.TRUNC,
builder.cast(builder.literal("2017-02-14 20:38:40"), SqlTypeName.TIMESTAMP),
builder.literal("QUARTER"));
final RelNode root = builder
.scan("EMP")
.project(builder.alias(trunc, "FD"))
.build();
final String expectedSql = "SELECT TRUNC(TIMESTAMP '2017-02-14 20:38:40', 'QUARTER') AS \"FD\""
+ "\nFROM \"scott\".\"EMP\"";
final String expectedBiqQuery = "SELECT DATETIME_TRUNC(CAST('2017-02-14 20:38:40' AS DATETIME),"
+ " QUARTER) AS FD\nFROM scott.EMP";
assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
}
@Test public void testDateTimeTruncWithWeek() {
final RelBuilder builder = relBuilder();
final RexNode trunc = builder.call(SqlLibraryOperators.TRUNC,
builder.cast(builder.literal("2017-02-14 20:38:40"), SqlTypeName.TIMESTAMP),
builder.literal("WEEK"));
final RelNode root = builder
.scan("EMP")
.project(builder.alias(trunc, "FD"))
.build();
final String expectedSql = "SELECT TRUNC(TIMESTAMP '2017-02-14 20:38:40', 'WEEK') AS \"FD\"\n"
+ "FROM \"scott\".\"EMP\"";
final String expectedBiqQuery = "SELECT DATETIME_TRUNC(CAST('2017-02-14 20:38:40' AS DATETIME),"
+ " WEEK) AS FD\nFROM scott.EMP";
assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
}
@Test public void testDateTimeTruncWithDay() {
final RelBuilder builder = relBuilder();
final RexNode trunc = builder.call(SqlLibraryOperators.TRUNC,
builder.cast(builder.literal("2017-02-14 20:38:40"), SqlTypeName.TIMESTAMP),
builder.literal("DAY"));
final RelNode root = builder
.scan("EMP")
.project(builder.alias(trunc, "FD"))
.build();
final String expectedSql = "SELECT TRUNC(TIMESTAMP '2017-02-14 20:38:40', 'DAY') AS \"FD\"\n"
+ "FROM \"scott\".\"EMP\"";
final String expectedBiqQuery = "SELECT DATETIME_TRUNC(CAST('2017-02-14 20:38:40' AS DATETIME),"
+ " DAY) AS FD\nFROM scott.EMP";
assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
}
@Test public void testDateTimeTruncWithHour() {
final RelBuilder builder = relBuilder();
final RexNode trunc = builder.call(SqlLibraryOperators.TRUNC,
builder.cast(builder.literal("2017-02-14 20:38:40"), SqlTypeName.TIMESTAMP),
builder.literal("HOUR"));
final RelNode root = builder
.scan("EMP")
.project(builder.alias(trunc, "FD"))
.build();
final String expectedSql = "SELECT TRUNC(TIMESTAMP '2017-02-14 20:38:40', 'HOUR') AS \"FD\"\n"
+ "FROM \"scott\".\"EMP\"";
final String expectedBiqQuery = "SELECT DATETIME_TRUNC(CAST('2017-02-14 20:38:40' AS DATETIME),"
+ " HOUR) AS FD\nFROM scott.EMP";
assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
}
@Test public void testDateTimeTruncWithMinute() {
final RelBuilder builder = relBuilder();
final RexNode trunc = builder.call(SqlLibraryOperators.TRUNC,
builder.cast(builder.literal("2017-02-14 20:38:40"), SqlTypeName.TIMESTAMP),
builder.literal("MINUTE"));
final RelNode root = builder
.scan("EMP")
.project(builder.alias(trunc, "FD"))
.build();
final String expectedSql = "SELECT TRUNC(TIMESTAMP '2017-02-14 20:38:40', 'MINUTE') AS \"FD\"\n"
+ "FROM \"scott\".\"EMP\"";
final String expectedBiqQuery = "SELECT DATETIME_TRUNC(CAST('2017-02-14 20:38:40' AS DATETIME),"
+ " MINUTE) AS FD\nFROM scott.EMP";
assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
}
@Test public void testDateTimeTruncWithSecond() {
final RelBuilder builder = relBuilder();
final RexNode trunc = builder.call(SqlLibraryOperators.TRUNC,
builder.cast(builder.literal("2017-02-14 20:38:40"), SqlTypeName.TIMESTAMP),
builder.literal("SECOND"));
final RelNode root = builder
.scan("EMP")
.project(builder.alias(trunc, "FD"))
.build();
final String expectedSql = "SELECT TRUNC(TIMESTAMP '2017-02-14 20:38:40', 'SECOND') AS \"FD\"\n"
+ "FROM \"scott\".\"EMP\"";
final String expectedBiqQuery = "SELECT DATETIME_TRUNC(CAST('2017-02-14 20:38:40' AS DATETIME),"
+ " SECOND) AS FD\nFROM scott.EMP";
assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
}
@Test public void testDateTimeTruncWithMilliSecond() {
final RelBuilder builder = relBuilder();
final RexNode trunc = builder.call(SqlLibraryOperators.TRUNC,
builder.cast(builder.literal("2017-02-14 20:38:40"), SqlTypeName.TIMESTAMP),
builder.literal("MILLISECOND"));
final RelNode root = builder
.scan("EMP")
.project(builder.alias(trunc, "FD"))
.build();
final String expectedSql = "SELECT TRUNC(TIMESTAMP '2017-02-14 20:38:40', 'MILLISECOND')"
+ " AS \"FD\"\nFROM \"scott\".\"EMP\"";
final String expectedBiqQuery = "SELECT DATETIME_TRUNC(CAST('2017-02-14 20:38:40' AS DATETIME),"
+ " MILLISECOND) AS FD\nFROM scott.EMP";
assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
}
@Test public void testDateTimeTruncWithMicroSecond() {
final RelBuilder builder = relBuilder();
final RexNode trunc = builder.call(SqlLibraryOperators.TRUNC,
builder.cast(builder.literal("2017-02-14 20:38:40"), SqlTypeName.TIMESTAMP),
builder.literal("MICROSECOND"));
final RelNode root = builder
.scan("EMP")
.project(builder.alias(trunc, "FD"))
.build();
final String expectedSql = "SELECT TRUNC(TIMESTAMP '2017-02-14 20:38:40', 'MICROSECOND')"
+ " AS \"FD\"\nFROM \"scott\".\"EMP\"";
final String expectedBiqQuery = "SELECT DATETIME_TRUNC(CAST('2017-02-14 20:38:40' AS DATETIME),"
+ " MICROSECOND) AS FD\nFROM scott.EMP";
assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
}
@Test public void testTimeTruncWithHour() {
final RelBuilder builder = relBuilder();
final RexNode trunc = builder.call(SqlLibraryOperators.TRUNC,
builder.literal("20:48:18"), builder.literal("HOUR"));
final RelNode root = builder
.scan("EMP")
.project(builder.alias(trunc, "FD"))
.build();
final String expectedSql = "SELECT TRUNC('20:48:18', 'HOUR') AS \"FD\"\n"
+ "FROM \"scott\".\"EMP\"";
final String expectedBiqQuery = "SELECT TIME_TRUNC('20:48:18', HOUR) AS FD\n"
+ "FROM scott.EMP";
assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
}
@Test public void testTimeTruncWithMinute() {
final RelBuilder builder = relBuilder();
final RexNode trunc = builder.call(SqlLibraryOperators.TRUNC,
builder.literal("20:48:18"), builder.literal("MINUTE"));
final RelNode root = builder
.scan("EMP")
.project(builder.alias(trunc, "FD"))
.build();
final String expectedSql = "SELECT TRUNC('20:48:18', 'MINUTE') AS \"FD\"\n"
+ "FROM \"scott\".\"EMP\"";
final String expectedBiqQuery = "SELECT TIME_TRUNC('20:48:18', MINUTE) AS FD\n"
+ "FROM scott.EMP";
assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
}
@Test public void testTimeTruncWithSecond() {
final RelBuilder builder = relBuilder();
final RexNode trunc = builder.call(SqlLibraryOperators.TRUNC,
builder.literal("20:48:18"), builder.literal("SECOND"));
final RelNode root = builder
.scan("EMP")
.project(builder.alias(trunc, "FD"))
.build();
final String expectedSql = "SELECT TRUNC('20:48:18', 'SECOND') AS \"FD\"\n"
+ "FROM \"scott\".\"EMP\"";
final String expectedBiqQuery = "SELECT TIME_TRUNC('20:48:18', SECOND) AS FD\n"
+ "FROM scott.EMP";
assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
}
@Test public void testTimeTruncWithMiliSecond() {
final RelBuilder builder = relBuilder();
final RexNode trunc = builder.call(SqlLibraryOperators.TRUNC,
builder.literal("20:48:18"), builder.literal("MILLISECOND"));
final RelNode root = builder
.scan("EMP")
.project(builder.alias(trunc, "FD"))
.build();
final String expectedSql = "SELECT TRUNC('20:48:18', 'MILLISECOND') AS \"FD\"\n"
+ "FROM \"scott\".\"EMP\"";
final String expectedBiqQuery = "SELECT TIME_TRUNC('20:48:18', MILLISECOND) AS FD\n"
+ "FROM scott.EMP";
assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
}
@Test public void testTimeTruncWithMicroSecond() {
final RelBuilder builder = relBuilder();
final RexNode trunc = builder.call(SqlLibraryOperators.TRUNC,
builder.literal("20:48:18"), builder.literal("MICROSECOND"));
final RelNode root = builder
.scan("EMP")
.project(builder.alias(trunc, "FD"))
.build();
final String expectedSql = "SELECT TRUNC('20:48:18', 'MICROSECOND') AS \"FD\"\n"
+ "FROM \"scott\".\"EMP\"";
final String expectedBiqQuery = "SELECT TIME_TRUNC('20:48:18', MICROSECOND) AS FD\n"
+ "FROM scott.EMP";
assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
}
  // HASHROW(x) should unparse on BigQuery as FARM_FINGERPRINT(x);
  // Calcite's own dialect keeps the HASHROW spelling.
  @Test public void testhashrow() {
    final RelBuilder builder = relBuilder();
    // builder.scan("EMP").field(1) resolves ENAME by pushing a scan frame first.
    final RexNode hashrow = builder.call(SqlLibraryOperators.HASHROW,
        builder.scan("EMP").field(1));
    final RelNode root = builder
        .scan("EMP")
        .project(builder.alias(hashrow, "FD"))
        .build();
    final String expectedSql = "SELECT HASHROW(\"ENAME\") AS \"FD\"\n"
        + "FROM \"scott\".\"EMP\"";
    final String expectedBiqQuery = "SELECT FARM_FINGERPRINT(ENAME) AS FD\n"
        + "FROM scott.EMP";
    assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
    assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
  }
  /**
   * Builds a single-row LogicalValues (one INTEGER column named ZERO holding a
   * zero literal) and projects the two given expressions over it. Used by the
   * union tests below to fabricate constant-row inputs.
   *
   * @param col1 first projected expression
   * @param col2 second projected expression
   * @return the built {@link RelNode}
   */
  RelNode createLogicalValueRel(RexNode col1, RexNode col2) {
    final RelBuilder builder = relBuilder();
    // Explicit row type: a single INTEGER field called ZERO.
    RelDataTypeField field = new RelDataTypeFieldImpl("ZERO", 0,
        builder.getTypeFactory().createSqlType(SqlTypeName.INTEGER));
    List<RelDataTypeField> fieldList = new ArrayList<>();
    fieldList.add(field);
    RelRecordType type = new RelRecordType(fieldList);
    // One tuple containing a single zero literal of that INTEGER type.
    builder.values(
        ImmutableList.of(
            ImmutableList.of(
                builder.getRexBuilder().makeZeroLiteral(
                    builder.getTypeFactory().createSqlType(SqlTypeName.INTEGER))
            )), type);
    builder.project(col1, col2);
    return builder.build();
  }
  // Three constant-row inputs combined by two UNION ALLs should unparse as a
  // flat chain of SELECT ... UNION ALL ... with dialect-specific aliasing
  // (Hive omits AS, BigQuery requires it).
  @Test public void testMultipleUnionWithLogicalValue() {
    final RelBuilder builder = relBuilder();
    // Push two inputs and union them, then push a third and union again;
    // the push/union order determines the builder's stack state.
    builder.push(
        createLogicalValueRel(builder.alias(builder.literal("ALA"), "col1"),
            builder.alias(builder.literal("AmericaAnchorage"), "col2")));
    builder.push(
        createLogicalValueRel(builder.alias(builder.literal("ALAW"), "col1"),
            builder.alias(builder.literal("USAleutian"), "col2")));
    builder.union(true);
    builder.push(
        createLogicalValueRel(builder.alias(builder.literal("AST"), "col1"),
            builder.alias(builder.literal("AmericaHalifax"), "col2")));
    builder.union(true);
    final RelNode root = builder.build();
    final String expectedHive = "SELECT 'ALA' col1, 'AmericaAnchorage' col2\n"
        + "UNION ALL\n"
        + "SELECT 'ALAW' col1, 'USAleutian' col2\n"
        + "UNION ALL\n"
        + "SELECT 'AST' col1, 'AmericaHalifax' col2";
    final String expectedBigQuery = "SELECT 'ALA' AS col1, 'AmericaAnchorage' AS col2\n"
        + "UNION ALL\n"
        + "SELECT 'ALAW' AS col1, 'USAleutian' AS col2\n"
        + "UNION ALL\n"
        + "SELECT 'AST' AS col1, 'AmericaHalifax' AS col2";
    relFn(b -> root)
        .withHive2().ok(expectedHive)
        .withBigQuery().ok(expectedBigQuery);
  }
@Test public void testRowid() {
final RelBuilder builder = relBuilder();
final RexNode rowidRexNode = builder.call(SqlLibraryOperators.ROWID);
final RelNode root = builder
.scan("EMP")
.project(builder.alias(rowidRexNode, "FD"))
.build();
final String expectedSql = "SELECT ROWID() AS \"FD\"\n"
+ "FROM \"scott\".\"EMP\"";
final String expectedBiqQuery = "SELECT GENERATE_UUID() AS FD\n"
+ "FROM scott.EMP";
assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
}
@Test public void testEscapeFunction() {
String query =
"SELECT '\\\\PWFSNFS01EFS\\imagenowcifs\\debitmemo' AS DM_SENDFILE_PATH1";
final String expectedBQSql =
"SELECT '\\\\\\\\PWFSNFS01EFS\\\\imagenowcifs\\\\debitmemo' AS "
+ "DM_SENDFILE_PATH1";
sql(query)
.withBigQuery()
.ok(expectedBQSql);
}
@Test public void testTimeAdd() {
final RelBuilder builder = relBuilder();
final RexNode createRexNode = builder.call(SqlLibraryOperators.TIME_ADD,
builder.literal("00:00:00"),
builder.call(SqlLibraryOperators.INTERVAL_SECONDS, builder.literal(10000)));
final RelNode root = builder
.scan("EMP")
.project(builder.alias(createRexNode, "FD"))
.build();
final String expectedBiqQuery = "SELECT TIME_ADD('00:00:00', INTERVAL 10000 SECOND) AS FD\n"
+ "FROM scott.EMP";
assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
}
@Test public void testIntervalSeconds() {
final RelBuilder builder = relBuilder();
final RexNode createRexNode = builder.call
(SqlLibraryOperators.INTERVAL_SECONDS, builder.literal(10000));
final RelNode root = builder
.scan("EMP")
.project(builder.alias(createRexNode, "FD"))
.build();
final String expectedBiqQuery = "SELECT INTERVAL 10000 SECOND AS FD\n"
+ "FROM scott.EMP";
assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
}
@Test void testUnicodeCharacters() {
final String query = "SELECT 'ð', '°C' FROM \"product\"";
final String expected = "SELECT '\\u00f0', '\\u00b0C'\n"
+ "FROM \"foodmart\".\"product\"";
sql(query).ok(expected);
}
@Test public void testPlusForTimeAdd() {
final RelBuilder builder = relBuilder();
final RexNode createRexNode = builder.call(SqlStdOperatorTable.PLUS,
builder.cast(builder.literal("12:15:07"), SqlTypeName.TIME),
builder.getRexBuilder().makeIntervalLiteral(new BigDecimal(1000),
new SqlIntervalQualifier(MICROSECOND, null, SqlParserPos.ZERO)));
final RelNode root = builder
.scan("EMP")
.project(builder.alias(createRexNode, "FD"))
.build();
final String expectedBiqQuery = "SELECT TIME_ADD(TIME '12:15:07', INTERVAL 1 MICROSECOND) "
+ "AS FD\n"
+ "FROM scott.EMP";
assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
}
@Test public void testMinusForTimeSub() {
final RelBuilder builder = relBuilder();
final RexNode createRexNode = builder.call(SqlStdOperatorTable.MINUS,
builder.cast(builder.literal("12:15:07"), SqlTypeName.TIME),
builder.getRexBuilder().makeIntervalLiteral(new BigDecimal(1000),
new SqlIntervalQualifier(MICROSECOND, null, SqlParserPos.ZERO)));
final RelNode root = builder
.scan("EMP")
.project(builder.alias(createRexNode, "FD"))
.build();
final String expectedBiqQuery = "SELECT TIME_SUB(TIME '12:15:07', INTERVAL 1 MICROSECOND) "
+ "AS FD\n"
+ "FROM scott.EMP";
assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
}
@Test public void testPlusForTimestampAdd() {
final RelBuilder builder = relBuilder();
final RexNode createRexNode = builder.call(SqlStdOperatorTable.PLUS,
builder.cast(builder.literal("1999-07-01 15:00:00-08:00"), SqlTypeName.TIMESTAMP),
builder.getRexBuilder().makeIntervalLiteral(new BigDecimal(1000),
new SqlIntervalQualifier(MICROSECOND, null, SqlParserPos.ZERO)));
final RelNode root = builder
.scan("EMP")
.project(builder.alias(createRexNode, "FD"))
.build();
final String expectedBiqQuery =
"SELECT TIMESTAMP_ADD(CAST('1999-07-01 15:00:00-08:00' AS DATETIME), "
+ "INTERVAL 1 MICROSECOND) AS FD\n"
+ "FROM scott.EMP";
assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
}
@Test public void testPlusForTimestampSub() {
final RelBuilder builder = relBuilder();
final RexNode createRexNode = builder.call(SqlStdOperatorTable.MINUS,
builder.cast(builder.literal("1999-07-01 15:00:00-08:00"), SqlTypeName.TIMESTAMP),
builder.getRexBuilder().makeIntervalLiteral(new BigDecimal(1000),
new SqlIntervalQualifier(MICROSECOND, null, SqlParserPos.ZERO)));
final RelNode root = builder
.scan("EMP")
.project(builder.alias(createRexNode, "FD"))
.build();
final String expectedBiqQuery =
"SELECT TIMESTAMP_SUB(CAST('1999-07-01 15:00:00-08:00' AS DATETIME), "
+ "INTERVAL 1 MICROSECOND) AS FD\n"
+ "FROM scott.EMP";
assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
}
@Test public void testPlusForDateAdd() {
final RelBuilder builder = relBuilder();
final RexNode createRexNode = builder.call(SqlStdOperatorTable.PLUS,
builder.cast(builder.literal("1999-07-01"), SqlTypeName.DATE),
builder.getRexBuilder().makeIntervalLiteral(new BigDecimal(86400000),
new SqlIntervalQualifier(DAY, 6, DAY,
-1, SqlParserPos.ZERO)));
final RelNode root = builder
.scan("EMP")
.project(builder.alias(createRexNode, "FD"))
.build();
final String expectedBiqQuery = "SELECT DATE_ADD(DATE '1999-07-01', INTERVAL 1 DAY) AS FD\n"
+ "FROM scott.EMP";
final String expectedSparkQuery = "SELECT DATE '1999-07-01' + INTERVAL '1' DAY FD\n"
+ "FROM scott.EMP";
assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
assertThat(toSql(root, DatabaseProduct.SPARK.getDialect()), isLinux(expectedSparkQuery));
}
@Test public void testPlusForDateSub() {
final RelBuilder builder = relBuilder();
final RexNode createRexNode = builder.call(SqlStdOperatorTable.MINUS,
builder.cast(builder.literal("1999-07-01"), SqlTypeName.DATE),
builder.getRexBuilder().makeIntervalLiteral(new BigDecimal(86400000),
new SqlIntervalQualifier(DAY, 6, DAY,
-1, SqlParserPos.ZERO)));
final RelNode root = builder
.scan("EMP")
.project(builder.alias(createRexNode, "FD"))
.build();
final String expectedBiqQuery = "SELECT DATE_SUB(DATE '1999-07-01', INTERVAL 1 DAY) AS FD\n"
+ "FROM scott.EMP";
final String expectedSparkQuery = "SELECT DATE '1999-07-01' - INTERVAL '1' DAY FD\n"
+ "FROM scott.EMP";
assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
assertThat(toSql(root, DatabaseProduct.SPARK.getDialect()), isLinux(expectedSparkQuery));
}
@Test public void testWhenTableNameAndColumnNameIsSame() {
String query =
"select \"test\" from \"foodmart\".\"test\"";
final String expectedBQSql =
"SELECT test.test\n"
+ "FROM foodmart.test AS test";
sqlTest(query)
.withBigQuery()
.ok(expectedBQSql);
}
@Test public void testTimeOfDayFunction() {
final RelBuilder builder = relBuilder();
final RexNode formatTimestampRexNode2 = builder.call(SqlLibraryOperators.FORMAT_TIMESTAMP,
builder.literal("TIMEOFDAY"), builder.call(SqlLibraryOperators.CURRENT_TIMESTAMP));
final RelNode root = builder
.scan("EMP")
.project(builder.alias(formatTimestampRexNode2, "FD2"))
.build();
final String expectedSql = "SELECT FORMAT_TIMESTAMP('TIMEOFDAY', CURRENT_TIMESTAMP) AS "
+ "\"FD2\"\n"
+ "FROM \"scott\".\"EMP\"";
final String expectedBiqQuery = "SELECT FORMAT_TIMESTAMP('%c', CURRENT_DATETIME()) AS FD2\n"
+ "FROM scott.EMP";
assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
}
@Test void testConversionOfFilterWithCrossJoinToFilterWithInnerJoin() {
String query =
"select *\n"
+ " from \"foodmart\".\"employee\" as \"e\", \"foodmart\".\"department\" as \"d\"\n"
+ " where \"e\".\"department_id\" = \"d\".\"department_id\" "
+ "and \"e\".\"employee_id\" > 2";
String expect = "SELECT *\n"
+ "FROM foodmart.employee\n"
+ "INNER JOIN foodmart.department ON employee.department_id = department.department_id\n"
+ "WHERE employee.employee_id > 2";
HepProgramBuilder builder = new HepProgramBuilder();
builder.addRuleClass(FilterExtractInnerJoinRule.class);
HepPlanner hepPlanner = new HepPlanner(builder.build());
RuleSet rules = RuleSets.ofList(CoreRules.FILTER_EXTRACT_INNER_JOIN_RULE);
sql(query).withBigQuery().optimize(rules, hepPlanner).ok(expect);
}
@Test void testConversionOfFilterWithCrossJoinToFilterWithInnerJoinWithOneConditionInFilter() {
String query =
"select *\n"
+ " from \"foodmart\".\"employee\" as \"e\", \"foodmart\".\"department\" as \"d\"\n"
+ " where \"e\".\"department_id\" = \"d\".\"department_id\"";
String expect = "SELECT *\n"
+ "FROM foodmart.employee\n"
+ "INNER JOIN foodmart.department ON employee.department_id = department.department_id";
HepProgramBuilder builder = new HepProgramBuilder();
builder.addRuleClass(FilterExtractInnerJoinRule.class);
HepPlanner hepPlanner = new HepPlanner(builder.build());
RuleSet rules = RuleSets.ofList(CoreRules.FILTER_EXTRACT_INNER_JOIN_RULE);
sql(query).withBigQuery().optimize(rules, hepPlanner).ok(expect);
}
@Test void testConversionOfFilterWithThreeCrossJoinToFilterWithInnerJoin() {
String query = "select *\n"
+ " from \"foodmart\".\"employee\" as \"e\", \"foodmart\".\"department\" as \"d\", \n"
+ " \"foodmart\".\"reserve_employee\" as \"re\"\n"
+ " where \"e\".\"department_id\" = \"d\".\"department_id\" and \"e\".\"employee_id\" > 2\n"
+ " and \"re\".\"employee_id\" > \"e\".\"employee_id\"\n"
+ " and \"e\".\"department_id\" > 5";
String expect = "SELECT *\n"
+ "FROM foodmart.employee\n"
+ "INNER JOIN foodmart.department ON employee.department_id = department.department_id\n"
+ "INNER JOIN foodmart.reserve_employee "
+ "ON employee.employee_id < reserve_employee.employee_id\n"
+ "WHERE employee.employee_id > 2 AND employee.department_id > 5";
HepProgramBuilder builder = new HepProgramBuilder();
builder.addRuleClass(FilterExtractInnerJoinRule.class);
HepPlanner hepPlanner = new HepPlanner(builder.build());
RuleSet rules = RuleSets.ofList(CoreRules.FILTER_EXTRACT_INNER_JOIN_RULE);
sql(query).withBigQuery().optimize(rules, hepPlanner).ok(expect);
}
@Test void testConversionOfFilterWithCompositeConditionWithThreeCrossJoinToFilterWithInnerJoin() {
String query = "select *\n"
+ " from \"foodmart\".\"employee\" as \"e\", \"foodmart\".\"department\" as \"d\", \n"
+ " \"foodmart\".\"reserve_employee\" as \"re\"\n"
+ " where (\"e\".\"department_id\" = \"d\".\"department_id\"\n"
+ " or \"re\".\"employee_id\" = \"e\".\"employee_id\")\n"
+ " and \"re\".\"employee_id\" = \"d\".\"department_id\"\n";
String expect = "SELECT *\n"
+ "FROM foodmart.employee\n"
+ "INNER JOIN foodmart.department ON TRUE\n"
+ "INNER JOIN foodmart.reserve_employee ON TRUE\n"
+ "WHERE (employee.department_id = department.department_id "
+ "OR reserve_employee.employee_id = employee.employee_id) "
+ "AND reserve_employee.employee_id = department.department_id";
HepProgramBuilder builder = new HepProgramBuilder();
builder.addRuleClass(FilterExtractInnerJoinRule.class);
HepPlanner hepPlanner = new HepPlanner(builder.build());
RuleSet rules = RuleSets.ofList(CoreRules.FILTER_EXTRACT_INNER_JOIN_RULE);
sql(query).withBigQuery().optimize(rules, hepPlanner).ok(expect);
}
//WHERE t1.c1 = t2.c1 AND t2.c2 = t3.c2 AND (t1.c3 = t3.c3 OR t1.c4 = t2.c4)
@Test void testFilterWithParenthesizedConditionsWithThreeCrossJoinToFilterWithInnerJoin() {
String query = "select *\n"
+ " from \"foodmart\".\"employee\" as \"e\", \"foodmart\".\"department\" as \"d\", \n"
+ " \"foodmart\".\"reserve_employee\" as \"re\"\n"
+ " where \"e\".\"department_id\" = \"d\".\"department_id\"\n"
+ " and \"re\".\"employee_id\" = \"d\".\"department_id\"\n"
+ " and (\"re\".\"department_id\" < \"d\".\"department_id\"\n"
+ " or \"d\".\"department_id\" = \"re\".\"department_id\")\n";
String expect = "SELECT *\n"
+ "FROM foodmart.employee\n"
+ "INNER JOIN foodmart.department ON TRUE\n"
+ "INNER JOIN foodmart.reserve_employee ON TRUE\n"
+ "WHERE employee.department_id = department.department_id "
+ "AND reserve_employee.employee_id = department.department_id "
+ "AND (reserve_employee.department_id < department.department_id "
+ "OR department.department_id = reserve_employee.department_id)";
HepProgramBuilder builder = new HepProgramBuilder();
builder.addRuleClass(FilterExtractInnerJoinRule.class);
HepPlanner hepPlanner = new HepPlanner(builder.build());
RuleSet rules = RuleSets.ofList(CoreRules.FILTER_EXTRACT_INNER_JOIN_RULE);
sql(query).withBigQuery().optimize(rules, hepPlanner).ok(expect);
}
@Test void translateCastOfTimestampWithLocalTimeToTimestampInBq() {
final RelBuilder relBuilder = relBuilder();
final RexNode castTimestampTimeZoneCall =
relBuilder.cast(relBuilder.call(SqlStdOperatorTable.CURRENT_TIMESTAMP),
SqlTypeName.TIMESTAMP_WITH_LOCAL_TIME_ZONE);
final RelNode root = relBuilder
.values(new String[] {"c"}, 1)
.project(castTimestampTimeZoneCall)
.build();
final String expectedBigQuery =
"SELECT CAST(CURRENT_DATETIME() AS TIMESTAMP_WITH_LOCAL_TIME_ZONE) AS `$f0`";
assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBigQuery));
}
@Test public void testParseDateTimeFormat() {
final RelBuilder builder = relBuilder();
final RexNode parseDateNode = builder.call(SqlLibraryOperators.PARSE_DATE,
builder.literal("YYYYMMDD"), builder.literal("99991231"));
final RexNode parseTimeNode = builder.call(SqlLibraryOperators.PARSE_TIME,
builder.literal("HH24MISS"), builder.literal("122333"));
final RelNode root = builder.scan("EMP").
project(builder.alias(parseDateNode, "date1"),
builder.alias(parseTimeNode, "time1"))
.build();
final String expectedSql = "SELECT PARSE_DATE('YYYYMMDD', '99991231') AS \"date1\", "
+ "PARSE_TIME('HH24MISS', '122333') AS \"time1\"\n"
+ "FROM \"scott\".\"EMP\"";
final String expectedBiqQuery = "SELECT PARSE_DATE('%Y%m%d', '99991231') AS date1, "
+ "PARSE_TIME('%H%M%S', '122333') AS time1\n"
+ "FROM scott.EMP";
final String expectedSparkQuery = "SELECT PARSE_DATE('YYYYMMDD', '99991231') date1, "
+ "PARSE_TIME('HH24MISS', '122333') time1\n"
+ "FROM scott.EMP";
assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
assertThat(toSql(root, DatabaseProduct.SPARK.getDialect()), isLinux(expectedSparkQuery));
}
@Test public void testPositionOperator() {
final RelBuilder builder = relBuilder();
final RexNode parseTrimNode = builder.call(SqlStdOperatorTable.POSITION,
builder.literal("a"),
builder.literal("Name"));
final RelNode root = builder.scan("EMP").
project(builder.alias(parseTrimNode, "t"))
.build();
final String expectedSql = "SELECT POSITION('a' IN 'Name') AS \"t\"\n"
+ "FROM \"scott\".\"EMP\"";
final String expectedSparkQuery = "SELECT POSITION('a' IN 'Name') t\nFROM scott.EMP";
assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
assertThat(toSql(root, DatabaseProduct.SPARK.getDialect()), isLinux(expectedSparkQuery));
}
@Test public void testBigQueryErrorOperator() {
final RelBuilder builder = relBuilder();
final SqlFunction errorOperator =
new SqlFunction("ERROR",
SqlKind.OTHER_FUNCTION,
ReturnTypes.VARCHAR_2000,
null,
OperandTypes.STRING_STRING,
SqlFunctionCategory.SYSTEM);
final RexNode parseTrimNode = builder.call(errorOperator,
builder.literal("Error Message!"));
final RelNode root = builder.scan("EMP").
project(builder.alias(parseTrimNode, "t"))
.build();
final String expectedSql = "SELECT ERROR('Error Message!') AS \"t\"\n"
+ "FROM \"scott\".\"EMP\"";
final String expectedSparkQuery = "SELECT RAISE_ERROR('Error Message!') t\n"
+ "FROM scott.EMP";
assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
assertThat(toSql(root, DatabaseProduct.SPARK.getDialect()), isLinux(expectedSparkQuery));
}
@Test public void testTrue() {
final RelBuilder builder = relBuilder();
final RexNode trueRexNode = builder.call(TRUE);
final RelNode root = builder.scan("EMP")
.project(builder.alias(trueRexNode, "dm"))
.build();
final String expectedSql = "SELECT TRUE() AS \"dm\"\n"
+ "FROM \"scott\".\"EMP\"";
final String expectedBiqQuery = "SELECT TRUE AS dm\n"
+ "FROM scott.EMP";
assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
}
@Test public void testFalse() {
final RelBuilder builder = relBuilder();
final RexNode falseRexNode = builder.call(FALSE);
final RelNode root = builder.scan("EMP")
.project(builder.alias(falseRexNode, "dm"))
.build();
final String expectedSql = "SELECT FALSE() AS \"dm\"\n"
+ "FROM \"scott\".\"EMP\"";
final String expectedBiqQuery = "SELECT FALSE AS dm\n"
+ "FROM scott.EMP";
assertThat(toSql(root, DatabaseProduct.CALCITE.getDialect()), isLinux(expectedSql));
assertThat(toSql(root, DatabaseProduct.BIG_QUERY.getDialect()), isLinux(expectedBiqQuery));
}
}
|
RNBS-140: Format violation fixed by running " ./gradlew autostyleApply"
|
core/src/test/java/org/apache/calcite/rel/rel2sql/RelToSqlConverterTest.java
|
RNBS-140: Format violation fixed by running " ./gradlew autostyleApply"
|
|
Java
|
apache-2.0
|
95877e1aef3b72755351303a91b96791f70eeb33
| 0
|
jaamsim/jaamsim,jaamsim/jaamsim,jaamsim/jaamsim,jaamsim/jaamsim
|
/*
* JaamSim Discrete Event Simulation
* Copyright (C) 2020-2021 JaamSim Software Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.jaamsim.ui;
import java.awt.BorderLayout;
import java.awt.Dimension;
import java.awt.FlowLayout;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.image.BufferedImage;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import javax.swing.Box;
import javax.swing.ImageIcon;
import javax.swing.JButton;
import javax.swing.JDialog;
import javax.swing.JLabel;
import javax.swing.JList;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.ListSelectionModel;
import javax.swing.border.EmptyBorder;
import javax.swing.event.ListSelectionEvent;
import javax.swing.event.ListSelectionListener;
import com.jaamsim.Graphics.View;
import com.jaamsim.basicsim.JaamSimModel;
import com.jaamsim.basicsim.Simulation;
import com.jaamsim.controllers.RenderManager;
import com.jaamsim.input.Input;
import com.jaamsim.input.InputAgent;
import com.jaamsim.render.CameraInfo;
import com.jaamsim.render.Future;
public class ExampleBox extends JDialog {
private String presentExample;
private final ArrayList<String> exampleList = new ArrayList<>();
private JList<String> list;
private final SearchField exampleSearch;
private final JLabel previewLabel;
private final ImageIcon previewIcon = new ImageIcon();
private final HashMap<String, Future<BufferedImage>> imageCache = new HashMap<>();
private static ExampleBox myInstance;
private static final String DIALOG_NAME = "Examples - JaamSim";
private static final String DEFAULT_TOPIC = "";
public ExampleBox() {
super((JDialog)null, DIALOG_NAME, false);
setIconImages(GUIFrame.getWindowIcons());
setResizable(true);
setDefaultCloseOperation(HIDE_ON_CLOSE);
getContentPane().setLayout( new BorderLayout() );
setMinimumSize(new Dimension(300, 300));
setPreferredSize(new Dimension(1000, 800));
// Example List
for (String name : GUIFrame.getResourceFileNames("/resources/examples")) {
if (name.endsWith(".cfg")) {
exampleList.add(name.substring(0, name.length() - 4));
}
}
Collections.sort(exampleList, Input.uiSortOrder);
// Example search
exampleSearch = new SearchField(30) {
@Override
public void showTopic(String topic) {
ExampleBox.this.showTopic(topic);
}
@Override
public ArrayList<String> getTopicList(String str) {
ArrayList<String> ret = new ArrayList<>();
for (String topic : exampleList) {
if (!topic.toUpperCase().contains(str.toUpperCase()))
continue;
ret.add(topic);
}
return ret;
}
};
exampleSearch.setToolTipText(GUIFrame.formatToolTip("Example Model",
"Title of the example model to find."));
JPanel textPanel = new JPanel();
textPanel.setLayout( new FlowLayout(FlowLayout.CENTER, 0, 0) );
textPanel.add(new JLabel("Find Example Model:"));
textPanel.add(Box.createRigidArea(new Dimension(5, 5)));
textPanel.add(exampleSearch);
textPanel.setBorder(new EmptyBorder(10, 5, 5, 5));
getContentPane().add(textPanel, BorderLayout.NORTH);
// Example selector
String[] topics = new String[exampleList.size()];
topics = exampleList.toArray(topics);
list = new JList<>(topics);
list.setSelectionMode(ListSelectionModel.SINGLE_INTERVAL_SELECTION);
list.addListSelectionListener(new ListSelectionListener() {
@Override
public void valueChanged(ListSelectionEvent e) {
int ind = list.getSelectedIndex();
if (ind == -1)
return;
showTopic(exampleList.get(ind));
exampleSearch.setText("");
}
});
JScrollPane listScroller = new JScrollPane(list);
listScroller.setBorder(new EmptyBorder(5, 5, 5, 0));
listScroller.setPreferredSize(new Dimension(250, 200));
getContentPane().add(listScroller, BorderLayout.WEST);
// Example preview
previewLabel = new JLabel("", JLabel.CENTER);
previewLabel.setBorder(new EmptyBorder(5, 5, 5, 5));
getContentPane().add(previewLabel, BorderLayout.CENTER);
// Open button
JButton openButton = new JButton("Open");
openButton.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
// Create the new simulation model
JaamSimModel simModel = new JaamSimModel(presentExample + ".cfg");
// Load the specified model file
simModel.autoLoad();
GUIFrame.getInstance().setWindowDefaults(simModel.getSimulation());
InputAgent.readResource(simModel, "<res>/examples/" + presentExample + ".cfg");
simModel.postLoad();
// Display the new model
GUIFrame.setJaamSimModel(simModel);
FrameBox.setSelectedEntity(simModel.getSimulation(), false);
// Bring the new model to front
GUIFrame.getInstance().setVisible(true);
}
});
// Close button
JButton closeButton = new JButton("Close");
closeButton.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
setVisible(false);
}
});
// Add the buttons to the dialog
JPanel buttonPanel = new JPanel();
buttonPanel.setLayout( new FlowLayout(FlowLayout.CENTER) );
buttonPanel.add(openButton);
buttonPanel.add(closeButton);
getContentPane().add("South", buttonPanel);
pack();
// Set initial position in middle of screen
setLocationRelativeTo(null);
}
public synchronized static ExampleBox getInstance() {
if (myInstance == null)
myInstance = new ExampleBox();
return myInstance;
}
private synchronized static void killInstance() {
myInstance = null;
}
@Override
public void dispose() {
killInstance();
super.dispose();
}
public void showDialog() {
showDialog("");
}
/**
* Launches the Help tool for the specified string that determines the topic to be displayed.
* If the string is an exact match to a topic then that topic is displayed.
* Otherwise, the displayed topic is the first one in the list of topics that contains the string.
* If the string is blank, the previous topic is retained.
* If there is no previous topic, the default topic is displayed.
* @param str - determines the topic to be displayed
*/
public void showDialog(String str) {
String topic = ""; // displays the present topic
// Present topic or default topic
if (str.isEmpty()) {
if (presentExample == null)
topic = DEFAULT_TOPIC;
}
// Exact match to a topic
else if (exampleList.contains(str)) {
topic = str;
}
// First topic that contains the string
else {
for (String tpc : exampleList) {
if (tpc.toUpperCase().contains(str.toUpperCase())) {
topic = tpc;
break;
}
}
}
// Display the selected topic
showTopic(topic);
exampleSearch.setText("");
this.setVisible(true);
}
private void showTopic(String topic) {
try {
URL url = GUIFrame.class.getResource("/resources/examples/" + topic + ".cfg");
if (url == null)
return;
presentExample = topic;
int ind = exampleList.indexOf(topic);
list.setSelectedIndex(ind);
list.ensureIndexIsVisible(ind);
// Clear the old preview image
previewLabel.setIcon(null);
// Get the preview image
Future<BufferedImage> fi = getPreview(topic);
fi.blockUntilDone();
if (fi.failed()) {
System.out.println(fi.getFailureMessage());
return; // Something went wrong...
}
// Display the image
previewIcon.setImage(fi.get());
previewLabel.setIcon(previewIcon);
}
catch (Throwable t) {}
}
public Future<BufferedImage> getPreview(String example) {
synchronized (imageCache) {
// Return the cached image if available
Future<BufferedImage> cached = imageCache.get(example);
if (cached != null) {
return cached;
}
// Create the new model
JaamSimModel simModel = new JaamSimModel(example + ".cfg");
simModel.autoLoad();
InputAgent.readResource(simModel, "<res>/examples/" + example + ".cfg");
simModel.postLoad();
// Add labels and sub-models
Simulation simulation = simModel.getSimulation();
simModel.showTemporaryLabels( simulation.isShowLabels() );
simModel.showSubModels( simulation.isShowSubModels() );
// Get the View to render
View view = null;
for (View v : simModel.getInstanceIterator(View.class)) {
view = v;
break;
}
// Render the view offscreen
if (view == null || !RenderManager.isGood())
return null;
CameraInfo camInfo = view.getCameraInfo();
Future<BufferedImage> fi = RenderManager.inst().renderOffscreen(simModel, 0.0d, camInfo, 640, 480);
// Save and return the image
imageCache.put(example, fi);
return fi;
}
}
}
|
src/main/java/com/jaamsim/ui/ExampleBox.java
|
/*
* JaamSim Discrete Event Simulation
* Copyright (C) 2020-2021 JaamSim Software Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.jaamsim.ui;
import java.awt.BorderLayout;
import java.awt.Dimension;
import java.awt.FlowLayout;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.image.BufferedImage;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import javax.swing.Box;
import javax.swing.ImageIcon;
import javax.swing.JButton;
import javax.swing.JDialog;
import javax.swing.JLabel;
import javax.swing.JList;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.ListSelectionModel;
import javax.swing.border.EmptyBorder;
import javax.swing.event.ListSelectionEvent;
import javax.swing.event.ListSelectionListener;
import com.jaamsim.Graphics.View;
import com.jaamsim.basicsim.JaamSimModel;
import com.jaamsim.basicsim.Simulation;
import com.jaamsim.controllers.RenderManager;
import com.jaamsim.input.Input;
import com.jaamsim.input.InputAgent;
import com.jaamsim.render.CameraInfo;
import com.jaamsim.render.Future;
public class ExampleBox extends JDialog {
private String presentExample;
private final ArrayList<String> exampleList = new ArrayList<>();
private JList<String> list;
private final SearchField exampleSearch;
private final JLabel previewLabel;
private final ImageIcon previewIcon = new ImageIcon();
private final HashMap<String, Future<BufferedImage>> imageCache = new HashMap<>();
private static ExampleBox myInstance;
private static final String DIALOG_NAME = "Examples - JaamSim";
private static final String DEFAULT_TOPIC = "";
public ExampleBox() {
super((JDialog)null, DIALOG_NAME, false);
setIconImages(GUIFrame.getWindowIcons());
setResizable(true);
setDefaultCloseOperation(HIDE_ON_CLOSE);
getContentPane().setLayout( new BorderLayout() );
setMinimumSize(new Dimension(300, 300));
setPreferredSize(new Dimension(1000, 800));
// Example List
for (String name : GUIFrame.getResourceFileNames("/resources/examples")) {
if (name.endsWith(".cfg")) {
exampleList.add(name.substring(0, name.length() - 4));
}
}
Collections.sort(exampleList, Input.uiSortOrder);
// Example search
exampleSearch = new SearchField(30) {
@Override
public void showTopic(String topic) {
ExampleBox.this.showTopic(topic);
}
@Override
public ArrayList<String> getTopicList(String str) {
ArrayList<String> ret = new ArrayList<>();
for (String topic : exampleList) {
if (!topic.toUpperCase().contains(str.toUpperCase()))
continue;
ret.add(topic);
}
return ret;
}
};
exampleSearch.setToolTipText(GUIFrame.formatToolTip("Example Model",
"Title of the example model to find."));
JPanel textPanel = new JPanel();
textPanel.setLayout( new FlowLayout(FlowLayout.CENTER, 0, 0) );
textPanel.add(new JLabel("Find Example Model:"));
textPanel.add(Box.createRigidArea(new Dimension(5, 5)));
textPanel.add(exampleSearch);
textPanel.setBorder(new EmptyBorder(10, 5, 5, 5));
getContentPane().add(textPanel, BorderLayout.NORTH);
// Example selector
String[] topics = new String[exampleList.size()];
topics = exampleList.toArray(topics);
list = new JList<>(topics);
list.setSelectionMode(ListSelectionModel.SINGLE_INTERVAL_SELECTION);
list.addListSelectionListener(new ListSelectionListener() {
@Override
public void valueChanged(ListSelectionEvent e) {
int ind = list.getSelectedIndex();
if (ind == -1)
return;
showTopic(exampleList.get(ind));
exampleSearch.setText("");
}
});
JScrollPane listScroller = new JScrollPane(list);
listScroller.setBorder(new EmptyBorder(5, 5, 5, 0));
listScroller.setPreferredSize(new Dimension(250, 200));
getContentPane().add(listScroller, BorderLayout.WEST);
// Example preview
previewLabel = new JLabel("", JLabel.CENTER);
previewLabel.setBorder(new EmptyBorder(5, 5, 5, 5));
getContentPane().add(previewLabel, BorderLayout.CENTER);
// Open button
JButton openButton = new JButton("Open");
openButton.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
// Create the new simulation model
JaamSimModel simModel = new JaamSimModel(presentExample + ".cfg");
// Load the specified model file
simModel.autoLoad();
GUIFrame.getInstance().setWindowDefaults(simModel.getSimulation());
InputAgent.readResource(simModel, "<res>/examples/" + presentExample + ".cfg");
simModel.postLoad();
// Display the new model
GUIFrame.setJaamSimModel(simModel);
FrameBox.setSelectedEntity(simModel.getSimulation(), false);
// Bring the new model to front
GUIFrame.getInstance().setVisible(true);
}
});
// Close button
JButton closeButton = new JButton("Close");
closeButton.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
setVisible(false);
}
});
// Add the buttons to the dialog
JPanel buttonPanel = new JPanel();
buttonPanel.setLayout( new FlowLayout(FlowLayout.CENTER) );
buttonPanel.add(openButton);
buttonPanel.add(closeButton);
getContentPane().add("South", buttonPanel);
pack();
// Set initial position in middle of screen
setLocationRelativeTo(null);
}
public synchronized static ExampleBox getInstance() {
if (myInstance == null)
myInstance = new ExampleBox();
return myInstance;
}
private synchronized static void killInstance() {
myInstance = null;
}
@Override
public void dispose() {
killInstance();
super.dispose();
}
public void showDialog() {
showDialog("");
}
/**
* Launches the Help tool for the specified string that determines the topic to be displayed.
* If the string is an exact match to a topic then that topic is displayed.
* Otherwise, the displayed topic is the first one in the list of topics that contains the string.
* If the string is blank, the previous topic is retained.
* If there is no previous topic, the default topic is displayed.
* @param str - determines the topic to be displayed
*/
public void showDialog(String str) {
String topic = ""; // displays the present topic
// Present topic or default topic
if (str.isEmpty()) {
if (presentExample == null)
topic = DEFAULT_TOPIC;
}
// Exact match to a topic
else if (exampleList.contains(str)) {
topic = str;
}
// First topic that contains the string
else {
for (String tpc : exampleList) {
if (tpc.toUpperCase().contains(str.toUpperCase())) {
topic = tpc;
break;
}
}
}
// Display the selected topic
showTopic(topic);
exampleSearch.setText("");
this.setVisible(true);
}
private void showTopic(String topic) {
try {
URL url = GUIFrame.class.getResource("/resources/examples/" + topic + ".cfg");
if (url == null)
return;
presentExample = topic;
int ind = exampleList.indexOf(topic);
list.setSelectedIndex(ind);
list.ensureIndexIsVisible(ind);
// Clear the old preview image
previewLabel.setIcon(null);
// Get the preview image
Future<BufferedImage> fi = getPreview(topic);
fi.blockUntilDone();
if (fi.failed()) {
System.out.println(fi.getFailureMessage());
return; // Something went wrong...
}
// Display the image
previewIcon.setImage(fi.get());
previewLabel.setIcon(previewIcon);
}
catch (Throwable t) {}
}
/**
 * Returns a Future for the preview image of the given example, rendering it
 * offscreen and caching the result on first request.
 * @param example - name of the example configuration file (without ".cfg")
 * @return the (possibly still rendering) preview image, or null if the
 *         example defines no View or the renderer is unavailable
 */
public Future<BufferedImage> getPreview(String example) {
	synchronized (imageCache) {
		// Return the cached image if available
		Future<BufferedImage> cached = imageCache.get(example);
		if (cached != null) {
			return cached;
		}
		// Create the new model
		JaamSimModel simModel = new JaamSimModel(example + ".cfg");
		simModel.autoLoad();
		Simulation simulation = simModel.getSimulation();
		// Note: 'setWindowDefaults' is intentionally NOT called here. The tool
		// windows are never shown for a preview and the View size/position
		// defaults are static, so the call would only leak the example's
		// Simulation settings (e.g. its speed multiplier) into the Control Panel.
		InputAgent.readResource(simModel, "<res>/examples/" + example + ".cfg");
		simModel.postLoad();
		// Add labels and sub-models
		simModel.showTemporaryLabels( simulation.isShowLabels() );
		simModel.showSubModels( simulation.isShowSubModels() );
		// Get the View to render (the first one defined by the model)
		View view = null;
		for (View v : simModel.getInstanceIterator(View.class)) {
			view = v;
			break;
		}
		// Render the view offscreen; a null result is not cached so the
		// preview can be retried once the renderer becomes available
		if (view == null || !RenderManager.isGood())
			return null;
		CameraInfo camInfo = view.getCameraInfo();
		Future<BufferedImage> fi = RenderManager.inst().renderOffscreen(simModel, 0.0d, camInfo, 640, 480);
		// Save and return the image
		imageCache.put(example, fi);
		return fi;
	}
}
}
|
JS: delete call to 'setWindowDefaults' in ExampleBox
Fixes a bug that caused the Control Panel to show the example's speed
multiplier value. The call is not required because the tool windows are not
shown and because the View size and position defaults are static.
Signed-off-by: Harry King <409587b9e6671aa0763646191d292852dc49a658@gmail.com>
|
src/main/java/com/jaamsim/ui/ExampleBox.java
|
JS: delete call to 'setWindowDefaults' in ExampleBox
|
|
Java
|
apache-2.0
|
03a0de82c3003f2f6dec3b179b87a6ff330f983f
| 0
|
chibenwa/james,rouazana/james,chibenwa/james,aduprat/james,aduprat/james,aduprat/james,aduprat/james,chibenwa/james,rouazana/james,rouazana/james,chibenwa/james,rouazana/james
|
/****************************************************************
* Licensed to the Apache Software Foundation (ASF) under one *
* or more contributor license agreements. See the NOTICE file *
* distributed with this work for additional information *
* regarding copyright ownership. The ASF licenses this file *
* to you under the Apache License, Version 2.0 (the *
* "License"); you may not use this file except in compliance *
* with the License. You may obtain a copy of the License at *
* *
* http://www.apache.org/licenses/LICENSE-2.0 *
* *
* Unless required by applicable law or agreed to in writing, *
* software distributed under the License is distributed on an *
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY *
* KIND, either express or implied. See the License for the *
* specific language governing permissions and limitations *
* under the License. *
****************************************************************/
package org.apache.james.transport.matchers;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.Locale;
import javax.mail.MessagingException;
import org.apache.mailet.Mail;
import org.apache.mailet.MailAddress;
/**
* <P>
* Matches recipients having the mail sender in the recipient's private
* whitelist .
* </P>
* <P>
* The recipient name is always converted to its primary name (handling
* aliases).
* </P>
* <P>
* Configuration string: The database name containing the white list table.
* </P>
* <P>
* Example:
* </P>
*
* <PRE>
* <CODE>
* <mailet match="IsInWhiteList=db://maildb" class="ToProcessor">
* <processor> transport </processor>
* </mailet>
* </CODE>
* </PRE>
*
* @see org.apache.james.transport.mailets.WhiteListManager
* @version SVN $Revision: $ $Date: $
* @since 2.3.0
*/
public class IsInWhiteList extends AbstractSQLWhitelistMatcher {

    /**
     * SQL used to probe the whitelist table. Bound parameters are, in order:
     * recipientUser, recipientHost, senderUser, senderHost.
     */
    private String selectByPK;

    @Override
    public void init() throws javax.mail.MessagingException {
        super.init();
        selectByPK = sqlQueries.getSqlString("selectByPK", true);
    }

    /*
     * (non-Javadoc)
     * @see org.apache.james.transport.matchers.AbstractSQLWhitelistMatcher#getSQLSectionName()
     */
    protected String getSQLSectionName() {
        return "Whitelist";
    }

    /*
     * (non-Javadoc)
     * @see org.apache.james.transport.matchers.AbstractSQLWhitelistMatcher#matchedWhitelist(org.apache.mailet.MailAddress, org.apache.mailet.Mail)
     */
    protected boolean matchedWhitelist(MailAddress recipientMailAddress, Mail mail) throws MessagingException {
        MailAddress senderMailAddress = mail.getSender();
        String senderUser = senderMailAddress.getLocalPart().toLowerCase(Locale.US);
        String senderHost = senderMailAddress.getDomain().toLowerCase(Locale.US);
        String recipientUser = recipientMailAddress.getLocalPart().toLowerCase(Locale.US);
        String recipientHost = recipientMailAddress.getDomain().toLowerCase(Locale.US);
        Connection conn = null;
        PreparedStatement selectStmt = null;
        try {
            conn = datasource.getConnection();
            // The query text never changes between lookups, so a single
            // PreparedStatement is prepared once and reused with different
            // bound values. (Re-preparing it per lookup, as before, leaked
            // every PreparedStatement except the last one closed in finally.)
            selectStmt = conn.prepareStatement(selectByPK);
            // Check the exact entry first, then the progressively wider
            // wildcard variants.
            return hasWhitelistEntry(selectStmt, recipientUser, recipientHost, senderUser, senderHost)
                    // wildcard sender user
                    || hasWhitelistEntry(selectStmt, recipientUser, recipientHost, "*", senderHost)
                    // wildcard recipient user
                    || hasWhitelistEntry(selectStmt, "*", recipientHost, senderUser, senderHost)
                    // wildcard on both sender and recipient user
                    || hasWhitelistEntry(selectStmt, "*", recipientHost, "*", senderHost);
        } catch (SQLException sqle) {
            log("Error accessing database", sqle);
            throw new MessagingException("Exception thrown", sqle);
        } finally {
            theJDBCUtil.closeJDBCStatement(selectStmt);
            theJDBCUtil.closeJDBCConnection(conn);
        }
    }

    /**
     * Binds the given key values to the prepared whitelist query and reports
     * whether a matching row exists. The ResultSet is always closed before
     * returning so the statement can be reused for the next lookup.
     */
    private boolean hasWhitelistEntry(PreparedStatement selectStmt, String recipientUser,
            String recipientHost, String senderUser, String senderHost) throws SQLException {
        selectStmt.setString(1, recipientUser);
        selectStmt.setString(2, recipientHost);
        selectStmt.setString(3, senderUser);
        selectStmt.setString(4, senderHost);
        ResultSet selectRS = null;
        try {
            selectRS = selectStmt.executeQuery();
            // A single row is enough: the address pair is whitelisted
            return selectRS.next();
        } finally {
            theJDBCUtil.closeJDBCResultSet(selectRS);
        }
    }

    /*
     * (non-Javadoc)
     * @see org.apache.james.transport.matchers.AbstractSQLWhitelistMatcher#getTableCreateQueryName()
     */
    protected String getTableCreateQueryName() {
        return "createWhiteListTable";
    }

    /*
     * (non-Javadoc)
     * @see org.apache.james.transport.matchers.AbstractSQLWhitelistMatcher#getTableName()
     */
    protected String getTableName() {
        return "whiteListTableName";
    }
}
|
mailets/src/main/java/org/apache/james/transport/matchers/IsInWhiteList.java
|
/****************************************************************
* Licensed to the Apache Software Foundation (ASF) under one *
* or more contributor license agreements. See the NOTICE file *
* distributed with this work for additional information *
* regarding copyright ownership. The ASF licenses this file *
* to you under the Apache License, Version 2.0 (the *
* "License"); you may not use this file except in compliance *
* with the License. You may obtain a copy of the License at *
* *
* http://www.apache.org/licenses/LICENSE-2.0 *
* *
* Unless required by applicable law or agreed to in writing, *
* software distributed under the License is distributed on an *
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY *
* KIND, either express or implied. See the License for the *
* specific language governing permissions and limitations *
* under the License. *
****************************************************************/
package org.apache.james.transport.matchers;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.Locale;
import javax.mail.MessagingException;
import org.apache.mailet.Mail;
import org.apache.mailet.MailAddress;
/**
* <P>
* Matches recipients having the mail sender in the recipient's private
* whitelist .
* </P>
* <P>
* The recipient name is always converted to its primary name (handling
* aliases).
* </P>
* <P>
* Configuration string: The database name containing the white list table.
* </P>
* <P>
* Example:
* </P>
*
* <PRE>
* <CODE>
* <mailet match="IsInWhiteList=db://maildb" class="ToProcessor">
* <processor> transport </processor>
* </mailet>
* </CODE>
* </PRE>
*
* @see org.apache.james.transport.mailets.WhiteListManager
* @version SVN $Revision: $ $Date: $
* @since 2.3.0
*/
public class IsInWhiteList extends AbstractSQLWhitelistMatcher {

    /**
     * SQL used to probe the whitelist table. Bound parameters are, in order:
     * recipientUser, recipientHost, senderUser, senderHost.
     */
    private String selectByPK;

    @Override
    public void init() throws javax.mail.MessagingException {
        super.init();
        selectByPK = sqlQueries.getSqlString("selectByPK", true);
    }

    /*
     * (non-Javadoc)
     * @see org.apache.james.transport.matchers.AbstractSQLWhitelistMatcher#getSQLSectionName()
     */
    protected String getSQLSectionName() {
        return "Whitelist";
    }

    /*
     * (non-Javadoc)
     * @see org.apache.james.transport.matchers.AbstractSQLWhitelistMatcher#matchedWhitelist(org.apache.mailet.MailAddress, org.apache.mailet.Mail)
     */
    protected boolean matchedWhitelist(MailAddress recipientMailAddress, Mail mail) throws MessagingException {
        MailAddress senderMailAddress = mail.getSender();
        String senderUser = senderMailAddress.getLocalPart().toLowerCase(Locale.US);
        String senderHost = senderMailAddress.getDomain().toLowerCase(Locale.US);
        String recipientUser = recipientMailAddress.getLocalPart().toLowerCase(Locale.US);
        String recipientHost = recipientMailAddress.getDomain().toLowerCase(Locale.US);
        Connection conn = null;
        PreparedStatement selectStmt = null;
        try {
            conn = datasource.getConnection();
            // The query text never changes between lookups, so a single
            // PreparedStatement is prepared once and reused with different
            // bound values. (Re-preparing it for the wildcard lookup, as
            // before, leaked the first PreparedStatement: only the last
            // assignment was closed in the finally block.)
            selectStmt = conn.prepareStatement(selectByPK);
            // Check the exact entry first, then the wildcard domain entry.
            return hasWhitelistEntry(selectStmt, recipientUser, recipientHost, senderUser, senderHost)
                    // wildcard sender user
                    || hasWhitelistEntry(selectStmt, recipientUser, recipientHost, "*", senderHost);
        } catch (SQLException sqle) {
            log("Error accessing database", sqle);
            throw new MessagingException("Exception thrown", sqle);
        } finally {
            theJDBCUtil.closeJDBCStatement(selectStmt);
            theJDBCUtil.closeJDBCConnection(conn);
        }
    }

    /**
     * Binds the given key values to the prepared whitelist query and reports
     * whether a matching row exists. The ResultSet is always closed before
     * returning so the statement can be reused for the next lookup.
     */
    private boolean hasWhitelistEntry(PreparedStatement selectStmt, String recipientUser,
            String recipientHost, String senderUser, String senderHost) throws SQLException {
        selectStmt.setString(1, recipientUser);
        selectStmt.setString(2, recipientHost);
        selectStmt.setString(3, senderUser);
        selectStmt.setString(4, senderHost);
        ResultSet selectRS = null;
        try {
            selectRS = selectStmt.executeQuery();
            // A single row is enough: the address pair is whitelisted
            return selectRS.next();
        } finally {
            theJDBCUtil.closeJDBCResultSet(selectRS);
        }
    }

    /*
     * (non-Javadoc)
     * @see org.apache.james.transport.matchers.AbstractSQLWhitelistMatcher#getTableCreateQueryName()
     */
    protected String getTableCreateQueryName() {
        return "createWhiteListTable";
    }

    /*
     * (non-Javadoc)
     * @see org.apache.james.transport.matchers.AbstractSQLWhitelistMatcher#getTableName()
     */
    protected String getTableName() {
        return "whiteListTableName";
    }
}
|
Add more wildcard matching for IsInWhitelist
git-svn-id: de9d04cf23151003780adc3e4ddb7078e3680318@986070 13f79535-47bb-0310-9956-ffa450edef68
|
mailets/src/main/java/org/apache/james/transport/matchers/IsInWhiteList.java
|
Add more wildcard matching for IsInWhitelist
|
|
Java
|
apache-2.0
|
34eb10299f14d44ce106810aaaf7f55ad7663635
| 0
|
mtransitapps/ca-hamilton-hsr-bus-parser
|
package org.mtransit.parser.ca_hamilton_hsr_bus;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.mtransit.commons.CharUtils;
import org.mtransit.commons.CleanUtils;
import org.mtransit.commons.StringUtils;
import org.mtransit.parser.DefaultAgencyTools;
import org.mtransit.parser.MTLog;
import org.mtransit.parser.gtfs.data.GStop;
import org.mtransit.parser.mt.data.MAgency;
import java.util.List;
import java.util.Locale;
import java.util.regex.Pattern;
import static org.mtransit.commons.StringUtils.EMPTY;
// https://www.hamilton.ca/city-initiatives/strategies-actions/open-data-program
// https://googlehsr.hamilton.ca/latest/google_transit.zip
/**
 * GTFS parser configuration for the Hamilton HSR bus network.
 * Customizes route-ID conversion, short/long-name cleanup, head-sign
 * selection, and stop-name/ID extraction on top of DefaultAgencyTools.
 */
public class HamiltonHSRBusAgencyTools extends DefaultAgencyTools {

	public static void main(@NotNull String[] args) {
		new HamiltonHSRBusAgencyTools().start(args);
	}

	@NotNull
	@Override
	public String getAgencyName() {
		return "HSR";
	}

	@Override // added for consistency with the sibling overrides
	public boolean defaultExcludeEnabled() {
		return true;
	}

	@Nullable
	@Override
	public List<Locale> getSupportedLanguages() {
		return LANG_EN;
	}

	@NotNull
	@Override
	public Integer getAgencyRouteType() {
		return MAgency.ROUTE_TYPE_BUS;
	}

	@Override
	public boolean defaultRouteIdEnabled() {
		return true;
	}

	@Override
	public boolean useRouteShortNameForRouteId() {
		return true;
	}

	/**
	 * Maps the non-numeric route short names used by this feed to fixed
	 * numeric route IDs; everything else is delegated to the superclass.
	 */
	@Nullable
	@Override
	public Long convertRouteIdFromShortNameNotSupported(@NotNull String routeShortName) {
		if ("ROCKTON".equalsIgnoreCase(routeShortName)) {
			return 4528L;
		} else if ("TC".equalsIgnoreCase(routeShortName)) {
			return 4531L;
		} else if ("SHER".equalsIgnoreCase(routeShortName)) {
			return 4530L;
		} else if ("BLACK".equalsIgnoreCase(routeShortName)) {
			return 4619L;
		} else if ("GOLD".equalsIgnoreCase(routeShortName)) {
			return 4618L;
		} else if ("GREEN".equalsIgnoreCase(routeShortName)) {
			return 4621L;
		} else if ("RED".equalsIgnoreCase(routeShortName)) {
			return 4620L;
		}
		return super.convertRouteIdFromShortNameNotSupported(routeShortName);
	}

	@Nullable
	@Override
	public Long convertRouteIdPreviousChars(@NotNull String previousChars) {
		if ("TC".equalsIgnoreCase(previousChars)) {
			return 99_000L;
		}
		return null;
	}

	// Leading zeros and dashes are stripped from route short names
	private static final Pattern STARTS_WITH_0_ = Pattern.compile("(^0*)");
	private static final Pattern DASH_ = Pattern.compile("(\\-+)");

	@NotNull
	@Override
	public String cleanRouteShortName(@NotNull String routeShortName) {
		routeShortName = STARTS_WITH_0_.matcher(routeShortName).replaceAll(EMPTY);
		routeShortName = DASH_.matcher(routeShortName).replaceAll(EMPTY);
		return super.cleanRouteShortName(routeShortName);
	}

	@Override
	public boolean defaultRouteLongNameEnabled() {
		return true;
	}

	@NotNull
	@Override
	public String cleanRouteLongName(@NotNull String routeLongName) {
		routeLongName = CleanUtils.toLowerCaseUpperCaseWords(Locale.ENGLISH, routeLongName, getIgnoredWords());
		return super.cleanRouteLongName(routeLongName);
	}

	@Override
	public boolean defaultAgencyColorEnabled() {
		return true;
	}

	private static final String AGENCY_COLOR = "F4CB0B"; // YELLOW (flag)

	@NotNull
	@Override
	public String getAgencyColor() {
		return AGENCY_COLOR;
	}

	@Override
	public boolean directionFinderEnabled() {
		return true;
	}

	// Head-signs like "5A Downtown" start with a route number + letter
	private static final Pattern STARTS_WITH_RSN_LETTER = Pattern.compile("(^[\\d]+[a-z] )", Pattern.CASE_INSENSITIVE);

	/**
	 * Prefers the head-sign that does NOT start with a route short name
	 * letter prefix; returns null when no preference can be made.
	 */
	@Nullable
	@Override
	public String selectDirectionHeadSign(@Nullable String headSign1, @Nullable String headSign2) {
		if (StringUtils.equals(headSign1, headSign2)) {
			return null; // can NOT select
		}
		final boolean startsWithLetter1 = headSign1 != null && STARTS_WITH_RSN_LETTER.matcher(headSign1).find();
		final boolean startsWithLetter2 = headSign2 != null && STARTS_WITH_RSN_LETTER.matcher(headSign2).find();
		if (startsWithLetter1) {
			if (!startsWithLetter2) {
				return headSign2;
			}
		} else if (startsWithLetter2) {
			return headSign1;
		}
		return null;
	}

	// Feed typo and abbreviation fix-ups applied to trip head-signs
	private static final Pattern FIX_BURLINGTON_ = CleanUtils.cleanWords("burlinton");
	private static final String FIX_BURLINGTON_REPLACEMENT = CleanUtils.cleanWordsReplacement("Burlington");
	private static final Pattern HAMILTON_AIRPORT = CleanUtils.cleanWords("hamilton airport");
	private static final String HAMILTON_AIRPORT_REPLACEMENT = CleanUtils.cleanWordsReplacement("Airport");
	private static final Pattern HAMILTON_WATERFRONT = CleanUtils.cleanWords("hamilton waterfront");
	private static final String HAMILTON_WATERFRONT_REPLACEMENT = CleanUtils.cleanWordsReplacement("Waterfront");
	private static final String POWER_CENTRE_SHORT = "PC"; // Power Center
	private static final Pattern POWER_CENTRE = CleanUtils.cleanWords("power centre", "power center");
	private static final String POWER_CENTRE_REPLACEMENT = CleanUtils.cleanWordsReplacement(POWER_CENTRE_SHORT);
	private static final Pattern FIX_HERITAGE_ = CleanUtils.cleanWords("Heratige");
	private static final String FIX_HERITAGE_REPLACEMENT = CleanUtils.cleanWordsReplacement("Heritage");
	private static final Pattern STARTS_WITH_ALDER_SHOT_GO_DASH_ = Pattern.compile("(^(aldershot go - ))", Pattern.CASE_INSENSITIVE); // route 18

	@NotNull
	@Override
	public String cleanTripHeadsign(@NotNull String tripHeadsign) {
		// NOTE: replacement order matters; the feed-specific fix-ups run
		// before the generic CleanUtils passes.
		tripHeadsign = CleanUtils.toLowerCaseUpperCaseWords(Locale.ENGLISH, tripHeadsign, getIgnoredWords());
		tripHeadsign = CleanUtils.keepToAndRemoveVia(tripHeadsign);
		tripHeadsign = FIX_BURLINGTON_.matcher(tripHeadsign).replaceAll(FIX_BURLINGTON_REPLACEMENT);
		tripHeadsign = HAMILTON_AIRPORT.matcher(tripHeadsign).replaceAll(HAMILTON_AIRPORT_REPLACEMENT);
		tripHeadsign = HAMILTON_WATERFRONT.matcher(tripHeadsign).replaceAll(HAMILTON_WATERFRONT_REPLACEMENT);
		tripHeadsign = POWER_CENTRE.matcher(tripHeadsign).replaceAll(POWER_CENTRE_REPLACEMENT);
		tripHeadsign = FIX_HERITAGE_.matcher(tripHeadsign).replaceAll(FIX_HERITAGE_REPLACEMENT);
		tripHeadsign = STARTS_WITH_ALDER_SHOT_GO_DASH_.matcher(tripHeadsign).replaceAll(EMPTY);
		tripHeadsign = CleanUtils.CLEAN_AT.matcher(tripHeadsign).replaceAll(CleanUtils.CLEAN_AT_REPLACEMENT);
		tripHeadsign = CleanUtils.CLEAN_AND.matcher(tripHeadsign).replaceAll(CleanUtils.CLEAN_AND_REPLACEMENT);
		tripHeadsign = CleanUtils.SAINT.matcher(tripHeadsign).replaceAll(CleanUtils.SAINT_REPLACEMENT);
		tripHeadsign = CleanUtils.fixMcXCase(tripHeadsign);
		tripHeadsign = CleanUtils.cleanStreetTypes(tripHeadsign);
		tripHeadsign = CleanUtils.cleanNumbers(tripHeadsign);
		return CleanUtils.cleanLabel(tripHeadsign);
	}

	/** Words that must keep their upper-case form during name cleanup. */
	private String[] getIgnoredWords() {
		return new String[]{
				"CC", "GO", "P&R", "TC", "VIA",
		};
	}

	@NotNull
	@Override
	public String cleanStopName(@NotNull String gStopName) {
		gStopName = CleanUtils.toLowerCaseUpperCaseWords(Locale.ENGLISH, gStopName, getIgnoredWords());
		gStopName = CleanUtils.CLEAN_AT.matcher(gStopName).replaceAll(CleanUtils.CLEAN_AT_REPLACEMENT);
		gStopName = CleanUtils.CLEAN_AND.matcher(gStopName).replaceAll(CleanUtils.CLEAN_AND_REPLACEMENT);
		gStopName = CleanUtils.fixMcXCase(gStopName);
		gStopName = CleanUtils.cleanStreetTypes(gStopName);
		gStopName = CleanUtils.cleanNumbers(gStopName);
		return CleanUtils.cleanLabel(gStopName);
	}

	/**
	 * Extracts the numeric stop ID, stripping any merged-ID decoration first.
	 * Fails hard (MTLog.Fatal) on IDs that cannot be made numeric.
	 */
	@Override
	public int getStopId(@NotNull GStop gStop) {
		//noinspection deprecation
		String stopId = gStop.getStopId();
		if (!stopId.isEmpty()) {
			if (CharUtils.isDigitsOnly(stopId)) {
				return Integer.parseInt(stopId);
			}
			stopId = CleanUtils.cleanMergedID(stopId);
			if (CharUtils.isDigitsOnly(stopId)) {
				return Integer.parseInt(stopId);
			}
		}
		throw new MTLog.Fatal("Unexpected stop ID for %s!", gStop);
	}
}
|
src/main/java/org/mtransit/parser/ca_hamilton_hsr_bus/HamiltonHSRBusAgencyTools.java
|
package org.mtransit.parser.ca_hamilton_hsr_bus;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.mtransit.commons.CharUtils;
import org.mtransit.commons.CleanUtils;
import org.mtransit.commons.StringUtils;
import org.mtransit.parser.DefaultAgencyTools;
import org.mtransit.parser.MTLog;
import org.mtransit.parser.gtfs.data.GStop;
import org.mtransit.parser.mt.data.MAgency;
import java.util.List;
import java.util.Locale;
import java.util.regex.Pattern;
import static org.mtransit.commons.StringUtils.EMPTY;
// https://www.hamilton.ca/city-initiatives/strategies-actions/open-data-program
// https://googlehsr.hamilton.ca/latest/google_transit.zip
/**
 * GTFS parser configuration for the Hamilton HSR bus network.
 * Customizes route-ID conversion, short/long-name cleanup, head-sign
 * selection, and stop-name/ID extraction on top of DefaultAgencyTools.
 */
public class HamiltonHSRBusAgencyTools extends DefaultAgencyTools {

	public static void main(@NotNull String[] args) {
		new HamiltonHSRBusAgencyTools().start(args);
	}

	@NotNull
	@Override
	public String getAgencyName() {
		return "HSR";
	}

	@Override // added for consistency with the sibling overrides
	public boolean defaultExcludeEnabled() {
		return true;
	}

	@Nullable
	@Override
	public List<Locale> getSupportedLanguages() {
		return LANG_EN;
	}

	@NotNull
	@Override
	public Integer getAgencyRouteType() {
		return MAgency.ROUTE_TYPE_BUS;
	}

	@Override
	public boolean defaultRouteIdEnabled() {
		return true;
	}

	@Override
	public boolean useRouteShortNameForRouteId() {
		return true;
	}

	/**
	 * Maps the non-numeric route short names used by this feed to fixed
	 * numeric route IDs; everything else is delegated to the superclass.
	 */
	@Nullable
	@Override
	public Long convertRouteIdFromShortNameNotSupported(@NotNull String routeShortName) {
		if ("ROCKTON".equalsIgnoreCase(routeShortName)) {
			return 4528L;
		} else if ("TC".equalsIgnoreCase(routeShortName)) {
			return 4531L;
		} else if ("SHER".equalsIgnoreCase(routeShortName)) {
			return 4530L;
		} else if ("BLACK".equalsIgnoreCase(routeShortName)) {
			return 4619L;
		} else if ("GOLD".equalsIgnoreCase(routeShortName)) {
			return 4618L;
		} else if ("GREEN".equalsIgnoreCase(routeShortName)) {
			return 4621L;
		} else if ("RED".equalsIgnoreCase(routeShortName)) {
			return 4620L;
		}
		return super.convertRouteIdFromShortNameNotSupported(routeShortName);
	}

	// Leading zeros are stripped from route short names
	private static final Pattern STARTS_WITH_0_ = Pattern.compile("(^0*)");

	@NotNull
	@Override
	public String cleanRouteShortName(@NotNull String routeShortName) {
		routeShortName = STARTS_WITH_0_.matcher(routeShortName).replaceAll(EMPTY);
		return super.cleanRouteShortName(routeShortName);
	}

	@Override
	public boolean defaultRouteLongNameEnabled() {
		return true;
	}

	@NotNull
	@Override
	public String cleanRouteLongName(@NotNull String routeLongName) {
		routeLongName = CleanUtils.toLowerCaseUpperCaseWords(Locale.ENGLISH, routeLongName, getIgnoredWords());
		return super.cleanRouteLongName(routeLongName);
	}

	@Override
	public boolean defaultAgencyColorEnabled() {
		return true;
	}

	private static final String AGENCY_COLOR = "F4CB0B"; // YELLOW (flag)

	@NotNull
	@Override
	public String getAgencyColor() {
		return AGENCY_COLOR;
	}

	@Override
	public boolean directionFinderEnabled() {
		return true;
	}

	// Head-signs like "5A Downtown" start with a route number + letter
	private static final Pattern STARTS_WITH_RSN_LETTER = Pattern.compile("(^[\\d]+[a-z] )", Pattern.CASE_INSENSITIVE);

	/**
	 * Prefers the head-sign that does NOT start with a route short name
	 * letter prefix; returns null when no preference can be made.
	 */
	@Nullable
	@Override
	public String selectDirectionHeadSign(@Nullable String headSign1, @Nullable String headSign2) {
		if (StringUtils.equals(headSign1, headSign2)) {
			return null; // can NOT select
		}
		final boolean startsWithLetter1 = headSign1 != null && STARTS_WITH_RSN_LETTER.matcher(headSign1).find();
		final boolean startsWithLetter2 = headSign2 != null && STARTS_WITH_RSN_LETTER.matcher(headSign2).find();
		if (startsWithLetter1) {
			if (!startsWithLetter2) {
				return headSign2;
			}
		} else if (startsWithLetter2) {
			return headSign1;
		}
		return null;
	}

	// Feed typo and abbreviation fix-ups applied to trip head-signs
	private static final Pattern FIX_BURLINGTON_ = CleanUtils.cleanWords("burlinton");
	private static final String FIX_BURLINGTON_REPLACEMENT = CleanUtils.cleanWordsReplacement("Burlington");
	private static final Pattern HAMILTON_AIRPORT = CleanUtils.cleanWords("hamilton airport");
	private static final String HAMILTON_AIRPORT_REPLACEMENT = CleanUtils.cleanWordsReplacement("Airport");
	private static final Pattern HAMILTON_WATERFRONT = CleanUtils.cleanWords("hamilton waterfront");
	private static final String HAMILTON_WATERFRONT_REPLACEMENT = CleanUtils.cleanWordsReplacement("Waterfront");
	private static final String POWER_CENTRE_SHORT = "PC"; // Power Center
	private static final Pattern POWER_CENTRE = CleanUtils.cleanWords("power centre", "power center");
	private static final String POWER_CENTRE_REPLACEMENT = CleanUtils.cleanWordsReplacement(POWER_CENTRE_SHORT);
	private static final Pattern FIX_HERITAGE_ = CleanUtils.cleanWords("Heratige");
	private static final String FIX_HERITAGE_REPLACEMENT = CleanUtils.cleanWordsReplacement("Heritage");
	private static final Pattern STARTS_WITH_ALDER_SHOT_GO_DASH_ = Pattern.compile("(^(aldershot go - ))", Pattern.CASE_INSENSITIVE); // route 18

	@NotNull
	@Override
	public String cleanTripHeadsign(@NotNull String tripHeadsign) {
		// NOTE: replacement order matters; the feed-specific fix-ups run
		// before the generic CleanUtils passes.
		tripHeadsign = CleanUtils.toLowerCaseUpperCaseWords(Locale.ENGLISH, tripHeadsign, getIgnoredWords());
		tripHeadsign = CleanUtils.keepToAndRemoveVia(tripHeadsign);
		tripHeadsign = FIX_BURLINGTON_.matcher(tripHeadsign).replaceAll(FIX_BURLINGTON_REPLACEMENT);
		tripHeadsign = HAMILTON_AIRPORT.matcher(tripHeadsign).replaceAll(HAMILTON_AIRPORT_REPLACEMENT);
		tripHeadsign = HAMILTON_WATERFRONT.matcher(tripHeadsign).replaceAll(HAMILTON_WATERFRONT_REPLACEMENT);
		tripHeadsign = POWER_CENTRE.matcher(tripHeadsign).replaceAll(POWER_CENTRE_REPLACEMENT);
		tripHeadsign = FIX_HERITAGE_.matcher(tripHeadsign).replaceAll(FIX_HERITAGE_REPLACEMENT);
		tripHeadsign = STARTS_WITH_ALDER_SHOT_GO_DASH_.matcher(tripHeadsign).replaceAll(EMPTY);
		tripHeadsign = CleanUtils.CLEAN_AT.matcher(tripHeadsign).replaceAll(CleanUtils.CLEAN_AT_REPLACEMENT);
		tripHeadsign = CleanUtils.CLEAN_AND.matcher(tripHeadsign).replaceAll(CleanUtils.CLEAN_AND_REPLACEMENT);
		tripHeadsign = CleanUtils.SAINT.matcher(tripHeadsign).replaceAll(CleanUtils.SAINT_REPLACEMENT);
		tripHeadsign = CleanUtils.fixMcXCase(tripHeadsign);
		tripHeadsign = CleanUtils.cleanStreetTypes(tripHeadsign);
		tripHeadsign = CleanUtils.cleanNumbers(tripHeadsign);
		return CleanUtils.cleanLabel(tripHeadsign);
	}

	/** Words that must keep their upper-case form during name cleanup. */
	private String[] getIgnoredWords() {
		return new String[]{
				"CC", "GO", "P&R", "TC", "VIA",
		};
	}

	@NotNull
	@Override
	public String cleanStopName(@NotNull String gStopName) {
		gStopName = CleanUtils.toLowerCaseUpperCaseWords(Locale.ENGLISH, gStopName, getIgnoredWords());
		gStopName = CleanUtils.CLEAN_AT.matcher(gStopName).replaceAll(CleanUtils.CLEAN_AT_REPLACEMENT);
		gStopName = CleanUtils.CLEAN_AND.matcher(gStopName).replaceAll(CleanUtils.CLEAN_AND_REPLACEMENT);
		gStopName = CleanUtils.fixMcXCase(gStopName);
		gStopName = CleanUtils.cleanStreetTypes(gStopName);
		gStopName = CleanUtils.cleanNumbers(gStopName);
		return CleanUtils.cleanLabel(gStopName);
	}

	/**
	 * Extracts the numeric stop ID, stripping any merged-ID decoration first.
	 * Fails hard (MTLog.Fatal) on IDs that cannot be made numeric.
	 */
	@Override
	public int getStopId(@NotNull GStop gStop) {
		//noinspection deprecation
		String stopId = gStop.getStopId();
		if (!stopId.isEmpty()) {
			if (CharUtils.isDigitsOnly(stopId)) {
				return Integer.parseInt(stopId);
			}
			stopId = CleanUtils.cleanMergedID(stopId);
			if (CharUtils.isDigitsOnly(stopId)) {
				return Integer.parseInt(stopId);
			}
		}
		throw new MTLog.Fatal("Unexpected stop ID for %s!", gStop);
	}
}
|
Compatibility with latest update
|
src/main/java/org/mtransit/parser/ca_hamilton_hsr_bus/HamiltonHSRBusAgencyTools.java
|
Compatibility with latest update
|
|
Java
|
apache-2.0
|
00736bba70ddfb90e013f92ed23de6ec4aba060c
| 0
|
joansmith/orientdb,cstamas/orientdb,allanmoso/orientdb,wouterv/orientdb,allanmoso/orientdb,orientechnologies/orientdb,mmacfadden/orientdb,mbhulin/orientdb,orientechnologies/orientdb,tempbottle/orientdb,alonsod86/orientdb,joansmith/orientdb,sanyaade-g2g-repos/orientdb,tempbottle/orientdb,wyzssw/orientdb,alonsod86/orientdb,giastfader/orientdb,mmacfadden/orientdb,intfrr/orientdb,intfrr/orientdb,orientechnologies/orientdb,alonsod86/orientdb,intfrr/orientdb,sanyaade-g2g-repos/orientdb,mmacfadden/orientdb,allanmoso/orientdb,mmacfadden/orientdb,mbhulin/orientdb,cstamas/orientdb,mbhulin/orientdb,tempbottle/orientdb,orientechnologies/orientdb,rprabhat/orientdb,cstamas/orientdb,giastfader/orientdb,wouterv/orientdb,joansmith/orientdb,giastfader/orientdb,cstamas/orientdb,tempbottle/orientdb,jdillon/orientdb,wyzssw/orientdb,rprabhat/orientdb,jdillon/orientdb,intfrr/orientdb,mbhulin/orientdb,giastfader/orientdb,wyzssw/orientdb,alonsod86/orientdb,wouterv/orientdb,joansmith/orientdb,wouterv/orientdb,jdillon/orientdb,sanyaade-g2g-repos/orientdb,rprabhat/orientdb,wyzssw/orientdb,allanmoso/orientdb,rprabhat/orientdb,sanyaade-g2g-repos/orientdb
|
/*
* Copyright 2010-2012 Luca Garulli (l.garulli--at--orientechnologies.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.orientechnologies.orient.core.db;
import java.util.List;
import java.util.Set;
import com.orientechnologies.orient.core.command.OCommandRequest;
import com.orientechnologies.orient.core.db.object.ODatabaseObject;
import com.orientechnologies.orient.core.db.record.ODatabaseRecord;
import com.orientechnologies.orient.core.db.record.OIdentifiable;
import com.orientechnologies.orient.core.dictionary.ODictionary;
import com.orientechnologies.orient.core.exception.OTransactionException;
import com.orientechnologies.orient.core.hook.ORecordHook;
import com.orientechnologies.orient.core.hook.ORecordHook.HOOK_POSITION;
import com.orientechnologies.orient.core.hook.ORecordHook.RESULT;
import com.orientechnologies.orient.core.hook.ORecordHook.TYPE;
import com.orientechnologies.orient.core.id.ORID;
import com.orientechnologies.orient.core.metadata.OMetadata;
import com.orientechnologies.orient.core.metadata.security.OSecurity;
import com.orientechnologies.orient.core.metadata.security.OUser;
import com.orientechnologies.orient.core.query.OQuery;
import com.orientechnologies.orient.core.storage.ORecordCallback;
import com.orientechnologies.orient.core.storage.OStorage;
import com.orientechnologies.orient.core.tx.OTransaction;
import com.orientechnologies.orient.core.tx.OTransaction.TXTYPE;
import com.orientechnologies.orient.core.version.ORecordVersion;
/**
* Database interface that represents a complex database. It extends the base ODatabase interface adding all the higher-level APIs
* to treats records. Entities can be implementations of ORecord class for ODatabaseRecord or any POJO for ODatabaseObject. The
* behaviour of the datastore depends by the OStorage implementation used.
*
* @author Luca Garulli
*
* @see ODatabaseRecord
* @see ODatabaseObject
* @see OStorage
* @param <T>
*/
public interface ODatabaseComplex<T extends Object> extends ODatabase, OUserObject2RecordHandler {
public enum OPERATION_MODE {
SYNCHRONOUS, ASYNCHRONOUS, ASYNCHRONOUS_NOANSWER
}
/**
* Creates a new entity instance.
*
* @return The new instance.
*/
public <RET extends Object> RET newInstance();
/**
* Returns the Dictionary manual index.
*
* @return ODictionary instance
*/
public ODictionary<T> getDictionary();
/**
* Returns the current user logged into the database.
*
* @see OSecurity
*/
public OUser getUser();
/**
* Set user for current database instance
*/
public void setUser(OUser user);
/**
* Loads the entity and return it.
*
* @param iObject
* The entity to load. If the entity was already loaded it will be reloaded and all the changes will be lost.
* @return
*/
public <RET extends T> RET load(T iObject);
/**
* Loads a record using a fetch plan.
*
* @param iObject
* Record to load
* @param iFetchPlan
* Fetch plan used
* @return The record received
*/
public <RET extends T> RET load(T iObject, String iFetchPlan);
/**
* Loads a record using a fetch plan.
*
* @param iObject
* Record to load
* @param iFetchPlan
* Fetch plan used
* @return The record received
*/
public <RET extends T> RET load(T iObject, String iFetchPlan, boolean iIgnoreCache, boolean loadTombstone);
/**
* Loads a record using a fetch plan.
*
* @param iObject
* Record to load
* @param iFetchPlan
* Fetch plan used
* @param iIgnoreCache
* Ignore cache or use it
* @return The record received
*/
public <RET extends T> RET load(T iObject, String iFetchPlan, boolean iIgnoreCache);
/**
* Force the reloading of the entity.
*
* @param iObject
* The entity to load. If the entity was already loaded it will be reloaded and all the changes will be lost.
* @param iFetchPlan
* Fetch plan used
* @param iIgnoreCache
* Ignore cache or use it
* @return The loaded entity
*/
public <RET extends T> RET reload(final T iObject, String iFetchPlan, boolean iIgnoreCache);
/**
* Loads the entity by the Record ID.
*
* @param iRecordId
* The unique record id of the entity to load.
* @return The loaded entity
*/
public <RET extends T> RET load(ORID iRecordId);
/**
* Loads the entity by the Record ID using a fetch plan.
*
* @param iRecordId
* The unique record id of the entity to load.
* @param iFetchPlan
* Fetch plan used
* @return The loaded entity
*/
public <RET extends T> RET load(ORID iRecordId, String iFetchPlan);
/**
* Loads the entity by the Record ID using a fetch plan and specifying if the cache must be ignored.
*
* @param iRecordId
* The unique record id of the entity to load.
* @param iFetchPlan
* Fetch plan used
* @param iIgnoreCache
* Ignore cache or use it
* @return The loaded entity
*/
public <RET extends T> RET load(ORID iRecordId, String iFetchPlan, boolean iIgnoreCache);
public <RET extends T> RET load(ORID iRecordId, String iFetchPlan, boolean iIgnoreCache, boolean loadTombstone);
/**
* Saves an entity in synchronous mode. If the entity is not dirty, then the operation will be ignored. For custom entity
* implementations assure to set the entity as dirty.
*
* @param iObject
* The entity to save
* @return The saved entity.
*/
public <RET extends T> RET save(T iObject);
/**
* Saves an entity specifying the mode. If the entity is not dirty, then the operation will be ignored. For custom entity
* implementations assure to set the entity as dirty. If the cluster does not exist, an error will be thrown.
*
*
* @param iObject
* The entity to save
* @param iMode
* Mode of save: synchronous (default) or asynchronous
* @param iForceCreate
* Flag that indicates that record should be created. If record with current rid already exists, exception is thrown
* @param iRecordCreatedCallback
* @param iRecordUpdatedCallback
*/
public <RET extends T> RET save(T iObject, OPERATION_MODE iMode, boolean iForceCreate,
ORecordCallback<? extends Number> iRecordCreatedCallback, ORecordCallback<ORecordVersion> iRecordUpdatedCallback);
/**
* Saves an entity in the specified cluster in synchronous mode. If the entity is not dirty, then the operation will be ignored.
* For custom entity implementations assure to set the entity as dirty. If the cluster does not exist, an error will be thrown.
*
* @param iObject
* The entity to save
* @param iClusterName
* Name of the cluster where to save
* @return The saved entity.
*/
public <RET extends T> RET save(T iObject, String iClusterName);
public boolean updatedReplica(T iObject);
/**
* Saves an entity in the specified cluster specifying the mode. If the entity is not dirty, then the operation will be ignored.
* For custom entity implementations assure to set the entity as dirty. If the cluster does not exist, an error will be thrown.
*
*
* @param iObject
* The entity to save
* @param iClusterName
* Name of the cluster where to save
* @param iMode
* Mode of save: synchronous (default) or asynchronous
* @param iForceCreate
* Flag that indicates that record should be created. If record with current rid already exists, exception is thrown
* @param iRecordCreatedCallback
* @param iRecordUpdatedCallback
*/
public <RET extends T> RET save(T iObject, String iClusterName, OPERATION_MODE iMode, boolean iForceCreate,
ORecordCallback<? extends Number> iRecordCreatedCallback, ORecordCallback<ORecordVersion> iRecordUpdatedCallback);
/**
* Deletes an entity from the database in synchronous mode.
*
* @param iObject
* The entity to delete.
* @return The Database instance itself giving a "fluent interface". Useful to call multiple methods in chain.
*/
public ODatabaseComplex<T> delete(T iObject);
/**
* Deletes the entity with the received RID from the database.
*
* @param iRID
* The RecordID to delete.
* @return The Database instance itself giving a "fluent interface". Useful to call multiple methods in chain.
*/
public ODatabaseComplex<T> delete(ORID iRID);
/**
* Deletes the entity with the received RID from the database.
*
* @param iRID
* The RecordID to delete.
* @param iVersion
* for MVCC
* @return The Database instance itself giving a "fluent interface". Useful to call multiple methods in chain.
*/
public ODatabaseComplex<T> delete(ORID iRID, ORecordVersion iVersion);
public ODatabaseComplex<T> cleanOutRecord(ORID rid, ORecordVersion version);
/**
* Return active transaction. Cannot be null. If no transaction is active, then a OTransactionNoTx instance is returned.
*
* @return OTransaction implementation
*/
public OTransaction getTransaction();
/**
* Begins a new transaction. By default the type is OPTIMISTIC. If a previous transaction was started it will be rollbacked and
* closed before to start a new one. A transaction once begun has to be closed by calling the {@link #commit()} or
* {@link #rollback()}.
*
* @return The Database instance itself giving a "fluent interface". Useful to call multiple methods in chain.
*/
public ODatabaseComplex<T> begin();
/**
* Begins a new transaction specifying the transaction type. If a previous transaction was started it will be rollbacked and
* closed before to start a new one. A transaction once begun has to be closed by calling the {@link #commit()} or
* {@link #rollback()}.
*
* @return The Database instance itself giving a "fluent interface". Useful to call multiple methods in chain.
*/
public ODatabaseComplex<T> begin(TXTYPE iStatus);
/**
* Attaches a transaction as current.
*
* @return The Database instance itself giving a "fluent interface". Useful to call multiple methods in chain.
*/
public ODatabaseComplex<T> begin(OTransaction iTx) throws OTransactionException;
/**
* Commits the current transaction. The approach is all or nothing. All changes will be permanent following the storage type. If
* the operation succeed all the entities changed inside the transaction context will be effectives. If the operation fails, all
* the changed entities will be restored in the datastore. Memory instances are not guaranteed to being restored as well.
*
* @return
*/
public ODatabaseComplex<T> commit() throws OTransactionException;
/**
* Aborts the current running transaction. All the pending changed entities will be restored in the datastore. Memory instances
* are not guaranteed to being restored as well.
*
* @return
*/
public ODatabaseComplex<T> rollback() throws OTransactionException;
/**
* Execute a query against the database.
*
* @param iCommand
* Query command
* @param iArgs
* Optional parameters to bind to the query
* @return List of POJOs
*/
public <RET extends List<?>> RET query(final OQuery<?> iCommand, final Object... iArgs);
/**
* Execute a command against the database. A command can be a SQL statement or a Procedure. If the OStorage used is remote
* (OStorageRemote) then the command will be executed remotely and the result returned back to the calling client.
*
* @param iCommand
* Command request to execute.
* @return The same Command request received as parameter.
* @see OStorageRemote
*/
public <RET extends OCommandRequest> RET command(OCommandRequest iCommand);
/**
* Return the OMetadata instance. Cannot be null.
*
* @return The OMetadata instance.
*/
public OMetadata getMetadata();
/**
* Returns the database owner. Used in wrapped instances to know the up level ODatabase instance.
*
* @return Returns the database owner.
*/
public ODatabaseComplex<?> getDatabaseOwner();
/**
* Internal. Sets the database owner.
*/
public ODatabaseComplex<?> setDatabaseOwner(ODatabaseComplex<?> iOwner);
/**
* Return the underlying database. Used in wrapper instances to know the down level ODatabase instance.
*
* @return The underlying ODatabase implementation.
*/
public <DB extends ODatabase> DB getUnderlying();
/**
* Internal method. Don't call it directly unless you're building an internal component.
*/
public void setInternal(ATTRIBUTES attribute, Object iValue);
/**
* Registers a hook to listen all events for Records.
*
* @param iHookImpl
* ORecordHook implementation
* @return The Database instance itself giving a "fluent interface". Useful to call multiple methods in chain.
*/
public <DB extends ODatabaseComplex<?>> DB registerHook(ORecordHook iHookImpl);
public <DB extends ODatabaseComplex<?>> DB registerHook(final ORecordHook iHookImpl, HOOK_POSITION iPosition);
/**
* Retrieves all the registered hooks.
*
* @return A not-null unmodifiable set of ORecordHook instances. If there are no hooks registered, the Set is empty.
*/
public Set<ORecordHook> getHooks();
/**
* Unregisters a previously registered hook.
*
* @param iHookImpl
* ORecordHook implementation
* @return The Database instance itself giving a "fluent interface". Useful to call multiple methods in chain.
*/
public <DB extends ODatabaseComplex<?>> DB unregisterHook(ORecordHook iHookImpl);
/**
* Invokes the callback on all the configured hooks.
*
* @param iObject
* The object passed change based on the Database implementation: records for {@link ODatabaseRecord} implementations and
* POJO for {@link ODatabaseObject} implementations.
* @return True if the input record is changed, otherwise false
*/
public RESULT callbackHooks(TYPE iType, OIdentifiable iObject);
/**
* Returns if the Multi Version Concurrency Control is enabled or not. If enabled the version of the record is checked before each
* update and delete against the records.
*
* @return true if enabled, otherwise false
* @see ODatabaseRecord#setMVCC(boolean)
*/
public boolean isMVCC();
/**
* Enables or disables the Multi-Version Concurrency Control. If enabled the version of the record is checked before each update
* and delete against the records.
*
* @param iValue
* @see ODatabaseRecord#isMVCC()
* @return The Database instance itself giving a "fluent interface". Useful to call multiple methods in chain.
*/
public <DB extends ODatabaseComplex<?>> DB setMVCC(boolean iValue);
public String getType();
}
|
core/src/main/java/com/orientechnologies/orient/core/db/ODatabaseComplex.java
|
/*
* Copyright 2010-2012 Luca Garulli (l.garulli--at--orientechnologies.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.orientechnologies.orient.core.db;
import java.util.List;
import java.util.Set;
import com.orientechnologies.orient.core.command.OCommandRequest;
import com.orientechnologies.orient.core.db.object.ODatabaseObject;
import com.orientechnologies.orient.core.db.record.ODatabaseRecord;
import com.orientechnologies.orient.core.db.record.OIdentifiable;
import com.orientechnologies.orient.core.dictionary.ODictionary;
import com.orientechnologies.orient.core.exception.OTransactionException;
import com.orientechnologies.orient.core.hook.ORecordHook;
import com.orientechnologies.orient.core.hook.ORecordHook.HOOK_POSITION;
import com.orientechnologies.orient.core.hook.ORecordHook.RESULT;
import com.orientechnologies.orient.core.hook.ORecordHook.TYPE;
import com.orientechnologies.orient.core.id.ORID;
import com.orientechnologies.orient.core.metadata.OMetadata;
import com.orientechnologies.orient.core.metadata.security.OSecurity;
import com.orientechnologies.orient.core.metadata.security.OUser;
import com.orientechnologies.orient.core.query.OQuery;
import com.orientechnologies.orient.core.storage.ORecordCallback;
import com.orientechnologies.orient.core.storage.OStorage;
import com.orientechnologies.orient.core.tx.OTransaction;
import com.orientechnologies.orient.core.tx.OTransaction.TXTYPE;
import com.orientechnologies.orient.core.version.ORecordVersion;
/**
 * Database interface that represents a complex database. It extends the base ODatabase interface adding all the higher-level APIs
 * used to work with records. Entities can be implementations of the ORecord class for ODatabaseRecord or any POJO for
 * ODatabaseObject. The behaviour of the datastore depends on the OStorage implementation used.
 *
 * @author Luca Garulli
 *
 * @see ODatabaseRecord
 * @see ODatabaseObject
 * @see OStorage
 * @param <T>
 *          Type of the entities handled by this database (records or POJOs)
 */
public interface ODatabaseComplex<T extends Object> extends ODatabase, OUserObject2RecordHandler {
/**
 * Mode used by save/delete operations: blocking (SYNCHRONOUS), non-blocking with a callback answer (ASYNCHRONOUS), or
 * fire-and-forget (ASYNCHRONOUS_NOANSWER).
 */
public enum OPERATION_MODE {
SYNCHRONOUS, ASYNCHRONOUS, ASYNCHRONOUS_NOANSWER
}
/**
 * Creates a new entity instance.
 *
 * @return The new instance.
 */
public <RET extends Object> RET newInstance();
/**
 * Returns the Dictionary manual index.
 *
 * @return ODictionary instance
 */
public ODictionary<T> getDictionary();
/**
 * Returns the current user logged into the database.
 *
 * @return The current OUser instance
 * @see OSecurity
 */
public OUser getUser();
/**
 * Loads the entity and returns it.
 *
 * @param iObject
 *          The entity to load. If the entity was already loaded it will be reloaded and all the changes will be lost.
 * @return The loaded entity
 */
public <RET extends T> RET load(T iObject);
/**
 * Loads a record using a fetch plan.
 *
 * @param iObject
 *          Record to load
 * @param iFetchPlan
 *          Fetch plan used
 * @return The record received
 */
public <RET extends T> RET load(T iObject, String iFetchPlan);
/**
 * Loads a record using a fetch plan, specifying if the cache must be ignored and whether tombstone records are returned.
 *
 * @param iObject
 *          Record to load
 * @param iFetchPlan
 *          Fetch plan used
 * @param iIgnoreCache
 *          Ignore cache or use it
 * @param loadTombstone
 *          Whether records marked as tombstones are loaded as well (NOTE(review): exact tombstone semantics depend on the
 *          storage implementation — confirm there)
 * @return The record received
 */
public <RET extends T> RET load(T iObject, String iFetchPlan, boolean iIgnoreCache, boolean loadTombstone);
/**
 * Loads a record using a fetch plan.
 *
 * @param iObject
 *          Record to load
 * @param iFetchPlan
 *          Fetch plan used
 * @param iIgnoreCache
 *          Ignore cache or use it
 * @return The record received
 */
public <RET extends T> RET load(T iObject, String iFetchPlan, boolean iIgnoreCache);
/**
 * Force the reloading of the entity.
 *
 * @param iObject
 *          The entity to load. If the entity was already loaded it will be reloaded and all the changes will be lost.
 * @param iFetchPlan
 *          Fetch plan used
 * @param iIgnoreCache
 *          Ignore cache or use it
 * @return The loaded entity
 */
public <RET extends T> RET reload(final T iObject, String iFetchPlan, boolean iIgnoreCache);
/**
 * Loads the entity by the Record ID.
 *
 * @param iRecordId
 *          The unique record id of the entity to load.
 * @return The loaded entity
 */
public <RET extends T> RET load(ORID iRecordId);
/**
 * Loads the entity by the Record ID using a fetch plan.
 *
 * @param iRecordId
 *          The unique record id of the entity to load.
 * @param iFetchPlan
 *          Fetch plan used
 * @return The loaded entity
 */
public <RET extends T> RET load(ORID iRecordId, String iFetchPlan);
/**
 * Loads the entity by the Record ID using a fetch plan and specifying if the cache must be ignored.
 *
 * @param iRecordId
 *          The unique record id of the entity to load.
 * @param iFetchPlan
 *          Fetch plan used
 * @param iIgnoreCache
 *          Ignore cache or use it
 * @return The loaded entity
 */
public <RET extends T> RET load(ORID iRecordId, String iFetchPlan, boolean iIgnoreCache);
/**
 * Loads the entity by the Record ID using a fetch plan, specifying if the cache must be ignored and whether tombstone records
 * are returned.
 *
 * @param iRecordId
 *          The unique record id of the entity to load.
 * @param iFetchPlan
 *          Fetch plan used
 * @param iIgnoreCache
 *          Ignore cache or use it
 * @param loadTombstone
 *          Whether records marked as tombstones are loaded as well
 * @return The loaded entity
 */
public <RET extends T> RET load(ORID iRecordId, String iFetchPlan, boolean iIgnoreCache, boolean loadTombstone);
/**
 * Saves an entity in synchronous mode. If the entity is not dirty, then the operation will be ignored. For custom entity
 * implementations assure to set the entity as dirty.
 *
 * @param iObject
 *          The entity to save
 * @return The saved entity.
 */
public <RET extends T> RET save(T iObject);
/**
 * Saves an entity specifying the mode. If the entity is not dirty, then the operation will be ignored. For custom entity
 * implementations assure to set the entity as dirty. If the cluster does not exist, an error will be thrown.
 *
 *
 * @param iObject
 *          The entity to save
 * @param iMode
 *          Mode of save: synchronous (default) or asynchronous
 * @param iForceCreate
 *          Flag that indicates that record should be created. If record with current rid already exists, exception is thrown
 * @param iRecordCreatedCallback
 *          Callback invoked when the record has been created (used by the asynchronous modes)
 * @param iRecordUpdatedCallback
 *          Callback invoked when the record has been updated (used by the asynchronous modes)
 * @return The saved entity.
 */
public <RET extends T> RET save(T iObject, OPERATION_MODE iMode, boolean iForceCreate,
ORecordCallback<? extends Number> iRecordCreatedCallback, ORecordCallback<ORecordVersion> iRecordUpdatedCallback);
/**
 * Saves an entity in the specified cluster in synchronous mode. If the entity is not dirty, then the operation will be ignored.
 * For custom entity implementations assure to set the entity as dirty. If the cluster does not exist, an error will be thrown.
 *
 * @param iObject
 *          The entity to save
 * @param iClusterName
 *          Name of the cluster where to save
 * @return The saved entity.
 */
public <RET extends T> RET save(T iObject, String iClusterName);
/**
 * Updates a replica of the given entity. NOTE(review): appears to be internal distributed/replication support — confirm the
 * exact semantics with the storage implementation before using it in application code.
 *
 * @param iObject
 *          The entity whose replica is updated
 * @return true if the replica was updated, otherwise false
 */
public boolean updatedReplica(T iObject);
/**
 * Saves an entity in the specified cluster specifying the mode. If the entity is not dirty, then the operation will be ignored.
 * For custom entity implementations assure to set the entity as dirty. If the cluster does not exist, an error will be thrown.
 *
 *
 * @param iObject
 *          The entity to save
 * @param iClusterName
 *          Name of the cluster where to save
 * @param iMode
 *          Mode of save: synchronous (default) or asynchronous
 * @param iForceCreate
 *          Flag that indicates that record should be created. If record with current rid already exists, exception is thrown
 * @param iRecordCreatedCallback
 *          Callback invoked when the record has been created (used by the asynchronous modes)
 * @param iRecordUpdatedCallback
 *          Callback invoked when the record has been updated (used by the asynchronous modes)
 * @return The saved entity.
 */
public <RET extends T> RET save(T iObject, String iClusterName, OPERATION_MODE iMode, boolean iForceCreate,
ORecordCallback<? extends Number> iRecordCreatedCallback, ORecordCallback<ORecordVersion> iRecordUpdatedCallback);
/**
 * Deletes an entity from the database in synchronous mode.
 *
 * @param iObject
 *          The entity to delete.
 * @return The Database instance itself giving a "fluent interface". Useful to call multiple methods in chain.
 */
public ODatabaseComplex<T> delete(T iObject);
/**
 * Deletes the entity with the received RID from the database.
 *
 * @param iRID
 *          The RecordID to delete.
 * @return The Database instance itself giving a "fluent interface". Useful to call multiple methods in chain.
 */
public ODatabaseComplex<T> delete(ORID iRID);
/**
 * Deletes the entity with the received RID from the database.
 *
 * @param iRID
 *          The RecordID to delete.
 * @param iVersion
 *          for MVCC
 * @return The Database instance itself giving a "fluent interface". Useful to call multiple methods in chain.
 */
public ODatabaseComplex<T> delete(ORID iRID, ORecordVersion iVersion);
/**
 * Removes the record with the given RID, checking the given version for MVCC. NOTE(review): presumably removes the record
 * entirely rather than leaving a tombstone, unlike {@link #delete(ORID, ORecordVersion)} — confirm against the storage
 * implementation.
 *
 * @param rid
 *          The RecordID to clean out
 * @param version
 *          Record version used for the MVCC check
 * @return The Database instance itself giving a "fluent interface". Useful to call multiple methods in chain.
 */
public ODatabaseComplex<T> cleanOutRecord(ORID rid, ORecordVersion version);
/**
 * Return active transaction. Cannot be null. If no transaction is active, then a OTransactionNoTx instance is returned.
 *
 * @return OTransaction implementation
 */
public OTransaction getTransaction();
/**
 * Begins a new transaction. By default the type is OPTIMISTIC. If a previous transaction was started it will be rolled back and
 * closed before to start a new one. A transaction once begun has to be closed by calling the {@link #commit()} or
 * {@link #rollback()}.
 *
 * @return The Database instance itself giving a "fluent interface". Useful to call multiple methods in chain.
 */
public ODatabaseComplex<T> begin();
/**
 * Begins a new transaction specifying the transaction type. If a previous transaction was started it will be rolled back and
 * closed before to start a new one. A transaction once begun has to be closed by calling the {@link #commit()} or
 * {@link #rollback()}.
 *
 * @param iStatus
 *          Transaction type to start (e.g. OPTIMISTIC)
 * @return The Database instance itself giving a "fluent interface". Useful to call multiple methods in chain.
 */
public ODatabaseComplex<T> begin(TXTYPE iStatus);
/**
 * Attaches a transaction as current.
 *
 * @param iTx
 *          The transaction instance to attach as the current one
 * @return The Database instance itself giving a "fluent interface". Useful to call multiple methods in chain.
 * @throws OTransactionException
 *           if the transaction cannot be attached
 */
public ODatabaseComplex<T> begin(OTransaction iTx) throws OTransactionException;
/**
 * Commits the current transaction. The approach is all or nothing. All changes will be permanent following the storage type. If
 * the operation succeeds all the entities changed inside the transaction context will be effective. If the operation fails, all
 * the changed entities will be restored in the datastore. Memory instances are not guaranteed to be restored as well.
 *
 * @return The Database instance itself giving a "fluent interface". Useful to call multiple methods in chain.
 */
public ODatabaseComplex<T> commit() throws OTransactionException;
/**
 * Aborts the current running transaction. All the pending changed entities will be restored in the datastore. Memory instances
 * are not guaranteed to be restored as well.
 *
 * @return The Database instance itself giving a "fluent interface". Useful to call multiple methods in chain.
 */
public ODatabaseComplex<T> rollback() throws OTransactionException;
/**
 * Execute a query against the database.
 *
 * @param iCommand
 *          Query command
 * @param iArgs
 *          Optional parameters to bind to the query
 * @return List of POJOs
 */
public <RET extends List<?>> RET query(final OQuery<?> iCommand, final Object... iArgs);
/**
 * Execute a command against the database. A command can be a SQL statement or a Procedure. If the OStorage used is remote
 * (OStorageRemote) then the command will be executed remotely and the result returned back to the calling client.
 *
 * @param iCommand
 *          Command request to execute.
 * @return The same Command request received as parameter.
 * @see OStorageRemote
 */
public <RET extends OCommandRequest> RET command(OCommandRequest iCommand);
/**
 * Return the OMetadata instance. Cannot be null.
 *
 * @return The OMetadata instance.
 */
public OMetadata getMetadata();
/**
 * Returns the database owner. Used in wrapped instances to know the up level ODatabase instance.
 *
 * @return Returns the database owner.
 */
public ODatabaseComplex<?> getDatabaseOwner();
/**
 * Internal. Sets the database owner.
 *
 * @param iOwner
 *          The wrapping ODatabaseComplex instance to set as owner
 * @return This database instance
 */
public ODatabaseComplex<?> setDatabaseOwner(ODatabaseComplex<?> iOwner);
/**
 * Return the underlying database. Used in wrapper instances to know the down level ODatabase instance.
 *
 * @return The underlying ODatabase implementation.
 */
public <DB extends ODatabase> DB getUnderlying();
/**
 * Internal method. Don't call it directly unless you're building an internal component.
 *
 * @param attribute
 *          Database attribute to set
 * @param iValue
 *          New value for the attribute
 */
public void setInternal(ATTRIBUTES attribute, Object iValue);
/**
 * Registers a hook to listen all events for Records.
 *
 * @param iHookImpl
 *          ORecordHook implementation
 * @return The Database instance itself giving a "fluent interface". Useful to call multiple methods in chain.
 */
public <DB extends ODatabaseComplex<?>> DB registerHook(ORecordHook iHookImpl);
/**
 * Registers a hook to listen all events for Records, at the given position in the hook chain.
 *
 * @param iHookImpl
 *          ORecordHook implementation
 * @param iPosition
 *          Position in the hook chain at which the hook is invoked
 * @return The Database instance itself giving a "fluent interface". Useful to call multiple methods in chain.
 */
public <DB extends ODatabaseComplex<?>> DB registerHook(final ORecordHook iHookImpl, HOOK_POSITION iPosition);
/**
 * Retrieves all the registered hooks.
 *
 * @return A not-null unmodifiable set of ORecordHook instances. If there are no hooks registered, the Set is empty.
 */
public Set<ORecordHook> getHooks();
/**
 * Unregisters a previously registered hook.
 *
 * @param iHookImpl
 *          ORecordHook implementation
 * @return The Database instance itself giving a "fluent interface". Useful to call multiple methods in chain.
 */
public <DB extends ODatabaseComplex<?>> DB unregisterHook(ORecordHook iHookImpl);
/**
 * Invokes the callback on all the configured hooks.
 *
 * @param iType
 *          The hook event type being fired
 * @param iObject
 *          The object passed change based on the Database implementation: records for {@link ODatabaseRecord} implementations and
 *          POJO for {@link ODatabaseObject} implementations.
 * @return The aggregated RESULT computed by the invoked hooks
 */
public RESULT callbackHooks(TYPE iType, OIdentifiable iObject);
/**
 * Returns if the Multi Version Concurrency Control is enabled or not. If enabled the version of the record is checked before each
 * update and delete against the records.
 *
 * @return true if enabled, otherwise false
 * @see ODatabaseRecord#setMVCC(boolean)
 */
public boolean isMVCC();
/**
 * Enables or disables the Multi-Version Concurrency Control. If enabled the version of the record is checked before each update
 * and delete against the records.
 *
 * @param iValue
 *          true to enable MVCC, false to disable it
 * @see ODatabaseRecord#isMVCC()
 * @return The Database instance itself giving a "fluent interface". Useful to call multiple methods in chain.
 */
public <DB extends ODatabaseComplex<?>> DB setMVCC(boolean iValue);
/**
 * Returns the type of this database as a string. NOTE(review): the set of possible values depends on the concrete
 * implementation — confirm there.
 *
 * @return The database type identifier
 */
public String getType();
}
|
Update core/src/main/java/com/orientechnologies/orient/core/db/ODatabaseComplex.java
Allow set user back to current database instance
|
core/src/main/java/com/orientechnologies/orient/core/db/ODatabaseComplex.java
|
Update core/src/main/java/com/orientechnologies/orient/core/db/ODatabaseComplex.java
|
|
Java
|
apache-2.0
|
c7cfbba898b7198e445db8e615cfaf635ad0f8bf
| 0
|
mthiele/mvvmFX,sialcasa/mvvmFX,mthiele/mvvmFX,ThirstyGoat/mvvmFX,ThirstyGoat/mvvmFX,sialcasa/mvvmFX,ThirstyGoat/mvvmFX,sialcasa/mvvmFX,mthiele/mvvmFX
|
package de.saxsys.mvvmfx.utils.commands;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.fail;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.TimeUnit;
import de.saxsys.mvvmfx.testingutils.GCVerifier;
import javafx.beans.property.BooleanProperty;
import javafx.beans.property.SimpleBooleanProperty;
import javafx.beans.value.ChangeListener;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
/**
 * Unit tests for {@code CompositeCommand}: verifies that executability is the conjunction of all
 * registered child commands' conditions, that commands can be registered/unregistered dynamically,
 * and that the running state is reported correctly for synchronous and asynchronous children.
 */
public class CompositeCommandTest {
// First child command, guarded by condition1; called1 records that it executed.
private BooleanProperty condition1;
private BooleanProperty called1;
private DelegateCommand delegateCommand1;
// Second child command, guarded by condition2; called2 records that it executed.
private BooleanProperty condition2;
private BooleanProperty called2;
private DelegateCommand delegateCommand2;
/** Creates two synchronous delegate commands whose conditions start out as {@code true}. */
@Before
public void init() {
condition1 = new SimpleBooleanProperty(true);
called1 = new SimpleBooleanProperty();
delegateCommand1 = new DelegateCommand(() -> called1.set(true), condition1);
condition2 = new SimpleBooleanProperty(true);
called2 = new SimpleBooleanProperty();
delegateCommand2 = new DelegateCommand(() -> called2.set(true), condition2);
}
/** The composite is executable only while every child's condition is true. */
@Test
public void executable() throws Exception {
CompositeCommand compositeCommand = new CompositeCommand(delegateCommand1, delegateCommand2);
// Force a GC so the test also catches listeners held only by weak references.
GCVerifier.forceGC();
assertTrue(compositeCommand.isExecutable());
assertTrue(compositeCommand.executableProperty().get());
condition1.set(false);
assertFalse(compositeCommand.isExecutable());
assertFalse(compositeCommand.executableProperty().get());
condition2.set(false);
assertFalse(compositeCommand.isExecutable());
assertFalse(compositeCommand.executableProperty().get());
condition1.set(true);
// Still not executable: condition2 is false.
assertFalse(compositeCommand.isExecutable());
assertFalse(compositeCommand.executableProperty().get());
condition2.set(true);
assertTrue(compositeCommand.isExecutable());
assertTrue(compositeCommand.executableProperty().get());
}
/** Executability follows registration: only currently registered commands are considered. */
@Test
public void executable2() throws Exception {
CompositeCommand compositeCommand = new CompositeCommand();
GCVerifier.forceGC();
// An empty composite is executable by definition.
assertThat(compositeCommand.isExecutable()).isTrue();
assertThat(compositeCommand.executableProperty().get()).isTrue();
compositeCommand.register(delegateCommand1);
GCVerifier.forceGC();
assertThat(compositeCommand.isExecutable()).isTrue();
assertThat(compositeCommand.executableProperty().get()).isTrue();
condition1.setValue(false);
assertThat(compositeCommand.isExecutable()).isFalse();
assertThat(compositeCommand.executableProperty().get()).isFalse();
condition1.setValue(true);
assertThat(compositeCommand.isExecutable()).isTrue();
assertThat(compositeCommand.executableProperty().get()).isTrue();
// condition2 is false, but delegateCommand2 is not registered yet, so it must not matter.
condition2.setValue(false);
assertThat(compositeCommand.isExecutable()).isTrue();
assertThat(compositeCommand.executableProperty().get()).isTrue();
compositeCommand.register(delegateCommand2);
GCVerifier.forceGC();
assertThat(compositeCommand.isExecutable()).isFalse();
assertThat(compositeCommand.executableProperty().get()).isFalse();
compositeCommand.unregister(delegateCommand2);
GCVerifier.forceGC();
assertThat(compositeCommand.isExecutable()).isTrue();
assertThat(compositeCommand.executableProperty().get()).isTrue();
}
/** Registering a non-executable command disables the composite; unregistering restores it. */
@Test
public void register() throws Exception {
CompositeCommand compositeCommand = new CompositeCommand(delegateCommand1);
assertTrue(compositeCommand.isExecutable());
assertTrue(compositeCommand.executableProperty().get());
// prepare delegateCommand2
condition2.set(false);
compositeCommand.register(delegateCommand2);
assertFalse(compositeCommand.isExecutable());
assertFalse(compositeCommand.executableProperty().get());
compositeCommand.unregister(delegateCommand2);
assertTrue(compositeCommand.isExecutable());
assertTrue(compositeCommand.executableProperty().get());
}
/** The running property transitions false -> true -> false around a synchronous execution. */
@Test
public void running() throws Exception {
BooleanProperty run = new SimpleBooleanProperty();
BooleanProperty finished = new SimpleBooleanProperty();
CompositeCommand compositeCommand = new CompositeCommand(delegateCommand1, delegateCommand2);
// We have to check the running Property with this mechanism, because it is processed synchronously and we can't
// hook between the state changes.
compositeCommand.runningProperty().addListener((ChangeListener<Boolean>) (observable, oldValue, newValue) -> {
if (!oldValue && newValue)
run.set(true);
if (oldValue && !newValue)
finished.set(true);
});
compositeCommand.execute();
assertTrue(run.get());
assertTrue(finished.get());
}
/** Unregistering every child must not throw from internal listeners (currently broken, hence @Ignore). */
@Test
@Ignore("ignore until fixed")
public void allCommandsAreUnregistered() throws Exception{
// UncaughtExceptionHandler is defined to be able to detect exception from listeners.
Thread.currentThread().setUncaughtExceptionHandler((thread, exception) -> fail("Exception was thrown", exception));
CompositeCommand compositeCommand = new CompositeCommand(delegateCommand1, delegateCommand2);
compositeCommand.unregister(delegateCommand1);
compositeCommand.unregister(delegateCommand2);
}
/** Asynchronous children report running=true while in flight; synchronous ones finish before execute() returns. */
@Test
public void longRunningAsyncComposite() throws Exception {
BooleanProperty condition = new SimpleBooleanProperty(true);
// Completed by the slowest async command so the test can wait for the whole composite.
CompletableFuture<Void> future = new CompletableFuture<>();
DelegateCommand delegateCommand1 = new DelegateCommand(() -> {
sleep(500);
}, condition, true);
DelegateCommand delegateCommand2 = new DelegateCommand(() -> {
sleep(1000);
future.complete(null);
}, condition, true);
DelegateCommand delegateCommand3 = new DelegateCommand(() -> {
}, condition, false);
CompositeCommand compositeCommand = new CompositeCommand(delegateCommand1, delegateCommand2, delegateCommand3);
assertFalse(compositeCommand.runningProperty().get());
assertFalse(delegateCommand1.runningProperty().get());
assertFalse(delegateCommand2.runningProperty().get());
assertFalse(delegateCommand3.runningProperty().get());
compositeCommand.execute();
// The two async commands are still sleeping; the synchronous one already completed.
assertTrue(compositeCommand.runningProperty().get());
assertTrue(delegateCommand1.runningProperty().get());
assertTrue(delegateCommand2.runningProperty().get());
assertFalse(delegateCommand3.runningProperty().get());
future.get(3, TimeUnit.SECONDS);
assertFalse(compositeCommand.runningProperty().get());
assertFalse(delegateCommand1.runningProperty().get());
assertFalse(delegateCommand2.runningProperty().get());
assertFalse(delegateCommand3.runningProperty().get());
}
/**
 * Sleeps for the given number of milliseconds.
 *
 * @param millis
 *          how long to sleep
 */
private void sleep(long millis) {
try {
Thread.sleep(millis);
} catch (InterruptedException e) {
// Restore the interrupt flag instead of swallowing it, so callers can still observe the interruption.
Thread.currentThread().interrupt();
e.printStackTrace();
}
}
}
|
mvvmfx/src/test/java/de/saxsys/mvvmfx/utils/commands/CompositeCommandTest.java
|
package de.saxsys.mvvmfx.utils.commands;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.TimeUnit;
import de.saxsys.mvvmfx.testingutils.GCVerifier;
import javafx.beans.property.BooleanProperty;
import javafx.beans.property.SimpleBooleanProperty;
import javafx.beans.value.ChangeListener;
import org.junit.Before;
import org.junit.Test;
public class CompositeCommandTest {
private BooleanProperty condition1;
private BooleanProperty called1;
private DelegateCommand delegateCommand1;
private BooleanProperty condition2;
private BooleanProperty called2;
private DelegateCommand delegateCommand2;
@Before
public void init() {
condition1 = new SimpleBooleanProperty(true);
called1 = new SimpleBooleanProperty();
delegateCommand1 = new DelegateCommand(() -> called1.set(true), condition1);
condition2 = new SimpleBooleanProperty(true);
called2 = new SimpleBooleanProperty();
delegateCommand2 = new DelegateCommand(() -> called2.set(true), condition2);
}
@Test
public void executable() throws Exception {
CompositeCommand compositeCommand = new CompositeCommand(delegateCommand1, delegateCommand2);
GCVerifier.forceGC();
assertTrue(compositeCommand.isExecutable());
assertTrue(compositeCommand.executableProperty().get());
condition1.set(false);
assertFalse(compositeCommand.isExecutable());
assertFalse(compositeCommand.executableProperty().get());
condition2.set(false);
assertFalse(compositeCommand.isExecutable());
assertFalse(compositeCommand.executableProperty().get());
condition1.set(true);
assertFalse(compositeCommand.isExecutable());
assertFalse(compositeCommand.executableProperty().get());
condition2.set(true);
assertTrue(compositeCommand.isExecutable());
assertTrue(compositeCommand.executableProperty().get());
}
@Test
public void executable2() throws Exception {
CompositeCommand compositeCommand = new CompositeCommand();
GCVerifier.forceGC();
assertThat(compositeCommand.isExecutable()).isTrue();
assertThat(compositeCommand.executableProperty().get()).isTrue();
compositeCommand.register(delegateCommand1);
GCVerifier.forceGC();
assertThat(compositeCommand.isExecutable()).isTrue();
assertThat(compositeCommand.executableProperty().get()).isTrue();
condition1.setValue(false);
assertThat(compositeCommand.isExecutable()).isFalse();
assertThat(compositeCommand.executableProperty().get()).isFalse();
condition1.setValue(true);
assertThat(compositeCommand.isExecutable()).isTrue();
assertThat(compositeCommand.executableProperty().get()).isTrue();
condition2.setValue(false);
assertThat(compositeCommand.isExecutable()).isTrue();
assertThat(compositeCommand.executableProperty().get()).isTrue();
compositeCommand.register(delegateCommand2);
GCVerifier.forceGC();
assertThat(compositeCommand.isExecutable()).isFalse();
assertThat(compositeCommand.executableProperty().get()).isFalse();
compositeCommand.unregister(delegateCommand2);
GCVerifier.forceGC();
assertThat(compositeCommand.isExecutable()).isTrue();
assertThat(compositeCommand.executableProperty().get()).isTrue();
}
@Test
public void register() throws Exception {
CompositeCommand compositeCommand = new CompositeCommand(delegateCommand1);
assertTrue(compositeCommand.isExecutable());
assertTrue(compositeCommand.executableProperty().get());
// prepare delegateCommand2
condition2.set(false);
compositeCommand.register(delegateCommand2);
assertFalse(compositeCommand.isExecutable());
assertFalse(compositeCommand.executableProperty().get());
compositeCommand.unregister(delegateCommand2);
assertTrue(compositeCommand.isExecutable());
assertTrue(compositeCommand.executableProperty().get());
}
@Test
public void running() throws Exception {
BooleanProperty run = new SimpleBooleanProperty();
BooleanProperty finished = new SimpleBooleanProperty();
CompositeCommand compositeCommand = new CompositeCommand(delegateCommand1, delegateCommand2);
// We have to check the running Property with this mechanism, because it is processed synchronously and we can't
// hook between the state changes.
compositeCommand.runningProperty().addListener((ChangeListener<Boolean>) (observable, oldValue, newValue) -> {
if (!oldValue && newValue)
run.set(true);
if (oldValue && !newValue)
finished.set(true);
});
compositeCommand.execute();
assertTrue(run.get());
assertTrue(finished.get());
}
@Test
public void longRunningAsyncComposite() throws Exception {
BooleanProperty condition = new SimpleBooleanProperty(true);
CompletableFuture<Void> future = new CompletableFuture<>();
DelegateCommand delegateCommand1 = new DelegateCommand(() -> {
sleep(500);
}, condition, true);
DelegateCommand delegateCommand2 = new DelegateCommand(() -> {
sleep(1000);
future.complete(null);
}, condition, true);
DelegateCommand delegateCommand3 = new DelegateCommand(() -> {
}, condition, false);
CompositeCommand compositeCommand = new CompositeCommand(delegateCommand1, delegateCommand2, delegateCommand3);
assertFalse(compositeCommand.runningProperty().get());
assertFalse(delegateCommand1.runningProperty().get());
assertFalse(delegateCommand2.runningProperty().get());
assertFalse(delegateCommand3.runningProperty().get());
compositeCommand.execute();
assertTrue(compositeCommand.runningProperty().get());
assertTrue(delegateCommand1.runningProperty().get());
assertTrue(delegateCommand2.runningProperty().get());
assertFalse(delegateCommand3.runningProperty().get());
future.get(3, TimeUnit.SECONDS);
assertFalse(compositeCommand.runningProperty().get());
assertFalse(delegateCommand1.runningProperty().get());
assertFalse(delegateCommand2.runningProperty().get());
assertFalse(delegateCommand3.runningProperty().get());
}
private void sleep(long millis) {
try {
Thread.sleep(millis);
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
}
|
#204 add test case to reproduce: Exception is thrown when last delegate command is removed.
|
mvvmfx/src/test/java/de/saxsys/mvvmfx/utils/commands/CompositeCommandTest.java
|
#204 add test case to reproduce: Exception is thrown when last delegate command is removed.
|
|
Java
|
apache-2.0
|
634b6146293a65e404ab0d29104716d850a2eace
| 0
|
adrielparedes/guvnor,cristianonicolai/guvnor,baldimir/guvnor,adrielparedes/guvnor,etirelli/guvnor,droolsjbpm/guvnor,kiereleaseuser/guvnor,Rikkola/guvnor,porcelli-forks/guvnor,wmedvede/guvnor,adrielparedes/guvnor,etirelli/guvnor,wmedvede/guvnor,porcelli-forks/guvnor,droolsjbpm/guvnor,kiereleaseuser/guvnor,baldimir/guvnor,psiroky/guvnor,cristianonicolai/guvnor,baldimir/guvnor,hxf0801/guvnor,nmirasch/guvnor,hxf0801/guvnor,psiroky/guvnor,yurloc/guvnor,mswiderski/guvnor,kiereleaseuser/guvnor,wmedvede/guvnor,nmirasch/guvnor,Rikkola/guvnor,droolsjbpm/guvnor,Rikkola/guvnor,mbiarnes/guvnor,cristianonicolai/guvnor,mbiarnes/guvnor,yurloc/guvnor,etirelli/guvnor,porcelli-forks/guvnor,hxf0801/guvnor,psiroky/guvnor,mbiarnes/guvnor,nmirasch/guvnor
|
/**
* Copyright 2010 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.guvnor.server.files;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.security.Principal;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.WeakHashMap;
import net.sf.webdav.ITransaction;
import net.sf.webdav.IWebdavStore;
import net.sf.webdav.StoredObject;
import org.apache.commons.io.IOUtils;
import org.drools.guvnor.server.security.AdminType;
import org.drools.guvnor.server.security.RoleTypes;
import org.drools.guvnor.server.security.WebDavPackageNameType;
import org.drools.repository.AssetItem;
import org.drools.repository.PackageItem;
import org.drools.repository.RulesRepository;
import org.drools.repository.VersionableItem;
import org.jboss.seam.contexts.Contexts;
import org.jboss.seam.security.Identity;
public class WebDAVImpl
implements
IWebdavStore {
/** for the rubbish OSX double data (the ._ rubbish) */
static Map<String, byte[]> osxDoubleData = Collections.synchronizedMap( new WeakHashMap<String, byte[]>() );
final ThreadLocal<RulesRepository> tlRepo = new ThreadLocal<RulesRepository>();
public WebDAVImpl(File f) {
}
public WebDAVImpl() {
}
public WebDAVImpl(RulesRepository testRepo) {
tlRepo.set( testRepo );
}
RulesRepository getRepo() {
return tlRepo.get();
}
public ITransaction begin(final Principal pr) {
tlRepo.set( RestAPIServlet.getRepository() );
return new ITransaction() {
public Principal getPrincipal() {
return pr;
}
};
}
public void checkAuthentication(ITransaction arg0) {
//already done
}
public void commit(ITransaction arg0) {
//System.out.println("COMMIT START");
getRepo().save();
tlRepo.set( null );
//System.out.println("COMMIT END");
}
public void createFolder(ITransaction arg0,
String uri) {
//System.out.println("creating folder:" + uri);
String[] path = getPath( uri );
if ( path[0].equals( "packages" ) && isAdmin() ) {
if ( path.length > 2 ) {
throw new UnsupportedOperationException( "Can't nest packages." );
}
RulesRepository repository = getRepo();
if ( repository.containsPackage( path[1] ) ) {
PackageItem pkg = repository.loadPackage( path[1] );
pkg.archiveItem( false );
pkg.checkin( "<restored by webdav>" );
} else {
repository.createPackage( path[1],
"<from webdav>" );
}
} else {
throw new UnsupportedOperationException( "Not able to create folders here..." );
}
}
public void createResource(ITransaction arg0,
String uri) {
//System.out.println("creating resource:" + uri);
//for mac OSX, ignore these annoying things
if ( uri.endsWith( ".DS_Store" ) ) return;
String[] path = getPath( uri );
if ( path[0].equals( "packages" ) && checkPackagePermission( path[1],
RoleTypes.PACKAGE_ADMIN ) ) {
if ( path.length > 3 ) {
throw new UnsupportedOperationException( "Can't do nested packages." );
}
String packageName = path[1];
String[] resource = AssetItem.getAssetNameFromFileName( path[2] );
RulesRepository repository = getRepo();
PackageItem pkg = repository.loadPackage( packageName );
//for mac OSX, ignore these resource fork files
if ( path[2].startsWith( "._" ) ) {
this.osxDoubleData.put( uri,
null );
return;
}
if ( pkg.containsAsset( resource[0] ) ) {
AssetItem lazarus = pkg.loadAsset( resource[0] );
lazarus.archiveItem( false );
//lazarus.checkin("<from webdav>");
} else {
AssetItem asset = pkg.addAsset( resource[0],
"" );
asset.updateFormat( resource[1] );
//asset.checkin("<from webdav>");
}
} else {
throw new UnsupportedOperationException( "Can't add assets here." );
}
}
public String[] getChildrenNames(ITransaction arg0,
String uri) {
//System.out.println("getChildrenNames :" + uri);
RulesRepository repository = getRepo();
String[] path = getPath( uri );
List<String> result = new ArrayList<String>();
if ( path.length == 0 ) {
return new String[]{"packages", "snapshots"};
}
if ( path[0].equals( "packages" ) ) {
if ( path.length > 2 ) {
return null;
}
if ( path.length == 1 ) {
listPackages( repository,
result );
} else if ( checkPackagePermission( path[1],
RoleTypes.PACKAGE_READONLY ) ) {
PackageItem pkg = repository.loadPackage( path[1] );
Iterator<AssetItem> it = pkg.getAssets();
while ( it.hasNext() ) {
AssetItem asset = it.next();
if ( !asset.isArchived() ) {
result.add( asset.getName() + "." + asset.getFormat() );
}
}
}
} else if ( path[0].equals( "snapshots" ) ) {
if ( path.length > 3 ) {
return null;
}
if ( path.length == 1 ) {
listPackages( repository,
result );
} else if ( path.length == 2 && checkPackagePermission( path[1],
RoleTypes.PACKAGE_READONLY ) ) {
String[] snaps = repository.listPackageSnapshots( path[1] );
return snaps;
} else if ( path.length == 3 && checkPackagePermission( path[1],
RoleTypes.PACKAGE_READONLY ) ) {
Iterator<AssetItem> it = repository.loadPackageSnapshot( path[1],
path[2] ).getAssets();
while ( it.hasNext() ) {
AssetItem asset = it.next();
if ( !asset.isArchived() ) {
result.add( asset.getName() + "." + asset.getFormat() );
}
}
} else {
throw new IllegalArgumentException();
}
} else {
throw new UnsupportedOperationException( "Not a valid path : " + path[0] );
}
return result.toArray( new String[result.size()] );
}
private void listPackages(RulesRepository repository,
List<String> result) {
Iterator<PackageItem> it = repository.listPackages();
while ( it.hasNext() ) {
PackageItem pkg = it.next();
String packageName = pkg.getName();
if ( !pkg.isArchived() && checkPackagePermission( packageName,
RoleTypes.PACKAGE_READONLY ) ) {
result.add( packageName );
}
}
}
public Date getCreationDate(String uri) {
//System.out.println("getCreationDate :" + uri);
RulesRepository repository = getRepo();
String[] path = getPath( uri );
if ( path.length < 2 ) return new Date();
if ( path[0].equals( "packages" ) && checkPackagePermission( path[1],
RoleTypes.PACKAGE_READONLY ) ) {
PackageItem pkg = repository.loadPackage( path[1] );
if ( path.length == 2 ) {
//dealing with package
return pkg.getCreatedDate().getTime();
} else {
String fileName = path[2];
String assetName = AssetItem.getAssetNameFromFileName( fileName )[0];
AssetItem asset = pkg.loadAsset( assetName );
return asset.getCreatedDate().getTime();
}
} else if ( path[0].equals( "snapshots" ) && checkPackagePermission( path[1],
RoleTypes.PACKAGE_READONLY ) ) {
if ( path.length == 2 ) {
return new Date();
} else if ( path.length == 3 ) {
return repository.loadPackageSnapshot( path[1],
path[2] ).getCreatedDate().getTime();
} else if ( path.length == 4 ) {
PackageItem pkg = repository.loadPackageSnapshot( path[1],
path[2] );
AssetItem asset = pkg.loadAsset( AssetItem.getAssetNameFromFileName( path[3] )[0] );
return asset.getCreatedDate().getTime();
} else {
throw new UnsupportedOperationException();
}
} else {
throw new UnsupportedOperationException();
}
}
public Date getLastModified(String uri) {
//System.out.println("getLastModified :" + uri);
RulesRepository repository = getRepo();
String[] path = getPath( uri );
if ( path.length < 2 ) return new Date();
if ( path[0].equals( "packages" ) && checkPackagePermission( path[1],
RoleTypes.PACKAGE_READONLY ) ) {
PackageItem pkg = repository.loadPackage( path[1] );
if ( path.length == 2 ) {
//dealing with package
return pkg.getLastModified().getTime();
} else {
String fileName = path[2];
String assetName = AssetItem.getAssetNameFromFileName( fileName )[0];
AssetItem asset = pkg.loadAsset( assetName );
return asset.getLastModified().getTime();
}
} else if ( path[0].equals( "snapshots" ) && checkPackagePermission( path[1],
RoleTypes.PACKAGE_READONLY ) ) {
if ( path.length == 2 ) {
return new Date();
} else if ( path.length == 3 ) {
return repository.loadPackageSnapshot( path[1],
path[2] ).getLastModified().getTime();
} else if ( path.length == 4 ) {
PackageItem pkg = repository.loadPackageSnapshot( path[1],
path[2] );
AssetItem asset = pkg.loadAsset( AssetItem.getAssetNameFromFileName( path[3] )[0] );
return asset.getLastModified().getTime();
} else {
throw new UnsupportedOperationException();
}
} else {
throw new UnsupportedOperationException();
}
}
public InputStream getResourceContent(ITransaction arg0,
String uri) {
//System.out.println("get resource content:" + uri);
return getContent( uri );
}
public StoredObject getStoredObject(ITransaction arg0,
String uri) {
RulesRepository repository = getRepo();
String[] path = getPath( uri );
if ( path.length < 2 ) {
StoredObject so = new StoredObject();
so.setCreationDate( new Date() );
so.setFolder( isFolder( uri ) );
so.setLastModified( new Date() );
so.setResourceLength( 0 );
return so;
}
if ( path[0].equals( "packages" ) && checkPackagePermission( path[1],
RoleTypes.PACKAGE_READONLY ) ) {
PackageItem pkg = repository.loadPackage( path[1] );
if ( path.length == 2 ) {
//dealing with package
return createStoredObject( uri,
pkg,
0 );
} else {
String fileName = path[2];
String assetName = AssetItem.getAssetNameFromFileName( fileName )[0];
AssetItem asset;
try {
asset = pkg.loadAsset( assetName );
} catch ( Exception e ) {
return null;
}
return createStoredObject( uri,
asset,
asset.getContentLength() );
}
} else if ( path[0].equals( "snapshots" ) && checkPackagePermission( path[1],
RoleTypes.PACKAGE_READONLY ) ) {
if( path.length == 2 ){
PackageItem pkg = repository.loadPackage( path[1] );
StoredObject so = createStoredObject( uri,
pkg,
0 );
so.setFolder( isFolder( uri ) );
return so;
}
else if ( path.length == 3 ) {
PackageItem snapshot = repository.loadPackageSnapshot( path[1],
path[2] );
// AssetItem asset;
// try {
// asset = snapshot.loadAsset( AssetItem.getAssetNameFromFileName( path[2] )[0] );
// } catch ( Exception e ) {
// return null;
// }
return createStoredObject( uri,
snapshot,
0 );
} else if ( path.length == 4 ) {
PackageItem pkg = repository.loadPackageSnapshot( path[1],
path[2] );
AssetItem asset;
try {
asset = pkg.loadAsset( AssetItem.getAssetNameFromFileName( path[3] )[0] );
} catch ( Exception e ) {
return null;
}
return createStoredObject( uri,
asset,
asset.getContentLength() );
} else {
throw new UnsupportedOperationException();
}
} else {
throw new UnsupportedOperationException();
}
}
private StoredObject createStoredObject(String uri,
VersionableItem pi,
long resourceLength) {
StoredObject so = new StoredObject();
so.setCreationDate( pi.getCreatedDate().getTime() );
so.setFolder( isFolder( uri ) );
so.setLastModified( pi.getLastModified().getTime() );
so.setResourceLength( resourceLength );
return so;
}
private InputStream getContent(String uri) {
RulesRepository repository = getRepo();
String[] path = getPath( uri );
if ( path[0].equals( "packages" ) && checkPackagePermission( path[1],
RoleTypes.PACKAGE_READONLY ) ) {
String pkg = path[1];
String asset = AssetItem.getAssetNameFromFileName( path[2] )[0];
AssetItem assetItem = repository.loadPackage( pkg ).loadAsset( asset );
return getAssetData( assetItem );
} else if ( path[0].equals( "snapshots" ) && checkPackagePermission( path[1],
RoleTypes.PACKAGE_READONLY ) ) {
String pkg = path[1];
String snap = path[2];
String asset = AssetItem.getAssetNameFromFileName( path[3] )[0];
AssetItem assetItem = repository.loadPackageSnapshot( pkg,
snap ).loadAsset( asset );
return getAssetData( assetItem );
} else {
throw new UnsupportedOperationException();
}
}
private InputStream getAssetData(AssetItem assetItem) {
if ( assetItem.isBinary() ) {
return assetItem.getBinaryContentAttachment();
} else {
return new ByteArrayInputStream( assetItem.getContent().getBytes() );
}
}
public long getResourceLength(ITransaction arg0,
String uri) {
//System.out.println("get resource length :" + uri);
String[] path = getPath( uri );
try {
RulesRepository repo = getRepo();
if ( path.length == 3 && path[0].equals( "packages" ) && checkPackagePermission( path[1],
RoleTypes.PACKAGE_READONLY ) ) {
PackageItem pkg = repo.loadPackage( path[1] );
AssetItem asset = pkg.loadAsset( AssetItem.getAssetNameFromFileName( path[2] )[0] );
return asset.getContentLength();
} else if ( path.length == 4 && path[0].equals( "snapshots" ) && checkPackagePermission( path[1],
RoleTypes.PACKAGE_READONLY ) ) {
PackageItem pkg = repo.loadPackageSnapshot( path[1],
path[2] );
AssetItem asset = pkg.loadAsset( AssetItem.getAssetNameFromFileName( path[3] )[0] );
return asset.getContentLength();
} else {
return 0;
}
} catch ( Exception e ) {
System.err.println( "Not able to get content length" );
return 0;
}
}
boolean isFolder(String uri) {
//System.out.println("is folder :" + uri);
RulesRepository repository = getRepo();
String[] path = getPath( uri );
if ( path.length == 0 ) return true;
if ( path.length == 1 && (path[0].equals( "packages" ) || path[0].equals( "snapshots" )) ) {
return true;
} else if ( path.length == 2 ) {
return repository.containsPackage( path[1] );
} else if ( path.length == 3 && path[0].equals( "snapshots" ) ) {
return repository.containsPackage( path[1] );
} else {
return false;
}
}
boolean isResource(String uri) {
RulesRepository repository = getRepo();
//System.out.println("is resource :" + uri);
String[] path = getPath( uri );
if ( path.length < 3 ) return false;
if ( !(path[0].equals( "packages" ) || path[0].equals( "snapshots" )) ) return false;
if ( repository.containsPackage( path[1] ) ) {
if ( path[0].equals( "packages" ) ) {
PackageItem pkg = repository.loadPackage( path[1] );
if ( path[2].startsWith( "._" ) ) {
return osxDoubleData.containsKey( uri );
}
return pkg.containsAsset( AssetItem.getAssetNameFromFileName( path[2] )[0] );
} else {
if ( path.length == 4 ) {
PackageItem pkg = repository.loadPackageSnapshot( path[1],
path[2] );
return pkg.containsAsset( AssetItem.getAssetNameFromFileName( path[3] )[0] );
} else {
return false;
}
}
} else {
return false;
}
}
boolean objectExists(String uri) {
if ( uri.indexOf( " copy " ) > 0 ) {
throw new IllegalArgumentException( "OSX is not capable of copy and pasting without breaking the file extension." );
}
return internalObjectExists( uri );
}
private boolean internalObjectExists(String uri) {
RulesRepository repository = getRepo();
//System.out.println("object exist check :" + uri);
if ( uri.endsWith( ".DS_Store" ) ) return false;
String[] path = getPath( uri );
if ( path.length == 0 ) return true;
if ( path.length == 1 && (path[0].equals( "packages" ) || path[0].equals( "snapshots" )) ) {
return true;
} else {
if ( path.length == 1 ) return false;
if ( !repository.containsPackage( path[1] ) ) {
return false;
}
if ( path[0].equals( "packages" ) ) {
if ( path.length == 2 ) {
PackageItem pkg = repository.loadPackage( path[1] );
return !pkg.isArchived();
} else {
PackageItem pkg = repository.loadPackage( path[1] );
if ( path[2].startsWith( "._" ) ) {
return this.osxDoubleData.containsKey( uri );
}
String assetName = AssetItem.getAssetNameFromFileName( path[2] )[0];
return pkg.containsAsset( assetName ) && !pkg.loadAsset( assetName ).isArchived();
}
} else if ( path[0].equals( "snapshots" ) ) {
if ( path.length == 2 ) {
return repository.containsPackage( path[1] );
} else if ( path.length == 3 ) {
return repository.containsSnapshot( path[1],
path[2] );
} else if ( path.length == 4 ) {
PackageItem pkg = repository.loadPackageSnapshot( path[1],
path[2] );
return pkg.containsAsset( AssetItem.getAssetNameFromFileName( path[3] )[0] );
} else {
return false;
}
} else {
throw new IllegalStateException();
}
}
}
public void removeObject(ITransaction arg0,
String uri) {
RulesRepository repository = getRepo();
//System.out.println("remove object:" + uri);
String[] path = getPath( uri );
if ( path.length == 0 || path.length == 1 ) {
throw new IllegalArgumentException();
}
if ( path[0].equals( "packages" ) && checkPackagePermission( path[1],
RoleTypes.PACKAGE_DEVELOPER ) ) {
String packName = path[1];
PackageItem pkg = repository.loadPackage( packName );
if ( path.length == 3 ) {
//delete asset
if ( path[2].startsWith( "._" ) ) {
osxDoubleData.remove( uri );
return;
}
String asset = AssetItem.getAssetNameFromFileName( path[2] )[0];
AssetItem item = pkg.loadAsset( asset );
item.archiveItem( true );
item.checkin( "" );
} else {
//delete package
pkg.archiveItem( true );
pkg.checkin( "" );
}
} else {
throw new IllegalArgumentException( "Not allowed to remove this file." );
}
}
public void rollback(ITransaction arg0) {
//System.out.println("ROLLBACK");
RulesRepository repository = getRepo();
repository.getSession().logout();
}
public long setResourceContent(ITransaction arg0,
String uri,
InputStream content,
String contentType,
String characterEncoding) {
RulesRepository repository = getRepo();
//System.out.println("set resource content:" + uri);
if ( uri.endsWith( ".DS_Store" ) ) return 0;
String[] path = getPath( uri );
if ( path[0].equals( "packages" ) && checkPackagePermission( path[1],
RoleTypes.PACKAGE_DEVELOPER ) ) {
if ( path.length != 3 ) {
throw new IllegalArgumentException( "Not a valid resource path " + uri );
}
String packageName = path[1];
if ( path[2].startsWith( "._" ) ) {
try {
this.osxDoubleData.put( uri,
IOUtils.toByteArray( content ) );
} catch ( IOException e ) {
throw new RuntimeException( e );
}
return 0;
}
String[] assetName = AssetItem.getAssetNameFromFileName( path[2] );
PackageItem pkg = repository.loadPackage( packageName );
AssetItem asset = pkg.loadAsset( assetName[0] );
asset.updateBinaryContentAttachment( content );
//here we could save, or check in, depending on if enough time has passed to justify
//a new version. Otherwise we will pollute the version history with lots of trivial versions.
//if (shouldCreateNewVersion(asset.getLastModified())) {
asset.checkin( "<content from webdav>" );
//}
} else {
throw new UnsupportedOperationException( "Unable to save content to this location." );
}
return 0;
}
String[] getPath(String uri) {
if ( uri.equals( "/" ) ) {
return new String[0];
}
if ( uri.endsWith( "webdav" ) || uri.endsWith( "webdav/" ) ) {
return new String[0];
}
if ( uri.indexOf( "webdav" ) > -1 ) {
return uri.split( "webdav/" )[1].split( "/" );
} else {
return uri.substring( 1 ).split( "/" );
}
}
private boolean isAdmin() {
if ( Contexts.isSessionContextActive() ) {
try {
Identity.instance().checkPermission( new AdminType(),
RoleTypes.ADMIN );
return true;
} catch ( Exception e ) {
return false;
}
} else {
return true;
}
}
private boolean checkPackagePermission(String packageName,
String type) {
if ( Contexts.isSessionContextActive() ) {
try {
Identity.instance().checkPermission( new WebDavPackageNameType( packageName ),
type );
return true;
} catch ( Exception e ) {
return false;
}
} else {
return true;
}
}
}
|
drools-guvnor/src/main/java/org/drools/guvnor/server/files/WebDAVImpl.java
|
/**
* Copyright 2010 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.guvnor.server.files;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.security.Principal;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.WeakHashMap;
import net.sf.webdav.ITransaction;
import net.sf.webdav.IWebdavStore;
import net.sf.webdav.StoredObject;
import org.apache.commons.io.IOUtils;
import org.drools.guvnor.server.security.AdminType;
import org.drools.guvnor.server.security.RoleTypes;
import org.drools.guvnor.server.security.WebDavPackageNameType;
import org.drools.repository.AssetItem;
import org.drools.repository.PackageItem;
import org.drools.repository.RulesRepository;
import org.drools.repository.VersionableItem;
import org.jboss.seam.contexts.Contexts;
import org.jboss.seam.security.Identity;
public class WebDAVImpl
implements
IWebdavStore {
/** for the rubbish OSX double data (the ._ rubbish) */
static Map<String, byte[]> osxDoubleData = Collections.synchronizedMap( new WeakHashMap<String, byte[]>() );
final ThreadLocal<RulesRepository> tlRepo = new ThreadLocal<RulesRepository>();
public WebDAVImpl(File f) {
}
public WebDAVImpl() {
}
public WebDAVImpl(RulesRepository testRepo) {
tlRepo.set( testRepo );
}
RulesRepository getRepo() {
return tlRepo.get();
}
public ITransaction begin(final Principal pr) {
tlRepo.set( RestAPIServlet.getRepository() );
return new ITransaction() {
public Principal getPrincipal() {
return pr;
}
};
}
public void checkAuthentication(ITransaction arg0) {
//already done
}
public void commit(ITransaction arg0) {
//System.out.println("COMMIT START");
getRepo().save();
tlRepo.set( null );
//System.out.println("COMMIT END");
}
public void createFolder(ITransaction arg0,
String uri) {
//System.out.println("creating folder:" + uri);
String[] path = getPath( uri );
if ( path[0].equals( "packages" ) && isAdmin() ) {
if ( path.length > 2 ) {
throw new UnsupportedOperationException( "Can't nest packages." );
}
RulesRepository repository = getRepo();
if ( repository.containsPackage( path[1] ) ) {
PackageItem pkg = repository.loadPackage( path[1] );
pkg.archiveItem( false );
pkg.checkin( "<restored by webdav>" );
} else {
repository.createPackage( path[1],
"<from webdav>" );
}
} else {
throw new UnsupportedOperationException( "Not able to create folders here..." );
}
}
public void createResource(ITransaction arg0,
String uri) {
//System.out.println("creating resource:" + uri);
//for mac OSX, ignore these annoying things
if ( uri.endsWith( ".DS_Store" ) ) return;
String[] path = getPath( uri );
if ( path[0].equals( "packages" ) && checkPackagePermission( path[1],
RoleTypes.PACKAGE_ADMIN ) ) {
if ( path.length > 3 ) {
throw new UnsupportedOperationException( "Can't do nested packages." );
}
String packageName = path[1];
String[] resource = AssetItem.getAssetNameFromFileName( path[2] );
RulesRepository repository = getRepo();
PackageItem pkg = repository.loadPackage( packageName );
//for mac OSX, ignore these resource fork files
if ( path[2].startsWith( "._" ) ) {
this.osxDoubleData.put( uri,
null );
return;
}
if ( pkg.containsAsset( resource[0] ) ) {
AssetItem lazarus = pkg.loadAsset( resource[0] );
lazarus.archiveItem( false );
//lazarus.checkin("<from webdav>");
} else {
AssetItem asset = pkg.addAsset( resource[0],
"" );
asset.updateFormat( resource[1] );
//asset.checkin("<from webdav>");
}
} else {
throw new UnsupportedOperationException( "Can't add assets here." );
}
}
public String[] getChildrenNames(ITransaction arg0,
String uri) {
//System.out.println("getChildrenNames :" + uri);
RulesRepository repository = getRepo();
String[] path = getPath( uri );
List<String> result = new ArrayList<String>();
if ( path.length == 0 ) {
return new String[]{"packages", "snapshots"};
}
if ( path[0].equals( "packages" ) ) {
if ( path.length > 2 ) {
return null;
}
if ( path.length == 1 ) {
listPackages( repository,
result );
} else if ( checkPackagePermission( path[1],
RoleTypes.PACKAGE_READONLY ) ) {
PackageItem pkg = repository.loadPackage( path[1] );
Iterator<AssetItem> it = pkg.getAssets();
while ( it.hasNext() ) {
AssetItem asset = it.next();
if ( !asset.isArchived() ) {
result.add( asset.getName() + "." + asset.getFormat() );
}
}
}
} else if ( path[0].equals( "snapshots" ) ) {
if ( path.length > 3 ) {
return null;
}
if ( path.length == 1 ) {
listPackages( repository,
result );
} else if ( path.length == 2 && checkPackagePermission( path[1],
RoleTypes.PACKAGE_READONLY ) ) {
String[] snaps = repository.listPackageSnapshots( path[1] );
return snaps;
} else if ( path.length == 3 && checkPackagePermission( path[1],
RoleTypes.PACKAGE_READONLY ) ) {
Iterator<AssetItem> it = repository.loadPackageSnapshot( path[1],
path[2] ).getAssets();
while ( it.hasNext() ) {
AssetItem asset = it.next();
if ( !asset.isArchived() ) {
result.add( asset.getName() + "." + asset.getFormat() );
}
}
} else {
throw new IllegalArgumentException();
}
} else {
throw new UnsupportedOperationException( "Not a valid path : " + path[0] );
}
return result.toArray( new String[result.size()] );
}
private void listPackages(RulesRepository repository,
List<String> result) {
Iterator<PackageItem> it = repository.listPackages();
while ( it.hasNext() ) {
PackageItem pkg = it.next();
String packageName = pkg.getName();
if ( !pkg.isArchived() && checkPackagePermission( packageName,
RoleTypes.PACKAGE_READONLY ) ) {
result.add( packageName );
}
}
}
public Date getCreationDate(String uri) {
//System.out.println("getCreationDate :" + uri);
RulesRepository repository = getRepo();
String[] path = getPath( uri );
if ( path.length < 2 ) return new Date();
if ( path[0].equals( "packages" ) && checkPackagePermission( path[1],
RoleTypes.PACKAGE_READONLY ) ) {
PackageItem pkg = repository.loadPackage( path[1] );
if ( path.length == 2 ) {
//dealing with package
return pkg.getCreatedDate().getTime();
} else {
String fileName = path[2];
String assetName = AssetItem.getAssetNameFromFileName( fileName )[0];
AssetItem asset = pkg.loadAsset( assetName );
return asset.getCreatedDate().getTime();
}
} else if ( path[0].equals( "snapshots" ) && checkPackagePermission( path[1],
RoleTypes.PACKAGE_READONLY ) ) {
if ( path.length == 2 ) {
return new Date();
} else if ( path.length == 3 ) {
return repository.loadPackageSnapshot( path[1],
path[2] ).getCreatedDate().getTime();
} else if ( path.length == 4 ) {
PackageItem pkg = repository.loadPackageSnapshot( path[1],
path[2] );
AssetItem asset = pkg.loadAsset( AssetItem.getAssetNameFromFileName( path[3] )[0] );
return asset.getCreatedDate().getTime();
} else {
throw new UnsupportedOperationException();
}
} else {
throw new UnsupportedOperationException();
}
}
/**
 * WebDAV last-modified date of the resource identified by {@code uri}.
 * Structurally a mirror of {@link #getCreationDate(String)} with
 * {@code getLastModified()} substituted for {@code getCreatedDate()};
 * keep the two methods in sync when changing either.
 *
 * @throws UnsupportedOperationException for unknown top-level folders,
 *         over-long snapshot paths, or when the read permission check fails.
 */
public Date getLastModified(String uri) {
//System.out.println("getLastModified :" + uri);
RulesRepository repository = getRepo();
String[] path = getPath( uri );
// Root or top-level folder: no persisted date, report "now".
if ( path.length < 2 ) return new Date();
if ( path[0].equals( "packages" ) && checkPackagePermission( path[1],
RoleTypes.PACKAGE_READONLY ) ) {
PackageItem pkg = repository.loadPackage( path[1] );
if ( path.length == 2 ) {
//dealing with package
return pkg.getLastModified().getTime();
} else {
String fileName = path[2];
String assetName = AssetItem.getAssetNameFromFileName( fileName )[0];
AssetItem asset = pkg.loadAsset( assetName );
return asset.getLastModified().getTime();
}
} else if ( path[0].equals( "snapshots" ) && checkPackagePermission( path[1],
RoleTypes.PACKAGE_READONLY ) ) {
if ( path.length == 2 ) {
return new Date();
} else if ( path.length == 3 ) {
return repository.loadPackageSnapshot( path[1],
path[2] ).getLastModified().getTime();
} else if ( path.length == 4 ) {
PackageItem pkg = repository.loadPackageSnapshot( path[1],
path[2] );
AssetItem asset = pkg.loadAsset( AssetItem.getAssetNameFromFileName( path[3] )[0] );
return asset.getLastModified().getTime();
} else {
throw new UnsupportedOperationException();
}
} else {
throw new UnsupportedOperationException();
}
}
/**
 * Streams the content of the resource at {@code uri}.
 * The transaction argument is unused; all work is delegated to
 * {@link #getContent(String)}.
 */
public InputStream getResourceContent(ITransaction arg0,
String uri) {
//System.out.println("get resource content:" + uri);
return getContent( uri );
}
/**
 * Builds the WebDAV {@code StoredObject} metadata (dates, folder flag,
 * length) for {@code uri}. Returns {@code null} when an asset lookup
 * fails, which WebDAV clients treat as "not found".
 *
 * @throws UnsupportedOperationException for unknown top-level folders or
 *         unsupported path depths under "snapshots".
 */
public StoredObject getStoredObject(ITransaction arg0,
String uri) {
RulesRepository repository = getRepo();
String[] path = getPath( uri );
// Root and top-level folders: synthesize an empty folder entry.
if ( path.length < 2 ) {
StoredObject so = new StoredObject();
so.setCreationDate( new Date() );
so.setFolder( isFolder( uri ) );
so.setLastModified( new Date() );
so.setResourceLength( 0 );
return so;
}
if ( path[0].equals( "packages" ) && checkPackagePermission( path[1],
RoleTypes.PACKAGE_READONLY ) ) {
PackageItem pkg = repository.loadPackage( path[1] );
if ( path.length == 2 ) {
//dealing with package
return createStoredObject( uri,
pkg,
0 );
} else {
String fileName = path[2];
String assetName = AssetItem.getAssetNameFromFileName( fileName )[0];
AssetItem asset;
try {
asset = pkg.loadAsset( assetName );
} catch ( Exception e ) {
// Asset does not exist (or cannot be loaded) -> report "not found".
return null;
}
return createStoredObject( uri,
asset,
asset.getContentLength() );
}
} else if ( path[0].equals( "snapshots" ) && checkPackagePermission( path[1],
RoleTypes.PACKAGE_READONLY ) ) {
if ( path.length == 3 ) {
// NOTE(review): path[2] is the snapshot name here, yet it is also fed
// to getAssetNameFromFileName() as if it were an asset file, and the
// snapshot item (not the asset) is passed to createStoredObject while
// the asset supplies the length. Looks inconsistent with the
// path.length == 4 branch below -- confirm intended behavior.
PackageItem snapshot = repository.loadPackageSnapshot( path[1],
path[2] );
AssetItem asset;
try {
asset = snapshot.loadAsset( AssetItem.getAssetNameFromFileName( path[2] )[0] );
} catch ( Exception e ) {
return null;
}
return createStoredObject( uri,
snapshot,
asset.getContentLength() );
} else if ( path.length == 4 ) {
// snapshots/<package>/<snapshotName>/<assetFile>
PackageItem pkg = repository.loadPackageSnapshot( path[1],
path[2] );
AssetItem asset;
try {
asset = pkg.loadAsset( AssetItem.getAssetNameFromFileName( path[3] )[0] );
} catch ( Exception e ) {
return null;
}
return createStoredObject( uri,
asset,
asset.getContentLength() );
} else {
throw new UnsupportedOperationException();
}
} else {
throw new UnsupportedOperationException();
}
}
/**
 * Maps a versioned repository item onto the WebDAV {@code StoredObject}
 * metadata structure (creation date, last-modified, folder flag, length).
 */
private StoredObject createStoredObject(String uri,
                                        VersionableItem pi,
                                        long resourceLength) {
    StoredObject storedObject = new StoredObject();
    storedObject.setResourceLength( resourceLength );
    storedObject.setFolder( isFolder( uri ) );
    storedObject.setCreationDate( pi.getCreatedDate().getTime() );
    storedObject.setLastModified( pi.getLastModified().getTime() );
    return storedObject;
}
/**
 * Opens a stream over the asset addressed by {@code uri}.
 * Expects "packages/<pkg>/<assetFile>" or
 * "snapshots/<pkg>/<snapshot>/<assetFile>"; shorter paths will fail with
 * an ArrayIndexOutOfBoundsException since no length check is performed
 * here (callers are expected to pass resource-depth URIs).
 *
 * @throws UnsupportedOperationException for other top-level folders or
 *         when the read permission check fails.
 */
private InputStream getContent(String uri) {
RulesRepository repository = getRepo();
String[] path = getPath( uri );
if ( path[0].equals( "packages" ) && checkPackagePermission( path[1],
RoleTypes.PACKAGE_READONLY ) ) {
String pkg = path[1];
String asset = AssetItem.getAssetNameFromFileName( path[2] )[0];
AssetItem assetItem = repository.loadPackage( pkg ).loadAsset( asset );
return getAssetData( assetItem );
} else if ( path[0].equals( "snapshots" ) && checkPackagePermission( path[1],
RoleTypes.PACKAGE_READONLY ) ) {
String pkg = path[1];
String snap = path[2];
String asset = AssetItem.getAssetNameFromFileName( path[3] )[0];
AssetItem assetItem = repository.loadPackageSnapshot( pkg,
snap ).loadAsset( asset );
return getAssetData( assetItem );
} else {
throw new UnsupportedOperationException();
}
}
/**
 * Returns the asset's payload as a stream: the binary attachment for
 * binary assets, otherwise the textual content encoded with the platform
 * default charset (matching the original String.getBytes() behavior).
 */
private InputStream getAssetData(AssetItem assetItem) {
    if ( !assetItem.isBinary() ) {
        return new ByteArrayInputStream( assetItem.getContent().getBytes() );
    }
    return assetItem.getBinaryContentAttachment();
}
/**
 * Content length in bytes for the asset at {@code uri}, or 0 for folders,
 * unknown paths, and on any lookup failure.
 *
 * NOTE(review): the broad catch below swallows the exception and only
 * prints a generic line to stderr; consider logging the cause.
 */
public long getResourceLength(ITransaction arg0,
String uri) {
//System.out.println("get resource length :" + uri);
String[] path = getPath( uri );
try {
RulesRepository repo = getRepo();
if ( path.length == 3 && path[0].equals( "packages" ) && checkPackagePermission( path[1],
RoleTypes.PACKAGE_READONLY ) ) {
PackageItem pkg = repo.loadPackage( path[1] );
AssetItem asset = pkg.loadAsset( AssetItem.getAssetNameFromFileName( path[2] )[0] );
return asset.getContentLength();
} else if ( path.length == 4 && path[0].equals( "snapshots" ) && checkPackagePermission( path[1],
RoleTypes.PACKAGE_READONLY ) ) {
PackageItem pkg = repo.loadPackageSnapshot( path[1],
path[2] );
AssetItem asset = pkg.loadAsset( AssetItem.getAssetNameFromFileName( path[3] )[0] );
return asset.getContentLength();
} else {
// Folders and anything unrecognized report zero length.
return 0;
}
} catch ( Exception e ) {
System.err.println( "Not able to get content length" );
return 0;
}
}
/**
 * Whether {@code uri} names a folder in the WebDAV tree: the root, the
 * "packages"/"snapshots" roots, a package (depth 2), or a snapshot of an
 * existing package (depth 3 under "snapshots"). Everything else is not a
 * folder.
 */
boolean isFolder(String uri) {
    //System.out.println("is folder :" + uri);
    RulesRepository repository = getRepo();
    String[] path = getPath( uri );
    switch ( path.length ) {
        case 0 :
            return true;
        case 1 :
            return path[0].equals( "packages" ) || path[0].equals( "snapshots" );
        case 2 :
            // Both packages/<pkg> and snapshots/<pkg> are folders when the package exists.
            return repository.containsPackage( path[1] );
        case 3 :
            return path[0].equals( "snapshots" ) && repository.containsPackage( path[1] );
        default :
            return false;
    }
}
/**
 * Whether {@code uri} names a leaf resource (an asset file) rather than a
 * folder. Resources live at depth 3 under "packages" or depth 4 under
 * "snapshots". OS X AppleDouble companion files ("._*") are tracked
 * separately in the in-memory osxDoubleData map.
 */
boolean isResource(String uri) {
RulesRepository repository = getRepo();
//System.out.println("is resource :" + uri);
String[] path = getPath( uri );
// Resources are always deeper than a package/snapshot folder.
if ( path.length < 3 ) return false;
if ( !(path[0].equals( "packages" ) || path[0].equals( "snapshots" )) ) return false;
if ( repository.containsPackage( path[1] ) ) {
if ( path[0].equals( "packages" ) ) {
PackageItem pkg = repository.loadPackage( path[1] );
// OS X "._" double files are virtual; they only exist in our map.
if ( path[2].startsWith( "._" ) ) {
return osxDoubleData.containsKey( uri );
}
return pkg.containsAsset( AssetItem.getAssetNameFromFileName( path[2] )[0] );
} else {
if ( path.length == 4 ) {
PackageItem pkg = repository.loadPackageSnapshot( path[1],
path[2] );
return pkg.containsAsset( AssetItem.getAssetNameFromFileName( path[3] )[0] );
} else {
// snapshots/<pkg>/<snapshot> is a folder, not a resource.
return false;
}
}
} else {
return false;
}
}
/**
 * Existence check used by the WebDAV layer. Rejects OS X Finder
 * "copy"-style names up front (they would mangle the file extension),
 * then delegates to {@link #internalObjectExists(String)}.
 *
 * @throws IllegalArgumentException if the uri contains " copy "
 */
boolean objectExists(String uri) {
if ( uri.indexOf( " copy " ) > 0 ) {
throw new IllegalArgumentException( "OSX is not capable of copy and pasting without breaking the file extension." );
}
return internalObjectExists( uri );
}
/**
 * Core existence check: the root and the "packages"/"snapshots" roots
 * always exist; packages and assets exist when present and not archived;
 * snapshot folders/assets exist when the snapshot is stored. Finder's
 * ".DS_Store" files are always reported absent.
 *
 * @throws IllegalStateException if the top-level folder is neither
 *         "packages" nor "snapshots" (should be unreachable after the
 *         containsPackage guard).
 */
private boolean internalObjectExists(String uri) {
RulesRepository repository = getRepo();
//System.out.println("object exist check :" + uri);
if ( uri.endsWith( ".DS_Store" ) ) return false;
String[] path = getPath( uri );
if ( path.length == 0 ) return true;
if ( path.length == 1 && (path[0].equals( "packages" ) || path[0].equals( "snapshots" )) ) {
return true;
} else {
// Any other single-segment path is unknown.
if ( path.length == 1 ) return false;
if ( !repository.containsPackage( path[1] ) ) {
return false;
}
if ( path[0].equals( "packages" ) ) {
if ( path.length == 2 ) {
PackageItem pkg = repository.loadPackage( path[1] );
// Archived packages are treated as deleted.
return !pkg.isArchived();
} else {
PackageItem pkg = repository.loadPackage( path[1] );
// OS X "._" double files live only in the in-memory map.
if ( path[2].startsWith( "._" ) ) {
return this.osxDoubleData.containsKey( uri );
}
String assetName = AssetItem.getAssetNameFromFileName( path[2] )[0];
return pkg.containsAsset( assetName ) && !pkg.loadAsset( assetName ).isArchived();
}
} else if ( path[0].equals( "snapshots" ) ) {
if ( path.length == 2 ) {
return repository.containsPackage( path[1] );
} else if ( path.length == 3 ) {
return repository.containsSnapshot( path[1],
path[2] );
} else if ( path.length == 4 ) {
PackageItem pkg = repository.loadPackageSnapshot( path[1],
path[2] );
return pkg.containsAsset( AssetItem.getAssetNameFromFileName( path[3] )[0] );
} else {
return false;
}
} else {
throw new IllegalStateException();
}
}
}
/**
 * WebDAV DELETE: archives (soft-deletes) the addressed package or asset.
 * Only "packages/..." paths with PACKAGE_DEVELOPER rights may be removed;
 * snapshots and top-level folders are rejected. Items are archived and
 * checked in rather than physically deleted, so version history is kept.
 *
 * @throws IllegalArgumentException for root/top-level paths or when the
 *         caller lacks permission / the path is not under "packages"
 */
public void removeObject(ITransaction arg0,
String uri) {
RulesRepository repository = getRepo();
//System.out.println("remove object:" + uri);
String[] path = getPath( uri );
if ( path.length == 0 || path.length == 1 ) {
throw new IllegalArgumentException();
}
if ( path[0].equals( "packages" ) && checkPackagePermission( path[1],
RoleTypes.PACKAGE_DEVELOPER ) ) {
String packName = path[1];
PackageItem pkg = repository.loadPackage( packName );
if ( path.length == 3 ) {
//delete asset
// OS X "._" double files are only stored in-memory; drop and return.
if ( path[2].startsWith( "._" ) ) {
osxDoubleData.remove( uri );
return;
}
String asset = AssetItem.getAssetNameFromFileName( path[2] )[0];
AssetItem item = pkg.loadAsset( asset );
item.archiveItem( true );
item.checkin( "" );
} else {
//delete package
pkg.archiveItem( true );
pkg.checkin( "" );
}
} else {
throw new IllegalArgumentException( "Not allowed to remove this file." );
}
}
/**
 * WebDAV transaction rollback hook.
 * NOTE(review): this logs out the repository's JCR session; presumably
 * discarding unsaved changes serves as the rollback here -- confirm
 * against the JCR session semantics in use.
 */
public void rollback(ITransaction arg0) {
//System.out.println("ROLLBACK");
RulesRepository repository = getRepo();
repository.getSession().logout();
}
/**
 * WebDAV PUT: stores {@code content} as the binary attachment of the
 * addressed asset and checks it in. Only "packages/<pkg>/<assetFile>"
 * paths with PACKAGE_DEVELOPER rights are writable. Finder ".DS_Store"
 * files are silently ignored; OS X "._" double files are captured into
 * the in-memory osxDoubleData map instead of the repository.
 * Always returns 0 (the WebDAV contract's byte count is not tracked).
 *
 * @throws IllegalArgumentException       if the path is not asset-depth
 * @throws RuntimeException               wrapping any IOException while
 *                                        buffering a "._" double file
 * @throws UnsupportedOperationException  for non-writable locations
 */
public long setResourceContent(ITransaction arg0,
String uri,
InputStream content,
String contentType,
String characterEncoding) {
RulesRepository repository = getRepo();
//System.out.println("set resource content:" + uri);
if ( uri.endsWith( ".DS_Store" ) ) return 0;
String[] path = getPath( uri );
if ( path[0].equals( "packages" ) && checkPackagePermission( path[1],
RoleTypes.PACKAGE_DEVELOPER ) ) {
if ( path.length != 3 ) {
throw new IllegalArgumentException( "Not a valid resource path " + uri );
}
String packageName = path[1];
if ( path[2].startsWith( "._" ) ) {
try {
this.osxDoubleData.put( uri,
IOUtils.toByteArray( content ) );
} catch ( IOException e ) {
throw new RuntimeException( e );
}
return 0;
}
String[] assetName = AssetItem.getAssetNameFromFileName( path[2] );
PackageItem pkg = repository.loadPackage( packageName );
AssetItem asset = pkg.loadAsset( assetName[0] );
asset.updateBinaryContentAttachment( content );
//here we could save, or check in, depending on if enough time has passed to justify
//a new version. Otherwise we will pollute the version history with lots of trivial versions.
//if (shouldCreateNewVersion(asset.getLastModified())) {
asset.checkin( "<content from webdav>" );
//}
} else {
throw new UnsupportedOperationException( "Unable to save content to this location." );
}
return 0;
}
/**
 * Splits {@code uri} into its path segments relative to the WebDAV root.
 * "/" and anything ending in "webdav"/"webdav/" map to the empty path;
 * URIs containing "webdav" are split on the first "webdav/" occurrence;
 * otherwise the leading slash is stripped and the rest split on "/".
 */
String[] getPath(String uri) {
    if ( uri.equals( "/" ) || uri.endsWith( "webdav" ) || uri.endsWith( "webdav/" ) ) {
        return new String[0];
    }
    if ( uri.contains( "webdav" ) ) {
        return uri.split( "webdav/" )[1].split( "/" );
    }
    return uri.substring( 1 ).split( "/" );
}
/**
 * True when the caller holds the ADMIN role. Outside an active Seam
 * session context (e.g. in tests) security is disabled and everyone is
 * treated as admin.
 */
private boolean isAdmin() {
    if ( !Contexts.isSessionContextActive() ) {
        // No security context active: permissive by design.
        return true;
    }
    try {
        Identity.instance().checkPermission( new AdminType(),
                                             RoleTypes.ADMIN );
        return true;
    } catch ( Exception e ) {
        // checkPermission signals denial by throwing.
        return false;
    }
}
/**
 * True when the caller holds permission of the given {@code type} on the
 * named package. Outside an active Seam session context security is
 * disabled and access is always granted.
 */
private boolean checkPackagePermission(String packageName,
                                       String type) {
    if ( !Contexts.isSessionContextActive() ) {
        // No security context active: permissive by design.
        return true;
    }
    try {
        Identity.instance().checkPermission( new WebDavPackageNameType( packageName ),
                                             type );
        return true;
    } catch ( Exception e ) {
        // checkPermission signals denial by throwing.
        return false;
    }
}
}
|
GUVNOR-604:Guvnor can not response a 'PROPFIND' request which is used to get the children list of 'snapshots'
git-svn-id: a243bed356d289ca0d1b6d299a0597bdc4ecaa09@35141 c60d74c8-e8f6-0310-9e8f-d4a2fc68ab70
|
drools-guvnor/src/main/java/org/drools/guvnor/server/files/WebDAVImpl.java
|
GUVNOR-604:Guvnor can not response a 'PROPFIND' request which is used to get the children list of 'snapshots'
|
|
Java
|
apache-2.0
|
b8da60b0ca618d22c60776646c86bd2bb44e399b
| 0
|
MyRobotLab/myrobotlab,MyRobotLab/myrobotlab,MyRobotLab/myrobotlab,MyRobotLab/myrobotlab,MyRobotLab/myrobotlab,MyRobotLab/myrobotlab,MyRobotLab/myrobotlab
|
package org.myrobotlab.service;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import java.util.List;
import java.util.Map;
import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TestName;
import org.junit.runner.JUnitCore;
import org.junit.runner.Result;
import org.myrobotlab.document.Classification;
import org.myrobotlab.logging.LoggerFactory;
import org.myrobotlab.test.AbstractTest;
import org.myrobotlab.test.ChaosMonkey;
import org.slf4j.Logger;
// TODO: re-enable this unit test.. but for now it's just too slow ..
// it also opens a swing gui which isn't good.
/**
 * Integration tests for the MyRobotLab OpenCV service: capture from local
 * files/URLs/camera, face detection, yolo classification, and a chaos test
 * that hammers capture start/stop. Starts a SwingGui, so it is not
 * headless-friendly (see class TODO below).
 */
public class OpenCVTest extends AbstractTest {
// Shared OpenCV service instance, started once in setUpBeforeClass.
static OpenCV cv = null;
public final static Logger log = LoggerFactory.getLogger(OpenCVTest.class);
static SwingGui swing = null;
static final String TEST_DIR = "src/test/resources/OpenCV/";
static final String TEST_LOCAL_FACE_FILE_JPEG = "src/test/resources/OpenCV/multipleFaces.jpg";
static final String TEST_LOCAL_MP4 = "src/test/resources/OpenCV/monkeyFace.mp4";
// static final String TEST_LOCAL_MP4 = "src/test/resources/OpenCV/big_buck_bunny.mp4";
// static final String TEST_YOUTUBE = "https://www.youtube.com/watch?v=I9VA-U69yaY";
static final String TEST_INPUT_DIR = "src/test/resources/OpenCV/kinect-data";
static final String TEST_TRANSPARENT_FILE_PNG = "src/test/resources/OpenCV/transparent-bubble.png";
// static final String TEST_REMOTE_FILE_JPG = TEST_LOCAL_FACE_FILE_JPEG;
// static final String TEST_REMOTE_FILE_JPG = "https://en.wikipedia.org/wiki/Isaac_Asimov#/media/File:Isaac.Asimov01.jpg";
static final String TEST_REMOTE_FILE_JPG = "https://upload.wikimedia.org/wikipedia/commons/c/c0/Douglas_adams_portrait_cropped.jpg";
// Effectively "wait forever" for async CV results (was 120000 ms).
private static final int MAX_TIMEOUT = 999999999;//120000;
// TODO - getClassifictions publishClassifications
// TODO - getFaces publishFaces
// TODO - chaos monkey filter tester
// Manual entry point: runs a subset of the tests outside the JUnit runner.
public static void main(String[] args) {
try {
// // LoggingFactory.init("INFO");
setUpBeforeClass();
OpenCVTest test = new OpenCVTest();
test.testGetClassifications();
boolean quitNow = true;
if (quitNow) {
return;
}
// Unreachable while quitNow is true -- kept for manual experimentation.
test.testAllFilterTypes();
/*
* cv.capture("https://www.youtube.com/watch?v=I9VA-U69yaY");// red pill
* // green pill cv.capture(0); cv.stopCapture();
* cv.setGrabberType("Sarxos"); cv.capture(0);
* cv.capture("https://www.youtube.com/watch?v=zDO1Q_ox4vk");
* cv.capture(0);
* cv.capture("https://www.youtube.com/watch?v=zDO1Q_ox4vk");
* cv.capture(0);
*/
test.chaosCaptureTest();
// test.testAllCaptures();
// run junit as java app
JUnitCore junit = new JUnitCore();
Result result = junit.run(OpenCVTest.class);
log.info("Result failures: {}", result.getFailureCount());
} catch (Exception e) {
log.error("main threw", e);
}
}
@Rule
public final TestName testName = new TestName();
// Starts the shared cv + swing services; the commented block below is a
// retired warm-up sequence kept for reference.
@BeforeClass
public static void setUpBeforeClass() throws Exception {
log.warn("========= OpenCVTest - setupbefore class - begin loading libraries =========");
log.warn("========= OpenCVTest - setupbefore class - starting cv =========");
long ts = System.currentTimeMillis();
cv = (OpenCV) Runtime.start("cv", "OpenCV");
swing = (SwingGui) Runtime.start("gui", "SwingGui");
/*
log.warn("========= OpenCVTest - setupbefore class - started cv {} ms =========", System.currentTimeMillis()-ts );
ts = System.currentTimeMillis();
log.warn("========= OpenCVTest - setupbefore class - starting capture =========");
cv.capture(TEST_LOCAL_FACE_FILE_JPEG);
log.warn("========= OpenCVTest - setupbefore class - started capture {} ms =========", System.currentTimeMillis()-ts );
ts = System.currentTimeMillis();
log.warn("========= OpenCVTest - setupbefore class - starting getFaceDetect =========");
cv.getFaceDetect(120000);// two minute wait to load all libraries
log.warn("========= OpenCVTest - setupbefore class - started getFaceDetect {} ms =========", System.currentTimeMillis()-ts );
ts = System.currentTimeMillis();
log.warn("========= OpenCVTest - setupbefore class - starting getClassifications =========");
cv.reset();
OpenCVFilter yoloFilter = cv.addFilter("yolo");
// cv.getClassifications(120000);
cv.capture(TEST_LOCAL_FACE_FILE_JPEG);
log.warn("========= OpenCVTest - setupbefore class - started getClassifications {} ms =========", System.currentTimeMillis()-ts );
ts = System.currentTimeMillis();
log.warn("========= OpenCVTest - setupbefore class - starting getOpenCVData =========");
cv.reset();
cv.capture(TEST_LOCAL_MP4);
cv.getOpenCVData();
log.warn("========= OpenCVTest - setupbefore class - started getOpenCVData {} ms =========", System.currentTimeMillis()-ts );
cv.disableAll();
// if (!isHeadless()) { - no longer needed I believe - SwingGui now handles it
// }
*/
}
// FIXME - do the following test
// test all frame grabber types
// test all filters !
// test remote file source
// test mpeg streamer
// @Ignore
// Randomly interleaves capture/stopCapture calls via ChaosMonkey, then
// verifies face detection still works afterwards.
@Test
public final void chaosCaptureTest() throws Exception {
log.warn("=======OpenCVTest chaosCaptureTest=======");
ChaosMonkey.giveToMonkey(cv, "capture", TEST_LOCAL_FACE_FILE_JPEG);
ChaosMonkey.giveToMonkey(cv, "capture");
ChaosMonkey.giveToMonkey(cv, "stopCapture");
if (hasInternet()) {
// red pill green pill
ChaosMonkey.giveToMonkey(cv, "capture", TEST_LOCAL_MP4);
ChaosMonkey.giveToMonkey(cv, "capture", TEST_REMOTE_FILE_JPG);
}
ChaosMonkey.giveToMonkey(cv, "stopCapture");
if (!cv.isVirtual()) {
// if hasHardware camera index 0 - FIXME should check if camera 0 exists ?
ChaosMonkey.giveToMonkey(cv, "capture", 0);
}
ChaosMonkey.startMonkeys();
ChaosMonkey.monkeyReport();
// check after the monkeys have pounded on it - it still works !
cv.reset();
cv.removeFilters();
cv.capture(TEST_LOCAL_FACE_FILE_JPEG);
List<Classification> data = cv.getFaces(MAX_TIMEOUT);
assertNotNull(data);
assertTrue(data.size() > 0);
}
// Smoke test: face detection on a local jpg with multiple faces.
@Test
public final void simpleFaces() {
log.warn("=======OpenCVTest simpleFaces=======");
cv.reset();
cv.capture(TEST_LOCAL_FACE_FILE_JPEG);
List<Classification> data = cv.getFaces(MAX_TIMEOUT);
assertNotNull(data);
assertTrue(data.size() > 0);
}
// Exercises the default frame grabber across remote jpg, local files,
// a directory source, and the explicit ImageFile grabber.
@Test
public final void testAllCaptures() throws Exception {
log.warn("=======OpenCVTest testAllCaptures=======");
List<Classification> data = null;
/**
* Testing default captures after a reset when the frame grabber type is not
* explicitly set
*/
if (hasInternet()) {
// default internet jpg
cv.reset();
// cv.capture("https://upload.wikimedia.org/wikipedia/commons/c/c0/Douglas_adams_portrait_cropped.jpg");
cv.capture(TEST_REMOTE_FILE_JPG);
data = cv.getFaces(MAX_TIMEOUT);
assertNotNull(data);
assertTrue(data.size() > 0);
}
// default local mp4
cv.reset();
cv.capture(TEST_LOCAL_FACE_FILE_JPEG);
data = cv.getFaces(MAX_TIMEOUT);
assertNotNull(data);
assertTrue(data.size() > 0);
// default local jpg
cv.reset();
cv.capture(TEST_LOCAL_FACE_FILE_JPEG);
data = cv.getFaces(MAX_TIMEOUT);
assertNotNull(data);
assertTrue(data.size() > 0);
// default local directory
cv.reset();
cv.capture(TEST_INPUT_DIR);
// NOTE(review): asserts the stale 'data' from the previous capture --
// the directory capture itself is not actually verified here.
assertNotNull(data);
/**
* Test ImageFile frame grabber
*/
if (hasInternet()) {
cv.reset();
cv.setGrabberType("ImageFile");
cv.capture("https://upload.wikimedia.org/wikipedia/commons/c/c0/Douglas_adams_portrait_cropped.jpg");
data = cv.getFaces(MAX_TIMEOUT);
assertNotNull(data);
assertTrue(data.size() > 0);
}
}
// TODO test enable disable & enableDisplay
/**
* minimally all filters should have the ability to load and run by themselves
* for a second
*/
@Test
public final void testAllFilterTypes() {
log.warn("=======OpenCVTest testAllFilterTypes=======");
log.info("starting all filters test");
cv.reset();
// 19 second blue red pill
cv.capture(TEST_LOCAL_MP4);
for (String fn : OpenCV.POSSIBLE_FILTERS) {
log.info("trying filter {}", fn);
// Heavyweight or external-dependency filters are skipped.
if (fn.startsWith("DL4J") || fn.startsWith("Tesseract") || fn.startsWith("SimpleBlobDetector") || fn.startsWith("Solr") || fn.startsWith("Split")) {
log.info("skipping {}", fn);
continue;
}
cv.addFilter(fn);
sleep(1000);
cv.removeFilters();
}
log.info("done with all filters");
}
// Verifies yolo classification on a local image detects a "person";
// locale is pinned to English so classification labels match.
@Test
public final void testGetClassifications() {
log.warn("=======OpenCVTest testGetClassifications=======");
Runtime.setAllLocales("en");
cv.reset();
// cv.removeFilters();
log.warn("=======OpenCVTest testGetClassifications - 1=======");
cv.capture(TEST_LOCAL_FACE_FILE_JPEG);
// OpenCVFilter f =
log.warn("=======OpenCVTest testGetClassifications - 2=======");
cv.addFilter("yolo");
log.warn("=======OpenCVTest testGetClassifications - 3=======");
// f.enable();
log.warn("=======OpenCVTest testGetClassifications - cv.getLocale {} =======", cv.getLocale());
Map<String, List<Classification>> classifications = cv.getClassifications(MAX_TIMEOUT);
log.warn("=======OpenCVTest testGetClassifications - 4 {} =======", classifications);
assertNotNull(classifications);
log.warn("=======OpenCVTest testGetClassifications - 5 =======");
assertTrue(classifications.containsKey("person"));
log.warn("=======OpenCVTest testGetClassifications - 6 =======");
}
}
|
src/test/java/org/myrobotlab/service/OpenCVTest.java
|
package org.myrobotlab.service;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import java.util.List;
import java.util.Map;
import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TestName;
import org.junit.runner.JUnitCore;
import org.junit.runner.Result;
import org.myrobotlab.document.Classification;
import org.myrobotlab.logging.LoggerFactory;
import org.myrobotlab.test.AbstractTest;
import org.myrobotlab.test.ChaosMonkey;
import org.slf4j.Logger;
// TODO: re-enable this unit test.. but for now it's just too slow ..
// it also opens a swing gui which isn't good.
/**
 * Integration tests for the MyRobotLab OpenCV service (pre-"print locale"
 * revision of this class; the later revision additionally pins the locale
 * to English in testGetClassifications). Starts a SwingGui, so it is not
 * headless-friendly (see class TODO below).
 */
public class OpenCVTest extends AbstractTest {
// Shared OpenCV service instance, started once in setUpBeforeClass.
static OpenCV cv = null;
public final static Logger log = LoggerFactory.getLogger(OpenCVTest.class);
static SwingGui swing = null;
static final String TEST_DIR = "src/test/resources/OpenCV/";
static final String TEST_LOCAL_FACE_FILE_JPEG = "src/test/resources/OpenCV/multipleFaces.jpg";
static final String TEST_LOCAL_MP4 = "src/test/resources/OpenCV/monkeyFace.mp4";
// static final String TEST_LOCAL_MP4 = "src/test/resources/OpenCV/big_buck_bunny.mp4";
// static final String TEST_YOUTUBE = "https://www.youtube.com/watch?v=I9VA-U69yaY";
static final String TEST_INPUT_DIR = "src/test/resources/OpenCV/kinect-data";
static final String TEST_TRANSPARENT_FILE_PNG = "src/test/resources/OpenCV/transparent-bubble.png";
// static final String TEST_REMOTE_FILE_JPG = TEST_LOCAL_FACE_FILE_JPEG;
// static final String TEST_REMOTE_FILE_JPG = "https://en.wikipedia.org/wiki/Isaac_Asimov#/media/File:Isaac.Asimov01.jpg";
static final String TEST_REMOTE_FILE_JPG = "https://upload.wikimedia.org/wikipedia/commons/c/c0/Douglas_adams_portrait_cropped.jpg";
// Effectively "wait forever" for async CV results (was 120000 ms).
private static final int MAX_TIMEOUT = 999999999;//120000;
// TODO - getClassifictions publishClassifications
// TODO - getFaces publishFaces
// TODO - chaos monkey filter tester
// Manual entry point: runs a subset of the tests outside the JUnit runner.
public static void main(String[] args) {
try {
// // LoggingFactory.init("INFO");
setUpBeforeClass();
OpenCVTest test = new OpenCVTest();
test.testGetClassifications();
boolean quitNow = true;
if (quitNow) {
return;
}
// Unreachable while quitNow is true -- kept for manual experimentation.
test.testAllFilterTypes();
/*
* cv.capture("https://www.youtube.com/watch?v=I9VA-U69yaY");// red pill
* // green pill cv.capture(0); cv.stopCapture();
* cv.setGrabberType("Sarxos"); cv.capture(0);
* cv.capture("https://www.youtube.com/watch?v=zDO1Q_ox4vk");
* cv.capture(0);
* cv.capture("https://www.youtube.com/watch?v=zDO1Q_ox4vk");
* cv.capture(0);
*/
test.chaosCaptureTest();
// test.testAllCaptures();
// run junit as java app
JUnitCore junit = new JUnitCore();
Result result = junit.run(OpenCVTest.class);
log.info("Result failures: {}", result.getFailureCount());
} catch (Exception e) {
log.error("main threw", e);
}
}
@Rule
public final TestName testName = new TestName();
// Starts the shared cv + swing services; the commented block below is a
// retired warm-up sequence kept for reference.
@BeforeClass
public static void setUpBeforeClass() throws Exception {
log.warn("========= OpenCVTest - setupbefore class - begin loading libraries =========");
log.warn("========= OpenCVTest - setupbefore class - starting cv =========");
long ts = System.currentTimeMillis();
cv = (OpenCV) Runtime.start("cv", "OpenCV");
swing = (SwingGui) Runtime.start("gui", "SwingGui");
/*
log.warn("========= OpenCVTest - setupbefore class - started cv {} ms =========", System.currentTimeMillis()-ts );
ts = System.currentTimeMillis();
log.warn("========= OpenCVTest - setupbefore class - starting capture =========");
cv.capture(TEST_LOCAL_FACE_FILE_JPEG);
log.warn("========= OpenCVTest - setupbefore class - started capture {} ms =========", System.currentTimeMillis()-ts );
ts = System.currentTimeMillis();
log.warn("========= OpenCVTest - setupbefore class - starting getFaceDetect =========");
cv.getFaceDetect(120000);// two minute wait to load all libraries
log.warn("========= OpenCVTest - setupbefore class - started getFaceDetect {} ms =========", System.currentTimeMillis()-ts );
ts = System.currentTimeMillis();
log.warn("========= OpenCVTest - setupbefore class - starting getClassifications =========");
cv.reset();
OpenCVFilter yoloFilter = cv.addFilter("yolo");
// cv.getClassifications(120000);
cv.capture(TEST_LOCAL_FACE_FILE_JPEG);
log.warn("========= OpenCVTest - setupbefore class - started getClassifications {} ms =========", System.currentTimeMillis()-ts );
ts = System.currentTimeMillis();
log.warn("========= OpenCVTest - setupbefore class - starting getOpenCVData =========");
cv.reset();
cv.capture(TEST_LOCAL_MP4);
cv.getOpenCVData();
log.warn("========= OpenCVTest - setupbefore class - started getOpenCVData {} ms =========", System.currentTimeMillis()-ts );
cv.disableAll();
// if (!isHeadless()) { - no longer needed I believe - SwingGui now handles it
// }
*/
}
// FIXME - do the following test
// test all frame grabber types
// test all filters !
// test remote file source
// test mpeg streamer
// @Ignore
// Randomly interleaves capture/stopCapture calls via ChaosMonkey, then
// verifies face detection still works afterwards.
@Test
public final void chaosCaptureTest() throws Exception {
log.warn("=======OpenCVTest chaosCaptureTest=======");
ChaosMonkey.giveToMonkey(cv, "capture", TEST_LOCAL_FACE_FILE_JPEG);
ChaosMonkey.giveToMonkey(cv, "capture");
ChaosMonkey.giveToMonkey(cv, "stopCapture");
if (hasInternet()) {
// red pill green pill
ChaosMonkey.giveToMonkey(cv, "capture", TEST_LOCAL_MP4);
ChaosMonkey.giveToMonkey(cv, "capture", TEST_REMOTE_FILE_JPG);
}
ChaosMonkey.giveToMonkey(cv, "stopCapture");
if (!cv.isVirtual()) {
// if hasHardware camera index 0 - FIXME should check if camera 0 exists ?
ChaosMonkey.giveToMonkey(cv, "capture", 0);
}
ChaosMonkey.startMonkeys();
ChaosMonkey.monkeyReport();
// check after the monkeys have pounded on it - it still works !
cv.reset();
cv.removeFilters();
cv.capture(TEST_LOCAL_FACE_FILE_JPEG);
List<Classification> data = cv.getFaces(MAX_TIMEOUT);
assertNotNull(data);
assertTrue(data.size() > 0);
}
// Smoke test: face detection on a local jpg with multiple faces.
@Test
public final void simpleFaces() {
log.warn("=======OpenCVTest simpleFaces=======");
cv.reset();
cv.capture(TEST_LOCAL_FACE_FILE_JPEG);
List<Classification> data = cv.getFaces(MAX_TIMEOUT);
assertNotNull(data);
assertTrue(data.size() > 0);
}
// Exercises the default frame grabber across remote jpg, local files,
// a directory source, and the explicit ImageFile grabber.
@Test
public final void testAllCaptures() throws Exception {
log.warn("=======OpenCVTest testAllCaptures=======");
List<Classification> data = null;
/**
* Testing default captures after a reset when the frame grabber type is not
* explicitly set
*/
if (hasInternet()) {
// default internet jpg
cv.reset();
// cv.capture("https://upload.wikimedia.org/wikipedia/commons/c/c0/Douglas_adams_portrait_cropped.jpg");
cv.capture(TEST_REMOTE_FILE_JPG);
data = cv.getFaces(MAX_TIMEOUT);
assertNotNull(data);
assertTrue(data.size() > 0);
}
// default local mp4
cv.reset();
cv.capture(TEST_LOCAL_FACE_FILE_JPEG);
data = cv.getFaces(MAX_TIMEOUT);
assertNotNull(data);
assertTrue(data.size() > 0);
// default local jpg
cv.reset();
cv.capture(TEST_LOCAL_FACE_FILE_JPEG);
data = cv.getFaces(MAX_TIMEOUT);
assertNotNull(data);
assertTrue(data.size() > 0);
// default local directory
cv.reset();
cv.capture(TEST_INPUT_DIR);
// NOTE(review): asserts the stale 'data' from the previous capture --
// the directory capture itself is not actually verified here.
assertNotNull(data);
/**
* Test ImageFile frame grabber
*/
if (hasInternet()) {
cv.reset();
cv.setGrabberType("ImageFile");
cv.capture("https://upload.wikimedia.org/wikipedia/commons/c/c0/Douglas_adams_portrait_cropped.jpg");
data = cv.getFaces(MAX_TIMEOUT);
assertNotNull(data);
assertTrue(data.size() > 0);
}
}
// TODO test enable disable & enableDisplay
/**
* minimally all filters should have the ability to load and run by themselves
* for a second
*/
@Test
public final void testAllFilterTypes() {
log.warn("=======OpenCVTest testAllFilterTypes=======");
log.info("starting all filters test");
cv.reset();
// 19 second blue red pill
cv.capture(TEST_LOCAL_MP4);
for (String fn : OpenCV.POSSIBLE_FILTERS) {
log.info("trying filter {}", fn);
// Heavyweight or external-dependency filters are skipped.
if (fn.startsWith("DL4J") || fn.startsWith("Tesseract") || fn.startsWith("SimpleBlobDetector") || fn.startsWith("Solr") || fn.startsWith("Split")) {
log.info("skipping {}", fn);
continue;
}
cv.addFilter(fn);
sleep(1000);
cv.removeFilters();
}
log.info("done with all filters");
}
// Verifies yolo classification on a local image detects a "person".
@Test
public final void testGetClassifications() {
log.warn("=======OpenCVTest testGetClassifications=======");
cv.reset();
// cv.removeFilters();
log.warn("=======OpenCVTest testGetClassifications - 1=======");
cv.capture(TEST_LOCAL_FACE_FILE_JPEG);
// OpenCVFilter f =
log.warn("=======OpenCVTest testGetClassifications - 2=======");
cv.addFilter("yolo");
log.warn("=======OpenCVTest testGetClassifications - 3=======");
// f.enable();
Map<String, List<Classification>> classifications = cv.getClassifications(MAX_TIMEOUT);
log.warn("=======OpenCVTest testGetClassifications - 4 {} =======", classifications);
assertNotNull(classifications);
log.warn("=======OpenCVTest testGetClassifications - 5 =======");
assertTrue(classifications.containsKey("person"));
log.warn("=======OpenCVTest testGetClassifications - 6 =======");
}
}
|
print locale
|
src/test/java/org/myrobotlab/service/OpenCVTest.java
|
print locale
|
|
Java
|
apache-2.0
|
58a676001e582201d0fa4648dd3d29106c80a879
| 0
|
sekikn/ambari,sekikn/ambari,sekikn/ambari,sekikn/ambari,sekikn/ambari,sekikn/ambari,sekikn/ambari,sekikn/ambari,sekikn/ambari
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ambari.server.controller;
import static java.util.Collections.singletonList;
import java.io.File;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import java.util.TreeSet;
import java.util.regex.Matcher;
import org.apache.ambari.annotations.Experimental;
import org.apache.ambari.annotations.ExperimentalFeature;
import org.apache.ambari.server.AmbariException;
import org.apache.ambari.server.Role;
import org.apache.ambari.server.RoleCommand;
import org.apache.ambari.server.ServiceNotFoundException;
import org.apache.ambari.server.actionmanager.ActionManager;
import org.apache.ambari.server.actionmanager.RequestFactory;
import org.apache.ambari.server.actionmanager.Stage;
import org.apache.ambari.server.actionmanager.StageFactory;
import org.apache.ambari.server.api.services.AmbariMetaInfo;
import org.apache.ambari.server.api.services.stackadvisor.StackAdvisorException;
import org.apache.ambari.server.api.services.stackadvisor.StackAdvisorHelper;
import org.apache.ambari.server.api.services.stackadvisor.StackAdvisorRequest;
import org.apache.ambari.server.api.services.stackadvisor.recommendations.RecommendationResponse;
import org.apache.ambari.server.configuration.Configuration;
import org.apache.ambari.server.controller.internal.RequestResourceFilter;
import org.apache.ambari.server.controller.internal.RequestStageContainer;
import org.apache.ambari.server.controller.utilities.KerberosChecker;
import org.apache.ambari.server.metadata.RoleCommandOrder;
import org.apache.ambari.server.orm.dao.ArtifactDAO;
import org.apache.ambari.server.orm.dao.HostDAO;
import org.apache.ambari.server.orm.dao.KerberosKeytabDAO;
import org.apache.ambari.server.orm.dao.KerberosKeytabPrincipalDAO;
import org.apache.ambari.server.orm.dao.KerberosPrincipalDAO;
import org.apache.ambari.server.orm.entities.ArtifactEntity;
import org.apache.ambari.server.orm.entities.HostEntity;
import org.apache.ambari.server.orm.entities.KerberosKeytabEntity;
import org.apache.ambari.server.orm.entities.KerberosKeytabPrincipalEntity;
import org.apache.ambari.server.security.credential.Credential;
import org.apache.ambari.server.security.credential.PrincipalKeyCredential;
import org.apache.ambari.server.security.encryption.CredentialStoreService;
import org.apache.ambari.server.serveraction.ServerAction;
import org.apache.ambari.server.serveraction.kerberos.CleanupServerAction;
import org.apache.ambari.server.serveraction.kerberos.Component;
import org.apache.ambari.server.serveraction.kerberos.ConfigureAmbariIdentitiesServerAction;
import org.apache.ambari.server.serveraction.kerberos.CreateKeytabFilesServerAction;
import org.apache.ambari.server.serveraction.kerberos.CreatePrincipalsServerAction;
import org.apache.ambari.server.serveraction.kerberos.DestroyPrincipalsServerAction;
import org.apache.ambari.server.serveraction.kerberos.FinalizeKerberosServerAction;
import org.apache.ambari.server.serveraction.kerberos.KDCType;
import org.apache.ambari.server.serveraction.kerberos.KerberosAdminAuthenticationException;
import org.apache.ambari.server.serveraction.kerberos.KerberosIdentityDataFileWriter;
import org.apache.ambari.server.serveraction.kerberos.KerberosInvalidConfigurationException;
import org.apache.ambari.server.serveraction.kerberos.KerberosKDCConnectionException;
import org.apache.ambari.server.serveraction.kerberos.KerberosKDCSSLConnectionException;
import org.apache.ambari.server.serveraction.kerberos.KerberosLDAPContainerException;
import org.apache.ambari.server.serveraction.kerberos.KerberosMissingAdminCredentialsException;
import org.apache.ambari.server.serveraction.kerberos.KerberosOperationException;
import org.apache.ambari.server.serveraction.kerberos.KerberosOperationHandler;
import org.apache.ambari.server.serveraction.kerberos.KerberosOperationHandlerFactory;
import org.apache.ambari.server.serveraction.kerberos.KerberosRealmException;
import org.apache.ambari.server.serveraction.kerberos.KerberosServerAction;
import org.apache.ambari.server.serveraction.kerberos.PrepareDisableKerberosServerAction;
import org.apache.ambari.server.serveraction.kerberos.PrepareEnableKerberosServerAction;
import org.apache.ambari.server.serveraction.kerberos.PrepareKerberosIdentitiesServerAction;
import org.apache.ambari.server.serveraction.kerberos.UpdateKerberosConfigsServerAction;
import org.apache.ambari.server.serveraction.kerberos.stageutils.ResolvedKerberosKeytab;
import org.apache.ambari.server.serveraction.kerberos.stageutils.ResolvedKerberosPrincipal;
import org.apache.ambari.server.stageplanner.RoleGraph;
import org.apache.ambari.server.stageplanner.RoleGraphFactory;
import org.apache.ambari.server.state.Cluster;
import org.apache.ambari.server.state.Clusters;
import org.apache.ambari.server.state.ComponentInfo;
import org.apache.ambari.server.state.Config;
import org.apache.ambari.server.state.ConfigHelper;
import org.apache.ambari.server.state.Host;
import org.apache.ambari.server.state.HostState;
import org.apache.ambari.server.state.PropertyInfo;
import org.apache.ambari.server.state.SecurityType;
import org.apache.ambari.server.state.Service;
import org.apache.ambari.server.state.ServiceComponent;
import org.apache.ambari.server.state.ServiceComponentHost;
import org.apache.ambari.server.state.ServiceInfo;
import org.apache.ambari.server.state.StackId;
import org.apache.ambari.server.state.State;
import org.apache.ambari.server.state.ValueAttributesInfo;
import org.apache.ambari.server.state.kerberos.AbstractKerberosDescriptorContainer;
import org.apache.ambari.server.state.kerberos.KerberosComponentDescriptor;
import org.apache.ambari.server.state.kerberos.KerberosConfigurationDescriptor;
import org.apache.ambari.server.state.kerberos.KerberosDescriptor;
import org.apache.ambari.server.state.kerberos.KerberosDescriptorFactory;
import org.apache.ambari.server.state.kerberos.KerberosIdentityDescriptor;
import org.apache.ambari.server.state.kerberos.KerberosKeytabDescriptor;
import org.apache.ambari.server.state.kerberos.KerberosPrincipalDescriptor;
import org.apache.ambari.server.state.kerberos.KerberosPrincipalType;
import org.apache.ambari.server.state.kerberos.KerberosServiceDescriptor;
import org.apache.ambari.server.state.kerberos.VariableReplacementHelper;
import org.apache.ambari.server.state.svccomphost.ServiceComponentHostServerActionEvent;
import org.apache.ambari.server.utils.StageUtils;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.directory.server.kerberos.shared.keytab.Keytab;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.google.gson.JsonArray;
import com.google.gson.JsonObject;
import com.google.gson.JsonPrimitive;
import com.google.inject.Inject;
import com.google.inject.Injector;
import com.google.inject.Singleton;
import com.google.inject.persist.Transactional;
@Singleton
public class KerberosHelperImpl implements KerberosHelper {
// Base directory on the Ambari server host used for Kerberos-related working/log files.
public static final String BASE_LOG_DIR = "/tmp/ambari";

private static final Logger LOG = LoggerFactory.getLogger(KerberosHelperImpl.class);

/**
 * The set of states a component may be in, indicating that it has been previously installed on
 * the cluster.
 * <p>
 * These values are important when trying to determine the state of the cluster when adding new components
 */
private static final Set<State> PREVIOUSLY_INSTALLED_STATES = EnumSet.of(State.INSTALLED, State.STARTED, State.DISABLED);

// Custom-command names used for agent-side keytab management operations.
public static final String CHECK_KEYTABS = "CHECK_KEYTABS";
public static final String SET_KEYTAB = "SET_KEYTAB";
public static final String REMOVE_KEYTAB = "REMOVE_KEYTAB";

// Injected framework, metadata, and persistence collaborators (Guice-managed).
@Inject
private AmbariCustomCommandExecutionHelper customCommandExecutionHelper;
@Inject
private AmbariManagementController ambariManagementController;
@Inject
private AmbariMetaInfo ambariMetaInfo;
@Inject
private ActionManager actionManager;
@Inject
private RequestFactory requestFactory;
@Inject
private StageFactory stageFactory;
@Inject
private RoleGraphFactory roleGraphFactory;
@Inject
private Clusters clusters;
@Inject
private ConfigHelper configHelper;
@Inject
private VariableReplacementHelper variableReplacementHelper;
@Inject
private Configuration configuration;
@Inject
private KerberosOperationHandlerFactory kerberosOperationHandlerFactory;
@Inject
private KerberosDescriptorFactory kerberosDescriptorFactory;
@Inject
private ArtifactDAO artifactDAO;
@Inject
private KerberosPrincipalDAO kerberosPrincipalDAO;
@Inject
private KerberosKeytabDAO kerberosKeytabDAO;
@Inject
private KerberosKeytabPrincipalDAO kerberosKeytabPrincipalDAO;
@Inject
private HostDAO hostDAO;

/**
 * The injector used to create new instances of helper classes like CreatePrincipalsServerAction
 * and CreateKeytabFilesServerAction.
 */
@Inject
private Injector injector;

/**
 * The secure storage facility to use to store KDC administrator credential.
 */
@Inject
private CredentialStoreService credentialStoreService;

@Inject
private StackAdvisorHelper stackAdvisorHelper;
/**
 * Enables or disables Kerberos on the cluster according to the requested security type,
 * delegating the actual staging work to {@link #handle} with the matching handler.
 */
@Override
public RequestStageContainer toggleKerberos(Cluster cluster, SecurityType securityType,
                                            RequestStageContainer requestStageContainer,
                                            Boolean manageIdentities)
    throws AmbariException, KerberosOperationException {

  KerberosDetails kerberosDetails = getKerberosDetails(cluster, manageIdentities);

  // Update KerberosDetails with the new security type - the current one in the cluster is the "old" value
  kerberosDetails.setSecurityType(securityType);

  switch (securityType) {
    case KERBEROS:
      LOG.info("Configuring Kerberos for realm {} on cluster, {}", kerberosDetails.getDefaultRealm(), cluster.getClusterName());
      requestStageContainer = handle(cluster, kerberosDetails, null, null, null, null, requestStageContainer, new EnableKerberosHandler());
      break;

    case NONE:
      LOG.info("Disabling Kerberos from cluster, {}", cluster.getClusterName());
      requestStageContainer = handle(cluster, kerberosDetails, null, null, null, null, requestStageContainer, new DisableKerberosHandler());
      break;

    default:
      throw new AmbariException(String.format("Unexpected security type value: %s", securityType.name()));
  }

  return requestStageContainer;
}
/**
 * Executes any supported custom Kerberos operations requested via the request properties
 * (currently only REGENERATE_KEYTABS). Operations are matched by their lower-cased enum name
 * appearing as a key in {@code requestProperties}.
 *
 * @param requestProperties     the request's directive map; may be null (no-op)
 * @param requestStageContainer an existing stage container to append to, or null
 * @param manageIdentities      whether Ambari should manage the Kerberos identities; may be null
 * @return the (possibly newly created) request stage container
 */
@Override
public RequestStageContainer executeCustomOperations(Cluster cluster, Map<String, String> requestProperties,
                                                     RequestStageContainer requestStageContainer,
                                                     Boolean manageIdentities)
    throws AmbariException, KerberosOperationException {

  if (requestProperties != null) {
    for (SupportedCustomOperation operation : SupportedCustomOperation.values()) {
      if (requestProperties.containsKey(operation.name().toLowerCase())) {
        String value = requestProperties.get(operation.name().toLowerCase());

        // The operation specific logic is kept in one place and described here
        switch (operation) {
          case REGENERATE_KEYTABS:
            // Regeneration only makes sense while the cluster is Kerberized.
            if (cluster.getSecurityType() != SecurityType.KERBEROS) {
              throw new AmbariException(String.format("Custom operation %s can only be requested with the security type cluster property: %s", operation.name(), SecurityType.KERBEROS.name()));
            }

            boolean retryAllowed = false;
            if (requestProperties.containsKey(ALLOW_RETRY)) {
              String allowRetryString = requestProperties.get(ALLOW_RETRY);
              retryAllowed = Boolean.parseBoolean(allowRetryString);
            }

            CreatePrincipalsAndKeytabsHandler handler = null;

            // Optional host/component filters narrow the regeneration scope.
            Set<String> hostFilter = parseHostFilter(requestProperties);
            Map<String, Set<String>> serviceComponentFilter = parseComponentFilter(requestProperties);

            // Configurations are updated unless the request explicitly says to ignore them.
            boolean updateConfigurations = !requestProperties.containsKey(DIRECTIVE_IGNORE_CONFIGS)
                || !"true".equalsIgnoreCase(requestProperties.get(DIRECTIVE_IGNORE_CONFIGS));

            // No host filter (or the "*" wildcard) means all hosts are in scope.
            boolean forceAllHosts = (hostFilter == null) || (hostFilter.contains("*"));

            // "true"/"all" recreates every keytab; "missing" only creates absent ones.
            if ("true".equalsIgnoreCase(value) || "all".equalsIgnoreCase(value)) {
              handler = new CreatePrincipalsAndKeytabsHandler(KerberosServerAction.OperationType.RECREATE_ALL, updateConfigurations, forceAllHosts, true);
            } else if ("missing".equalsIgnoreCase(value)) {
              handler = new CreatePrincipalsAndKeytabsHandler(KerberosServerAction.OperationType.CREATE_MISSING, updateConfigurations, forceAllHosts, true);
            }

            if (handler != null) {
              handler.setRetryAllowed(retryAllowed);

              requestStageContainer = handle(cluster, getKerberosDetails(cluster, manageIdentities),
                  serviceComponentFilter, hostFilter, null, null, requestStageContainer, handler);
            } else {
              throw new AmbariException(String.format("Unexpected directive value: %s", value));
            }

            break;

          default: // No other operations are currently supported
            throw new AmbariException(String.format("Custom operation not supported: %s", operation.name()));
        }
      }
    }
  }

  return requestStageContainer;
}
/**
 * Parses the 'Kerberos/hosts' property to get the list of hosts for a 'regenerate_keytabs' request.
 * Must be a string with a comma-separated list of hosts. Absent or misspelled hosts must be silently
 * ignored by caller code.
 *
 * @param requestProperties the request's directive map
 * @return an immutable set of host names, or {@code null} if the directive is not present
 */
public static Set<String> parseHostFilter(final Map<String, String> requestProperties) {
  if (requestProperties.containsKey(DIRECTIVE_HOSTS)) {
    return ImmutableSet.copyOf(requestProperties.get(DIRECTIVE_HOSTS).split(","));
  }
  return null;
}
/**
 * Parses the 'Kerberos/components' property to get the component filter for a 'regenerate_keytabs'
 * request. The value is a comma-separated list of entries following the pattern
 * 'SERVICENAME:COMPONENTNAME;ANOTHERCOMPONENTNAME' — for example:
 * HDFS:NAMENODE;DATANODE,YARN:RESOURCEMANAGER,ZOOKEEPER:ZOOKEEPER_SERVER;ZOOKEEPER_CLIENT.
 * An entry with no explicit components maps to the "*" wildcard. Absent or misspelled components
 * and services must be silently ignored by caller code.
 *
 * @param requestProperties the request's directive map
 * @return an immutable map of service name to component names, or {@code null} if the directive is absent
 */
public static Map<String, Set<String>> parseComponentFilter(final Map<String, String> requestProperties) {
  if (!requestProperties.containsKey(DIRECTIVE_COMPONENTS)) {
    return null;
  }

  ImmutableMap.Builder<String, Set<String>> filterBuilder = ImmutableMap.builder();

  for (String serviceEntry : requestProperties.get(DIRECTIVE_COMPONENTS).split(",")) {
    String[] parts = serviceEntry.split(":");

    Set<String> componentNames = (parts.length == 2)
        ? ImmutableSet.copyOf(parts[1].split(";"))
        : ImmutableSet.of("*");

    filterBuilder.put(parts[0], componentNames);
  }

  return filterBuilder.build();
}
/**
 * Ensures the Kerberos identities selected by the given filters exist, delegating to
 * {@link #handle} with a {@code CreatePrincipalsAndKeytabsHandler} using the DEFAULT
 * operation type and all optional behaviors (configuration updates, forcing all hosts)
 * disabled.
 */
@Override
public RequestStageContainer ensureIdentities(Cluster cluster, Map<String, ? extends Collection<String>> serviceComponentFilter,
                                              Set<String> hostFilter, Collection<String> identityFilter, Set<String> hostsToForceKerberosOperations,
                                              RequestStageContainer requestStageContainer, Boolean manageIdentities)
    throws AmbariException, KerberosOperationException {

  KerberosDetails kerberosDetails = getKerberosDetails(cluster, manageIdentities);

  CreatePrincipalsAndKeytabsHandler createHandler =
      new CreatePrincipalsAndKeytabsHandler(KerberosServerAction.OperationType.DEFAULT, false, false, false);

  return handle(cluster, kerberosDetails, serviceComponentFilter, hostFilter, identityFilter,
      hostsToForceKerberosOperations, requestStageContainer, createHandler);
}
/**
 * Deletes the Kerberos identities selected by the given filters, delegating to
 * {@link #handle} with a {@code DeletePrincipalsAndKeytabsHandler}.
 */
@Override
public RequestStageContainer deleteIdentities(Cluster cluster, Map<String, ? extends Collection<String>> serviceComponentFilter,
                                              Set<String> hostFilter, Collection<String> identityFilter, RequestStageContainer requestStageContainer,
                                              Boolean manageIdentities)
    throws AmbariException, KerberosOperationException {

  KerberosDetails kerberosDetails = getKerberosDetails(cluster, manageIdentities);

  return handle(cluster, kerberosDetails, serviceComponentFilter, hostFilter, identityFilter, null,
      requestStageContainer, new DeletePrincipalsAndKeytabsHandler());
}
/**
 * Deletes the kerberos identities of the given component, even if the component is already deleted.
 *
 * @param cluster    the relevant cluster
 * @param components the components whose Kerberos identities are to be removed
 * @param identities the identity names to delete; an empty set makes this a no-op
 * @throws AmbariException            if building or persisting the request stages fails
 * @throws KerberosOperationException if a Kerberos-related error occurs (e.g. invalid KDC credentials)
 */
@Override
public void deleteIdentities(Cluster cluster, List<Component> components, Set<String> identities) throws AmbariException, KerberosOperationException {
  if (identities.isEmpty()) {
    return;
  }
  // Bug fix: the original message lacked its "{}" placeholder, so the identity set was never logged.
  LOG.info("Deleting identities: {}", identities);

  KerberosDetails kerberosDetails = getKerberosDetails(cluster, null);
  validateKDCCredentials(kerberosDetails, cluster);

  // Scratch directory for intermediate identity data consumed by the server-side actions.
  File dataDirectory = createTemporaryDirectory();

  RoleCommandOrder roleCommandOrder = ambariManagementController.getRoleCommandOrder(cluster);
  DeleteIdentityHandler handler = new DeleteIdentityHandler(customCommandExecutionHelper, configuration.getDefaultServerTaskTimeout(), stageFactory, ambariManagementController);
  DeleteIdentityHandler.CommandParams commandParameters = new DeleteIdentityHandler.CommandParams(
      components,
      identities,
      ambariManagementController.getAuthName(),
      dataDirectory,
      kerberosDetails.getDefaultRealm(),
      kerberosDetails.getKdcType());

  // Stage the delete-identity work as a new ordered request and persist it for execution.
  OrderedRequestStageContainer stageContainer = new OrderedRequestStageContainer(
      roleGraphFactory,
      roleCommandOrder,
      new RequestStageContainer(actionManager.getNextRequestId(), null, requestFactory, actionManager));
  handler.addDeleteIdentityStages(cluster, stageContainer, commandParameters, kerberosDetails.manageIdentities());
  stageContainer.getRequestStageContainer().persist();
}
/**
 * Computes and stores the Kerberos-related configuration updates needed for the services
 * selected by {@code serviceFilter}, based on what is currently installed on the cluster.
 *
 * @param cluster       the relevant cluster
 * @param serviceFilter a map of service names to component names limiting the scope; may be null
 * @throws AmbariException                       if cluster data cannot be read or updated
 * @throws KerberosInvalidConfigurationException if the Kerberos configuration is invalid
 */
@Override
public void configureServices(Cluster cluster, Map<String, Collection<String>> serviceFilter)
    throws AmbariException, KerberosInvalidConfigurationException {
  final Map<String, Set<String>> installedServices = new HashMap<>();
  final Set<String> previouslyExistingServices = new HashSet<>();

  // Calculate the map of installed services to installed components.
  // We can create the map in the "shouldIncludeCommand" Command to avoid having to iterate
  // over the returned ServiceComponentHost List.
  getServiceComponentHosts(cluster,
      new Command<Boolean, ServiceComponentHost>() {
        @Override
        public Boolean invoke(ServiceComponentHost sch) throws AmbariException {
          if (sch != null) {
            String serviceName = sch.getServiceName();

            // computeIfAbsent replaces the original get/null-check/put sequence (same behavior).
            installedServices
                .computeIfAbsent(serviceName, s -> new HashSet<>())
                .add(sch.getServiceComponentName());

            // Determine if this component was PREVIOUSLY installed, which implies that its
            // containing service was PREVIOUSLY installed. Set.add is idempotent, so no
            // contains() pre-check is needed.
            if (PREVIOUSLY_INSTALLED_STATES.contains(sch.getState())) {
              previouslyExistingServices.add(serviceName);
            }

            return true;
          }

          return false;
        }
      });

  Map<String, Map<String, String>> existingConfigurations = calculateExistingConfigurations(cluster, null);
  Map<String, Map<String, String>> updates = getServiceConfigurationUpdates(cluster,
      existingConfigurations, installedServices, serviceFilter, previouslyExistingServices, true, true);

  // Store the updates...
  for (Map.Entry<String, Map<String, String>> entry : updates.entrySet()) {
    configHelper.updateConfigType(cluster, cluster.getDesiredStackVersion(),
        ambariManagementController, entry.getKey(), entry.getValue(), null,
        ambariManagementController.getAuthName(), "Enabling Kerberos for added components");
  }
}
/**
 * Calculates the Kerberos-related configuration updates for the installed services by merging
 * identity- and component-level configurations from the cluster's Kerberos descriptor, then
 * (optionally) applying Stack Advisor recommendations on top.
 *
 * @param existingConfigurations     the cluster's current configurations (config type -> properties)
 * @param installedServices          map of installed service names to their installed components
 * @param serviceFilter              optional service/component filter; null means all
 * @param previouslyExistingServices services that existed before this operation, used to decide
 *                                   whether component configurations should be (re)applied
 * @param kerberosEnabled            whether Kerberos is (to be) enabled
 * @param applyStackAdvisorUpdates   whether to run the results through the Stack Advisor
 * @return the map of config type to property updates
 */
@Override
public Map<String, Map<String, String>> getServiceConfigurationUpdates(Cluster cluster,
                                                                       Map<String, Map<String, String>> existingConfigurations,
                                                                       Map<String, Set<String>> installedServices,
                                                                       Map<String, Collection<String>> serviceFilter,
                                                                       Set<String> previouslyExistingServices,
                                                                       boolean kerberosEnabled,
                                                                       boolean applyStackAdvisorUpdates)
    throws KerberosInvalidConfigurationException, AmbariException {
  Map<String, Map<String, String>> kerberosConfigurations = new HashMap<>();
  KerberosDetails kerberosDetails = getKerberosDetails(cluster, null);
  KerberosDescriptor kerberosDescriptor = getKerberosDescriptor(cluster, false);

  Map<String, String> kerberosDescriptorProperties = kerberosDescriptor.getProperties();

  // Work on a deep copy so the caller's existing configurations are not mutated.
  Map<String, Map<String, String>> configurations = addAdditionalConfigurations(cluster,
      deepCopy(existingConfigurations), null, kerberosDescriptorProperties);

  Map<String, Set<String>> propertiesToIgnore = new HashMap<>();

  // If Ambari is managing it own identities then add AMBARI to the set of installed service so
  // that its Kerberos descriptor entries will be included.
  if (createAmbariIdentities(existingConfigurations.get(KERBEROS_ENV))) {
    installedServices = new HashMap<>(installedServices);
    installedServices.put(RootService.AMBARI.name(), Collections.singleton(RootComponent.AMBARI_SERVER.name()));
  }

  // Create the context to use for filtering Kerberos Identities based on the state of the cluster
  Map<String, Object> filterContext = new HashMap<>();
  filterContext.put("configurations", configurations);
  filterContext.put("services", installedServices.keySet());

  for (Map.Entry<String, Set<String>> installedServiceEntry : installedServices.entrySet()) {
    String installedService = installedServiceEntry.getKey();

    if ((serviceFilter == null) || (serviceFilter.containsKey(installedService))) {
      Collection<String> componentFilter = (serviceFilter == null) ? null : serviceFilter.get(installedService);
      Set<String> installedComponents = installedServiceEntry.getValue();

      // Set properties...
      KerberosServiceDescriptor serviceDescriptor = kerberosDescriptor.getService(installedService);

      if (serviceDescriptor != null) {
        if (installedComponents != null) {
          boolean servicePreviouslyExisted = (previouslyExistingServices != null) && previouslyExistingServices.contains(installedService);

          for (String installedComponent : installedComponents) {
            if ((componentFilter == null) || componentFilter.contains(installedComponent)) {
              KerberosComponentDescriptor componentDescriptor = serviceDescriptor.getComponent(installedComponent);

              if (componentDescriptor != null) {
                // Merge service-level then component-level identity configurations into the result.
                Map<String, Map<String, String>> identityConfigurations;

                identityConfigurations = getIdentityConfigurations(serviceDescriptor.getIdentities(true, filterContext));
                processIdentityConfigurations(identityConfigurations, kerberosConfigurations, configurations, propertiesToIgnore);

                identityConfigurations = getIdentityConfigurations(componentDescriptor.getIdentities(true, filterContext));
                processIdentityConfigurations(identityConfigurations, kerberosConfigurations, configurations, propertiesToIgnore);

                // Component configurations are applied fully only for newly added services.
                mergeConfigurations(kerberosConfigurations,
                    componentDescriptor.getConfigurations(!servicePreviouslyExisted), configurations, null);
              }
            }
          }
        }
      }
    }
  }

  setAuthToLocalRules(cluster, kerberosDescriptor, kerberosDetails.getDefaultRealm(), installedServices, configurations, kerberosConfigurations, false);

  return (applyStackAdvisorUpdates)
      ? applyStackAdvisorUpdates(cluster, installedServices.keySet(), configurations, kerberosConfigurations, propertiesToIgnore,
      new HashMap<>(), kerberosEnabled)
      : kerberosConfigurations;
}
/**
 * Adds host assignments, recommended by the Stack Advisor, to the configuration map (clusterHostInfo)
 * for the components specified in the component filter <code>componentFilter</code> (or all if the
 * component filter is <code>null</code>).
 *
 * @param cluster         the cluster
 * @param services        the relevant services to consider
 * @param componentFilter the set of components to add to the clusterHostInfo structure
 * @param configurations  the configurations map to update
 * @throws AmbariException if an error occurs
 */
private void applyStackAdvisorHostRecommendations(Cluster cluster,
                                                  Set<String> services,
                                                  Set<String> componentFilter,
                                                  Map<String, Map<String, String>> configurations)
    throws AmbariException {
  StackId stackVersion = cluster.getCurrentStackVersion();

  // Collect the FQDNs of all cluster hosts for the Stack Advisor request.
  List<String> hostNames = new ArrayList<>();
  Collection<Host> hosts = cluster.getHosts();

  if (hosts != null) {
    for (Host host : hosts) {
      hostNames.add(host.getHostName());
    }
  }

  StackAdvisorRequest request = StackAdvisorRequest.StackAdvisorRequestBuilder
      .forStack(stackVersion.getStackName(), stackVersion.getStackVersion())
      .forServices(services)
      .forHosts(hostNames)
      .withComponentHostsMap(cluster.getServiceComponentHostMap(null, services))
      .ofType(StackAdvisorRequest.StackAdvisorRequestType.HOST_GROUPS)
      .build();

  try {
    RecommendationResponse response = stackAdvisorHelper.recommend(request);

    // Null-safe drill-down: any missing level means there are no recommendations to apply.
    RecommendationResponse.Recommendation recommendation = (response == null) ? null : response.getRecommendations();
    RecommendationResponse.Blueprint blueprint = (recommendation == null) ? null : recommendation.getBlueprint();
    Set<RecommendationResponse.HostGroup> hostGroups = (blueprint == null) ? null : blueprint.getHostGroups();

    if (hostGroups != null) {
      RecommendationResponse.BlueprintClusterBinding blueprintBinding = recommendation.getBlueprintClusterBinding();

      // Index the binding host groups by name for quick lookup while iterating recommendations.
      Map<String, RecommendationResponse.BindingHostGroup> bindingMap = new HashMap<>();

      if (blueprintBinding != null) {
        Set<RecommendationResponse.BindingHostGroup> bindingHostGroups = blueprintBinding.getHostGroups();

        if (bindingHostGroups != null) {
          for (RecommendationResponse.BindingHostGroup bindingHostGroup : bindingHostGroups) {
            bindingMap.put(bindingHostGroup.getName(), bindingHostGroup);
          }
        }
      }

      // Get (and created if needed) the clusterHostInfo map
      Map<String, String> clusterHostInfoMap = configurations.get("clusterHostInfo");
      if (clusterHostInfoMap == null) {
        clusterHostInfoMap = new HashMap<>();
        configurations.put("clusterHostInfo", clusterHostInfoMap);
      }

      // Iterate through the recommendations to find the recommended host assignments
      for (RecommendationResponse.HostGroup hostGroup : hostGroups) {
        Set<Map<String, String>> components = hostGroup.getComponents();

        if (components != null) {
          RecommendationResponse.BindingHostGroup binding = bindingMap.get(hostGroup.getName());

          if (binding != null) {
            Set<Map<String, String>> hostGroupHosts = binding.getHosts();

            if (hostGroupHosts != null) {
              for (Map<String, String> component : components) {
                String componentName = component.get("name");

                // If the component filter is null or the current component is found in the filter,
                // include it in the map
                if ((componentFilter == null) || componentFilter.contains(componentName)) {
                  String key = StageUtils.getClusterHostInfoKey(componentName);

                  // TreeSet keeps the host list sorted and de-duplicated.
                  Set<String> fqdns = new TreeSet<>();

                  // Values are a comma-delimited list of hosts.
                  // If a value exists, split it and add the tokens to the set
                  if (!StringUtils.isEmpty(clusterHostInfoMap.get(key))) {
                    fqdns.addAll(Arrays.asList(clusterHostInfoMap.get(key).split(",")));
                  }

                  // Add the set of hosts for the current host group
                  for (Map<String, String> hostGroupHost : hostGroupHosts) {
                    String fqdn = hostGroupHost.get("fqdn");
                    if (!StringUtils.isEmpty(fqdn)) {
                      fqdns.add(fqdn);
                    }
                  }

                  // create the comma-delimited list of hosts
                  clusterHostInfoMap.put(key, StringUtils.join(fqdns, ','));
                }
              }
            }
          }
        }
      }
    }
  } catch (StackAdvisorException e) {
    LOG.error("Failed to obtain the recommended host groups for the preconfigured components.", e);
    throw new AmbariException(e.getMessage(), e);
  }
}
/**
 * Applies Stack Advisor recommendations on top of the calculated Kerberos configuration updates.
 * For each relevant stack, builds a KERBEROS_CONFIGURATIONS Stack Advisor request from the
 * existing and pending Kerberos configurations, then merges the recommended additions/updates
 * into {@code kerberosConfigurations} and records recommended deletions in
 * {@code propertiesToRemove}.
 *
 * @param existingConfigurations the cluster's current configurations (config type -> properties)
 * @param kerberosConfigurations the pending Kerberos configuration updates; updated in place
 * @param propertiesToIgnore     properties that must not be touched, per config type; may be null
 * @param propertiesToRemove     output map of properties recommended for removal, per config type
 * @param kerberosEnabled        whether Kerberos is (to be) enabled; reflected in cluster-env
 * @return {@code kerberosConfigurations}, updated with the Stack Advisor recommendations
 * @throws AmbariException if the Stack Advisor invocation or recommendation processing fails
 */
@Override
public Map<String, Map<String, String>> applyStackAdvisorUpdates(Cluster cluster, Set<String> services,
                                                                 Map<String, Map<String, String>> existingConfigurations,
                                                                 Map<String, Map<String, String>> kerberosConfigurations,
                                                                 Map<String, Set<String>> propertiesToIgnore,
                                                                 Map<String, Set<String>> propertiesToRemove,
                                                                 boolean kerberosEnabled) throws AmbariException {

  List<String> hostNames = new ArrayList<>();
  Collection<Host> hosts = cluster.getHosts();

  if (hosts != null) {
    for (Host host : hosts) {
      hostNames.add(host.getHostName());
    }
  }

  // Don't actually call the stack advisor if no hosts are in the cluster, else the stack advisor
  // will throw a StackAdvisorException stating "Hosts and services must not be empty".
  // This could happen when enabling Kerberos while installing a cluster via Blueprints due to the
  // way hosts are discovered during the install process.
  if (!hostNames.isEmpty()) {
    // Build the configurations payload for the Stack Advisor request from the existing configs...
    Map<String, Map<String, Map<String, String>>> requestConfigurations = new HashMap<>();
    if (existingConfigurations != null) {
      for (Map.Entry<String, Map<String, String>> configuration : existingConfigurations.entrySet()) {
        Map<String, Map<String, String>> properties = new HashMap<>();
        String configType = configuration.getKey();
        Map<String, String> configurationProperties = configuration.getValue();

        if (configurationProperties == null) {
          configurationProperties = Collections.emptyMap();
        }

        // Reflect the target security state in cluster-env so the advisor sees it.
        if ("cluster-env".equals(configType)) {
          configurationProperties = new HashMap<>(configurationProperties);
          configurationProperties.put("security_enabled", (kerberosEnabled) ? "true" : "false");
        }

        properties.put("properties", configurationProperties);
        requestConfigurations.put(configType, properties);
      }
    }

    // Apply the current Kerberos properties...
    for (Map.Entry<String, Map<String, String>> configuration : kerberosConfigurations.entrySet()) {
      String configType = configuration.getKey();
      Map<String, String> configurationProperties = configuration.getValue();

      if ((configurationProperties != null) && !configurationProperties.isEmpty()) {
        Map<String, Map<String, String>> requestConfiguration = requestConfigurations.get(configType);

        if (requestConfiguration == null) {
          requestConfiguration = new HashMap<>();
          requestConfigurations.put(configType, requestConfiguration);
        }

        Map<String, String> requestConfigurationProperties = requestConfiguration.get("properties");
        if (requestConfigurationProperties == null) {
          requestConfigurationProperties = new HashMap<>();
        } else {
          // Copy before merging so the existing-configuration view is not mutated.
          requestConfigurationProperties = new HashMap<>(requestConfigurationProperties);
        }

        requestConfigurationProperties.putAll(configurationProperties);
        requestConfiguration.put("properties", requestConfigurationProperties);
      }
    }

    // Invoke the advisor once per distinct stack among the requested services.
    Set<StackId> visitedStacks = new HashSet<>();
    Map<String, Service> installedServices = cluster.getServices();

    for (String serviceName : services) {
      Service service = installedServices.get(serviceName);

      // Skip services that are not really installed
      if (service == null) {
        continue;
      }

      StackId stackId = service.getDesiredStackId();

      if (visitedStacks.contains(stackId)) {
        continue;
      }

      // Resolve variable references in the request payload; propagate any changed values back
      // into kerberosConfigurations so both views stay consistent.
      for (Map.Entry<String, Map<String, Map<String, String>>> config : requestConfigurations.entrySet()) {
        for (Map<String, String> properties : config.getValue().values()) {
          for (Map.Entry<String, String> property : properties.entrySet()) {
            String oldValue = property.getValue();
            String updatedValue = variableReplacementHelper.replaceVariables(property.getValue(), existingConfigurations);
            if (!StringUtils.equals(oldValue, updatedValue) && !config.getKey().isEmpty()) {
              property.setValue(updatedValue);
              if (kerberosConfigurations.containsKey(config.getKey())) {
                kerberosConfigurations.get(config.getKey()).put(property.getKey(), updatedValue);
              } else {
                // Fixed: was a raw Map; parameterize to match the value type of kerberosConfigurations.
                Map<String, String> kerberosConfigProperties = new HashMap<>();
                kerberosConfigProperties.put(property.getKey(), updatedValue);
                kerberosConfigurations.put(config.getKey(), kerberosConfigProperties);
              }
            }
          }
        }
      }

      StackAdvisorRequest request = StackAdvisorRequest.StackAdvisorRequestBuilder
          .forStack(stackId.getStackName(), stackId.getStackVersion())
          .forServices(services)
          .forHosts(hostNames)
          .withComponentHostsMap(cluster.getServiceComponentHostMap(null, services))
          .withConfigurations(requestConfigurations)
          .ofType(StackAdvisorRequest.StackAdvisorRequestType.KERBEROS_CONFIGURATIONS)
          .build();

      try {
        RecommendationResponse response = stackAdvisorHelper.recommend(request);

        RecommendationResponse.Recommendation recommendation = (response == null) ? null : response.getRecommendations();
        RecommendationResponse.Blueprint blueprint = (recommendation == null) ? null : recommendation.getBlueprint();
        Map<String, RecommendationResponse.BlueprintConfigurations> configurations = (blueprint == null) ? null : blueprint.getConfigurations();

        if (configurations != null) {
          for (Map.Entry<String, RecommendationResponse.BlueprintConfigurations> configuration : configurations.entrySet()) {
            String configType = configuration.getKey();
            Map<String, String> recommendedConfigProperties = configuration.getValue().getProperties();
            Map<String, ValueAttributesInfo> recommendedConfigPropertyAttributes = configuration.getValue().getPropertyAttributes();

            Map<String, String> existingConfigProperties = (existingConfigurations == null) ? null : existingConfigurations.get(configType);
            Map<String, String> kerberosConfigProperties = kerberosConfigurations.get(configType);
            Set<String> ignoreProperties = (propertiesToIgnore == null) ? null : propertiesToIgnore.get(configType);

            addRecommendedPropertiesForConfigType(kerberosConfigurations, configType, recommendedConfigProperties,
                existingConfigProperties, kerberosConfigProperties, ignoreProperties);

            if (recommendedConfigPropertyAttributes != null) {
              removeRecommendedPropertiesForConfigType(configType, recommendedConfigPropertyAttributes,
                  existingConfigProperties, kerberosConfigurations, ignoreProperties, propertiesToRemove);
            }
          }
        }
      } catch (Exception e) {
        // Boundary catch: wrap any advisor/processing failure as an AmbariException, preserving the cause.
        throw new AmbariException(e.getMessage(), e);
      }

      visitedStacks.add(stackId);
    }

  }

  return kerberosConfigurations;
}
/**
 * Merges Stack Advisor-recommended properties for one config type into
 * {@code kerberosConfigurations}: a recommended property is added when neither the pending
 * Kerberos updates nor the existing configurations define it, and updated when the pending
 * Kerberos value differs from the recommendation. Properties listed in
 * {@code ignoreProperties} are left untouched.
 *
 * @param kerberosConfigurations   the pending Kerberos configuration updates; updated in place
 * @param configType               the config type being processed
 * @param recommendedConfigProperties the Stack Advisor-recommended properties for this type
 * @param existingConfigProperties the cluster's existing properties for this type; may be null
 * @param kerberosConfigProperties the pending Kerberos properties for this type; may be null
 * @param ignoreProperties         property names that must not be modified; may be null
 */
private void addRecommendedPropertiesForConfigType(Map<String, Map<String, String>> kerberosConfigurations,
                                                   String configType, Map<String, String> recommendedConfigProperties,
                                                   Map<String, String> existingConfigProperties,
                                                   Map<String, String> kerberosConfigProperties,
                                                   Set<String> ignoreProperties) {

  for (Map.Entry<String, String> property : recommendedConfigProperties.entrySet()) {
    String propertyName = property.getKey();

    if ((ignoreProperties == null) || !ignoreProperties.contains(propertyName)) {
      String recommendedValue = property.getValue();

      if ((kerberosConfigProperties == null) || !kerberosConfigProperties.containsKey(propertyName)) {
        // There is no explicit update for this property from the Kerberos Descriptor...
        // add the config and property if it also does not exist in the existing configurations
        if ((existingConfigProperties == null) || !existingConfigProperties.containsKey(propertyName)) {
          LOG.debug("Adding Kerberos configuration based on StackAdvisor recommendation:" +
                  "\n\tConfigType: {}\n\tProperty: {}\n\tValue: {}",
              configType, propertyName, recommendedValue);

          // Lazily create the per-type map on first addition and register it in the result.
          if (kerberosConfigProperties == null) {
            kerberosConfigProperties = new HashMap<>();
            kerberosConfigurations.put(configType, kerberosConfigProperties);
          }

          kerberosConfigProperties.put(propertyName, recommendedValue);
        }
      } else {
        String value = kerberosConfigProperties.get(propertyName);

        // Null-safe inequality check between the pending and recommended values.
        if ((value == null) ? (recommendedValue != null) : !value.equals(recommendedValue)) {
          // If the recommended value is a change, automatically change it.
          LOG.debug("Updating Kerberos configuration based on StackAdvisor recommendation:" +
                  "\n\tConfigType: {}\n\tProperty: {}\n\tOld Value: {}\n\tNew Value: {}",
              configType, propertyName, (value == null) ? "" : value, (recommendedValue == null) ? "" : recommendedValue);

          kerberosConfigProperties.put(propertyName, recommendedValue);
        }
      }
    }
  }
}
/**
 * Collects properties that the StackAdvisor recommends deleting.
 * <p>
 * A property is scheduled for removal when its recommended attributes carry the delete
 * flag, it is not listed in {@code ignoreProperties}, it has no value in the Kerberos
 * configurations for this config type, and it exists in {@code existingConfigProperties}.
 * When {@code propertiesToRemove} is null the property is instead blanked out (set to
 * {@code ""}) directly in {@code kerberosConfigurations}.
 *
 * @param configType             the configuration type being processed
 * @param recommendedConfigPropertyAttributes recommended attributes keyed by property name
 * @param existingConfigProperties            the cluster's current properties for this type, or null
 * @param kerberosConfigurations              the map of config-type to property updates being built
 * @param ignoreProperties                    property names to skip, or null for none
 * @param propertiesToRemove                  the removal map to populate, or null to blank values instead
 */
private void removeRecommendedPropertiesForConfigType(String configType,
                                                      Map<String, ValueAttributesInfo> recommendedConfigPropertyAttributes,
                                                      Map<String, String> existingConfigProperties,
                                                      Map<String, Map<String, String>> kerberosConfigurations,
                                                      Set<String> ignoreProperties,
                                                      Map<String, Set<String>> propertiesToRemove) {
  for (Map.Entry<String, ValueAttributesInfo> attributeEntry : recommendedConfigPropertyAttributes.entrySet()) {
    String propertyName = attributeEntry.getKey();
    // Only properties explicitly flagged for deletion are considered.
    if (!"true".equalsIgnoreCase(attributeEntry.getValue().getDelete())) {
      continue;
    }
    Map<String, String> kerberosConfigProperties = kerberosConfigurations.get(configType);
    boolean ignored = (ignoreProperties != null) && ignoreProperties.contains(propertyName);
    boolean hasKerberosValue = (kerberosConfigProperties != null) && (kerberosConfigProperties.get(propertyName) != null);
    boolean existsInCluster = (existingConfigProperties != null) && existingConfigProperties.containsKey(propertyName);
    if (ignored || hasKerberosValue || !existsInCluster) {
      continue;
    }
    LOG.debug("Property to remove from configuration based on StackAdvisor recommendation:" +
        "\n\tConfigType: {}\n\tProperty: {}",
        configType, propertyName);
    if (propertiesToRemove == null) {
      // No dedicated removal map was supplied: blank the value out in the Kerberos configurations.
      if (kerberosConfigProperties == null) {
        kerberosConfigProperties = new HashMap<>();
        kerberosConfigurations.put(configType, kerberosConfigProperties);
      }
      kerberosConfigProperties.put(propertyName, "");
    } else {
      // Record the property for removal, creating the per-type set on first use.
      Set<String> properties = propertiesToRemove.get(configType);
      if (properties == null) {
        properties = new HashSet<>();
        propertiesToRemove.put(configType, properties);
      }
      properties.add(propertyName);
    }
  }
}
/**
 * Ensures the headless (user-type) Kerberos identities for the given services exist in the KDC.
 * <p>
 * Only runs when Ambari manages Kerberos identities. Opens a KDC operation handler using the
 * stored administrator credentials, creates the service- and component-level identities for
 * each named service, optionally installs Ambari's own principal, then closes the handler.
 *
 * @param cluster                the cluster
 * @param existingConfigurations the cluster's current configurations (config-type to name/value)
 * @param services               the names of the services whose identities should be created
 * @return true (the operation either completes or throws)
 * @throws KerberosInvalidConfigurationException if the Kerberos configuration is invalid
 * @throws AmbariException                       if the KDC handler cannot be opened or identity creation fails
 */
@Override
public boolean ensureHeadlessIdentities(Cluster cluster, Map<String, Map<String, String>> existingConfigurations, Set<String> services)
    throws KerberosInvalidConfigurationException, AmbariException {
  KerberosDetails kerberosDetails = getKerberosDetails(cluster, null);
  // Only perform this task if Ambari manages Kerberos identities
  if (kerberosDetails.manageIdentities()) {
    KerberosDescriptor kerberosDescriptor = getKerberosDescriptor(cluster, false);
    Map<String, String> kerberosDescriptorProperties = kerberosDescriptor.getProperties();
    // Work on a deep copy so the caller's existingConfigurations map is not mutated.
    Map<String, Map<String, String>> configurations = addAdditionalConfigurations(cluster,
        deepCopy(existingConfigurations), null, kerberosDescriptorProperties);
    Map<String, String> kerberosConfiguration = kerberosDetails.getKerberosEnvProperties();
    KerberosOperationHandler kerberosOperationHandler = kerberosOperationHandlerFactory.getKerberosOperationHandler(kerberosDetails.getKdcType());
    PrincipalKeyCredential administratorCredential = getKDCAdministratorCredentials(cluster.getClusterName());
    try {
      kerberosOperationHandler.open(administratorCredential, kerberosDetails.getDefaultRealm(), kerberosConfiguration);
    } catch (KerberosOperationException e) {
      String message = String.format("Failed to process the identities, could not properly open the KDC operation handler: %s",
          e.getMessage());
      LOG.error(message);
      throw new AmbariException(message, e);
    }
    // Create the context to use for filtering Kerberos Identities based on the state of the cluster
    Map<String, Object> filterContext = new HashMap<>();
    filterContext.put("configurations", configurations);
    filterContext.put("services", services);
    for (String serviceName : services) {
      // Set properties...
      KerberosServiceDescriptor serviceDescriptor = kerberosDescriptor.getService(serviceName);
      if (serviceDescriptor != null) {
        Map<String, KerberosComponentDescriptor> componentDescriptors = serviceDescriptor.getComponents();
        if (null != componentDescriptors) {
          for (KerberosComponentDescriptor componentDescriptor : componentDescriptors.values()) {
            if (componentDescriptor != null) {
              List<KerberosIdentityDescriptor> identityDescriptors;
              // Handle the service-level Kerberos identities
              // NOTE(review): these service-level identities are re-processed once per
              // component; presumably createIdentity is idempotent — confirm.
              identityDescriptors = serviceDescriptor.getIdentities(true, filterContext);
              if (identityDescriptors != null) {
                for (KerberosIdentityDescriptor identityDescriptor : identityDescriptors) {
                  createIdentity(identityDescriptor, KerberosPrincipalType.USER, kerberosConfiguration, kerberosOperationHandler, configurations, null);
                }
              }
              // Handle the component-level Kerberos identities
              identityDescriptors = componentDescriptor.getIdentities(true, filterContext);
              if (identityDescriptors != null) {
                for (KerberosIdentityDescriptor identityDescriptor : identityDescriptors) {
                  createIdentity(identityDescriptor, KerberosPrincipalType.USER, kerberosConfiguration, kerberosOperationHandler, configurations, null);
                }
              }
            }
          }
        }
      }
    }
    // create Ambari principal & keytab, configure JAAS only if 'kerberos-env.create_ambari_principal = true'
    if (kerberosDetails.createAmbariPrincipal()) {
      installAmbariIdentities(kerberosDescriptor, kerberosOperationHandler, kerberosConfiguration, configurations, kerberosDetails);
    }
    // The KerberosOperationHandler needs to be closed, if it fails to close ignore the
    // exception since there is little we can or care to do about it now.
    try {
      kerberosOperationHandler.close();
    } catch (KerberosOperationException e) {
      // Ignore this...
    }
  }
  return true;
}
/**
 * Installs the identities needed by the Ambari server itself.
 * <p>
 * The Ambari server needs its own identity for authentication; and, if Kerberos
 * authentication is enabled, it needs a SPNEGO principal for ticket validation routines.
 * <p>
 * These identities are installed separately from the per-host workflow since an agent may
 * not exist on the Ambari server host, so keytab distribution cannot rely on the same
 * mechanism used for other cluster hosts.
 *
 * @param kerberosDescriptor       the Kerberos descriptor
 * @param kerberosOperationHandler the relevant KerberosOperationHandler
 * @param kerberosEnvProperties    the kerberos-env properties
 * @param configurations           a map of config-types to property name/value pairs representing
 *                                 the existing configurations for the cluster
 * @param kerberosDetails          a KerberosDetails containing information about relevant Kerberos
 *                                 configuration
 * @throws AmbariException if creating or installing an identity fails
 */
private void installAmbariIdentities(KerberosDescriptor kerberosDescriptor,
                                     KerberosOperationHandler kerberosOperationHandler,
                                     Map<String, String> kerberosEnvProperties,
                                     Map<String, Map<String, String>> configurations,
                                     KerberosDetails kerberosDetails) throws AmbariException {
  List<KerberosIdentityDescriptor> ambariIdentities = getAmbariServerIdentities(kerberosDescriptor);
  if (ambariIdentities.isEmpty()) {
    return;
  }
  String ambariServerHostname = StageUtils.getHostName();
  for (KerberosIdentityDescriptor identity : ambariIdentities) {
    if (identity == null) {
      continue;
    }
    KerberosPrincipalDescriptor principal = identity.getPrincipalDescriptor();
    if (principal == null) {
      continue;
    }
    // Only the designated Ambari-server identity triggers a JAAS file update.
    boolean updateJAASFile = AMBARI_SERVER_KERBEROS_IDENTITY_NAME.equals(identity.getName());
    Keytab keytab = createIdentity(identity, principal.getType(), kerberosEnvProperties, kerberosOperationHandler, configurations, ambariServerHostname);
    installAmbariIdentity(identity, keytab, configurations, ambariServerHostname, kerberosDetails, updateJAASFile);
    if (updateJAASFile) {
      try {
        KerberosChecker.checkJaasConfiguration();
      } catch (AmbariException e) {
        // A broken JAAS configuration is logged but does not abort the remaining identities.
        LOG.error("Error in Ambari JAAS configuration: " + e.getLocalizedMessage(), e);
      }
    }
  }
}
/**
 * Performs tasks needed to install the Kerberos identities created for the Ambari server.
 *
 * @param ambariServerIdentity the ambari server's {@link KerberosIdentityDescriptor}
 * @param keytab               the Keytab data for the relevant identity (may be null, in which case
 *                             only an error is logged)
 * @param configurations       a map of compiled configurations used for variable replacement
 * @param hostname             the hostname to use to replace _HOST in principal names, if necessary
 * @param kerberosDetails      a KerberosDetails containing information about relevant Kerberos configuration
 * @param updateJAASFile       true to update Ambari's JAAS file; false otherwise
 * @throws AmbariException if the keytab file cannot be created
 * @see ConfigureAmbariIdentitiesServerAction#configureJAAS(String, String, org.apache.ambari.server.serveraction.ActionLog)
 */
private void installAmbariIdentity(KerberosIdentityDescriptor ambariServerIdentity,
                                   Keytab keytab, Map<String, Map<String, String>> configurations,
                                   String hostname,
                                   KerberosDetails kerberosDetails,
                                   boolean updateJAASFile) throws AmbariException {
  KerberosPrincipalDescriptor principalDescriptor = ambariServerIdentity.getPrincipalDescriptor();
  if (principalDescriptor != null) {
    String principal = variableReplacementHelper.replaceVariables(principalDescriptor.getValue(), configurations);
    // Replace _HOST with the supplied hostname, if one was provided
    if (!StringUtils.isEmpty(hostname)) {
      principal = principal.replace("_HOST", hostname);
    }
    KerberosKeytabDescriptor keytabDescriptor = ambariServerIdentity.getKeytabDescriptor();
    if (keytabDescriptor != null) {
      String destKeytabFilePath = variableReplacementHelper.replaceVariables(keytabDescriptor.getFile(), configurations);
      File destKeytabFile = new File(destKeytabFilePath);
      ConfigureAmbariIdentitiesServerAction configureAmbariIdentitiesServerAction = injector.getInstance(ConfigureAmbariIdentitiesServerAction.class);
      if (keytab != null) {
        try {
          KerberosOperationHandler operationHandler = kerberosOperationHandlerFactory.getKerberosOperationHandler(kerberosDetails.getKdcType());
          // Write the keytab to a temporary file first; it is deleted in the finally block
          // regardless of whether installation succeeds.
          File tmpKeytabFile = createTemporaryFile();
          try {
            if ((operationHandler != null) && operationHandler.createKeytabFile(keytab, tmpKeytabFile)) {
              String ownerName = variableReplacementHelper.replaceVariables(keytabDescriptor.getOwnerName(), configurations);
              String ownerAccess = keytabDescriptor.getOwnerAccess();
              String groupName = variableReplacementHelper.replaceVariables(keytabDescriptor.getGroupName(), configurations);
              String groupAccess = keytabDescriptor.getGroupAccess();
              // TODO: check whether matching the identity name inside the principal string
              // is a reliable way to detect the Ambari server's own identity
              String componentName = principal.contains(KerberosHelper.AMBARI_SERVER_KERBEROS_IDENTITY_NAME)
                  ? "AMBARI_SERVER_SELF"
                  : RootComponent.AMBARI_SERVER.name();
              ResolvedKerberosPrincipal resolvedKerberosPrincipal = new ResolvedKerberosPrincipal(
                  null,
                  hostname,
                  principal,
                  false,
                  null,
                  RootService.AMBARI.name(),
                  componentName,
                  destKeytabFilePath
              );
              configureAmbariIdentitiesServerAction.installAmbariServerIdentity(resolvedKerberosPrincipal, tmpKeytabFile.getAbsolutePath(), destKeytabFilePath,
                  ownerName, ownerAccess, groupName, groupAccess, null);
              LOG.debug("Successfully created keytab file for {} at {}", principal, destKeytabFile.getAbsolutePath());
            } else {
              LOG.error("Failed to create keytab file for {} at {}", principal, destKeytabFile.getAbsolutePath());
            }
          } finally {
            tmpKeytabFile.delete();
          }
        } catch (KerberosOperationException e) {
          throw new AmbariException(String.format("Failed to create keytab file for %s at %s: %s:",
              principal, destKeytabFile.getAbsolutePath(), e.getLocalizedMessage()), e);
        }
      } else {
        LOG.error("No keytab data is available to create the keytab file for {} at {}", principal, destKeytabFile.getAbsolutePath());
      }
      if (updateJAASFile) {
        configureAmbariIdentitiesServerAction.configureJAAS(principal, destKeytabFile.getAbsolutePath(), null);
      }
    }
  }
}
/**
 * Builds the stages needed to create the Kerberos test identity for the cluster.
 *
 * @param cluster               the cluster
 * @param commandParamsStage    command parameters for the generated stages
 * @param requestStageContainer the container to append stages to, or null
 * @return the populated request stage container
 * @throws KerberosOperationException if a Kerberos operation fails
 * @throws AmbariException            if an Ambari-level error occurs
 */
@Override
public RequestStageContainer createTestIdentity(Cluster cluster, Map<String, String> commandParamsStage,
                                                RequestStageContainer requestStageContainer)
    throws KerberosOperationException, AmbariException {
  KerberosDetails kerberosDetails = getKerberosDetails(cluster, null);
  // Use the default create handler; no extra flags are enabled for the test identity.
  CreatePrincipalsAndKeytabsHandler handler =
      new CreatePrincipalsAndKeytabsHandler(KerberosServerAction.OperationType.DEFAULT, false, false, false);
  return handleTestIdentity(cluster, kerberosDetails, commandParamsStage, requestStageContainer, handler);
}
/**
 * Builds the stages needed to remove the Kerberos test identity from the cluster.
 *
 * @param cluster               the cluster
 * @param commandParamsStage    command parameters for the generated stages
 * @param requestStageContainer the container to append stages to, or null
 * @return the populated request stage container
 * @throws KerberosOperationException if a Kerberos operation fails
 * @throws AmbariException            if an Ambari-level error occurs
 */
@Override
public RequestStageContainer deleteTestIdentity(Cluster cluster, Map<String, String> commandParamsStage,
                                                RequestStageContainer requestStageContainer)
    throws KerberosOperationException, AmbariException {
  // Delegate with the delete handler; the updated container is returned directly.
  return handleTestIdentity(cluster, getKerberosDetails(cluster, null), commandParamsStage,
      requestStageContainer, new DeletePrincipalsAndKeytabsHandler());
}
/**
 * Validates the stored KDC administrator credentials for the given cluster.
 * <p>
 * Delegates to the two-argument overload with a null first argument, which
 * presumably causes the Kerberos details to be looked up internally — confirm
 * against the overload's implementation.
 *
 * @param cluster the cluster whose KDC credentials should be validated
 * @throws KerberosMissingAdminCredentialsException if no administrator credentials are stored
 * @throws KerberosAdminAuthenticationException     if the credentials fail to authenticate
 * @throws KerberosInvalidConfigurationException    if the Kerberos configuration is invalid
 * @throws AmbariException                          if an Ambari-level error occurs
 */
@Override
public void validateKDCCredentials(Cluster cluster) throws KerberosMissingAdminCredentialsException,
    KerberosAdminAuthenticationException,
    KerberosInvalidConfigurationException,
    AmbariException {
  validateKDCCredentials(null, cluster);
}
/**
 * Calculates the Kerberos auth-to-local rules and writes them into the relevant
 * properties of the supplied kerberosConfigurations map.
 * <p>
 * Processing is skipped entirely when kerberos-env disables rule management. Identities
 * are gathered from the top level of the Kerberos descriptor and from every installed
 * (or preconfigured) service and component; each auth-to-local property declared by a
 * descriptor is then regenerated from a clone of the accumulated builder, merged with any
 * existing and pending rule values.
 *
 * @param cluster                the cluster
 * @param kerberosDescriptor     the Kerberos descriptor supplying identities and auth-to-local properties
 * @param realm                  the default realm
 * @param installedServices      installed service names mapped to their installed component names
 * @param existingConfigurations the cluster's current configurations
 * @param kerberosConfigurations the map of pending Kerberos configuration updates (mutated)
 * @param includePreconfigureData true to also account for services marked for preconfiguration
 * @throws AmbariException if cloning the builder or processing configurations fails
 */
@Override
public void setAuthToLocalRules(Cluster cluster,
                                KerberosDescriptor kerberosDescriptor, String realm,
                                Map<String, Set<String>> installedServices,
                                Map<String, Map<String, String>> existingConfigurations,
                                Map<String, Map<String, String>> kerberosConfigurations,
                                boolean includePreconfigureData)
    throws AmbariException {
  boolean processAuthToLocalRules = true;
  Map<String, String> kerberosEnvProperties = existingConfigurations.get(KERBEROS_ENV);
  // kerberos-env may explicitly disable auth-to-local rule management.
  if (kerberosEnvProperties.containsKey(MANAGE_AUTH_TO_LOCAL_RULES)) {
    processAuthToLocalRules = Boolean.valueOf(kerberosEnvProperties.get(MANAGE_AUTH_TO_LOCAL_RULES));
  }
  if (kerberosDescriptor != null && processAuthToLocalRules) {
    Set<String> authToLocalProperties;
    Set<String> authToLocalPropertiesToSet = new HashSet<>();
    // a flag to be used by the AuthToLocalBuilder marking whether the default realm rule should contain the //L option, indicating username case insensitive behaviour
    // the 'kerberos-env' structure is expected to be available here as it was previously validated
    boolean caseInsensitiveUser = Boolean.valueOf(existingConfigurations.get(KERBEROS_ENV).get(CASE_INSENSITIVE_USERNAME_RULES));
    // Additional realms that need to be handled according to the Kerberos Descriptor
    String additionalRealms = kerberosDescriptor.getProperty("additional_realms");
    // Create the context to use for filtering Kerberos Identities based on the state of the cluster
    Map<String, Object> filterContext = new HashMap<>();
    filterContext.put("configurations", existingConfigurations);
    filterContext.put("services", installedServices.keySet());
    AuthToLocalBuilder authToLocalBuilder = new AuthToLocalBuilder(realm, additionalRealms, caseInsensitiveUser);
    // Add in the default configurations for the services that need to be preconfigured. These
    // configurations may be needed while calculating the auth-to-local rules.
    Map<String, Map<String, String>> replacements = (includePreconfigureData)
        ? addConfigurationsForPreProcessedServices(deepCopy(existingConfigurations), cluster, kerberosDescriptor, false)
        : existingConfigurations;
    // Process top-level identities
    addIdentities(authToLocalBuilder, kerberosDescriptor.getIdentities(true, filterContext), null, replacements);
    // Determine which properties need to be set
    authToLocalProperties = kerberosDescriptor.getAuthToLocalProperties();
    if (authToLocalProperties != null) {
      authToLocalPropertiesToSet.addAll(authToLocalProperties);
    }
    // Iterate through the services in the Kerberos descriptor. If a found service is installed
    // or marked to be preconfigured, add the relevant data to the auth-to-local rules.
    Map<String, KerberosServiceDescriptor> serviceDescriptors = kerberosDescriptor.getServices();
    if (serviceDescriptors != null) {
      for (KerberosServiceDescriptor serviceDescriptor : serviceDescriptors.values()) {
        String serviceName = serviceDescriptor.getName();
        boolean preconfigure = includePreconfigureData && serviceDescriptor.shouldPreconfigure();
        boolean explicitlyAdded = installedServices.containsKey(serviceName);
        // Add this service's identities if we are implicitly preconfigurring the service or if the
        // service has been explicitly added to the cluster
        if (preconfigure || explicitlyAdded) {
          LOG.info("Adding identities for service {} to auth to local mapping [{}]",
              serviceName,
              (explicitlyAdded) ? "explicit" : "preconfigured");
          // Process the service-level Kerberos descriptor
          addIdentities(authToLocalBuilder, serviceDescriptor.getIdentities(true, filterContext), null, replacements);
          authToLocalProperties = serviceDescriptor.getAuthToLocalProperties();
          if (authToLocalProperties != null) {
            authToLocalPropertiesToSet.addAll(authToLocalProperties);
          }
          // Process the relevant component-level Kerberos descriptors
          Map<String, KerberosComponentDescriptor> componentDescriptors = serviceDescriptor.getComponents();
          if (componentDescriptors != null) {
            Set<String> installedServiceComponents = installedServices.get(serviceName);
            // Ensure installedComponents is not null....
            if (installedServiceComponents == null) {
              installedServiceComponents = Collections.emptySet();
            }
            for (KerberosComponentDescriptor componentDescriptor : componentDescriptors.values()) {
              String componentName = componentDescriptor.getName();
              // Add this component's identities if we are implicitly preconfiguring the parent
              // service or if the component has been explicitly added to the cluster
              if (preconfigure || (installedServiceComponents.contains(componentName))) {
                LOG.info("Adding identities for component {} to auth to local mapping", componentName);
                addIdentities(authToLocalBuilder, componentDescriptor.getIdentities(true, filterContext), null, replacements);
                authToLocalProperties = componentDescriptor.getAuthToLocalProperties();
                if (authToLocalProperties != null) {
                  authToLocalPropertiesToSet.addAll(authToLocalProperties);
                }
              }
            }
          }
        }
      }
    }
    if (!authToLocalPropertiesToSet.isEmpty()) {
      for (String authToLocalProperty : authToLocalPropertiesToSet) {
        // Each entry is a "config-type/property[|concatenation]" specification.
        Matcher m = KerberosDescriptor.AUTH_TO_LOCAL_PROPERTY_SPECIFICATION_PATTERN.matcher(authToLocalProperty);
        if (m.matches()) {
          AuthToLocalBuilder builder;
          // Clone the shared builder so per-property rules do not leak into each other.
          try {
            builder = (AuthToLocalBuilder) authToLocalBuilder.clone();
          } catch (CloneNotSupportedException e) {
            LOG.error("Failed to clone the AuthToLocalBuilder: " + e.getLocalizedMessage(), e);
            throw new AmbariException("Failed to clone the AuthToLocalBuilder: " + e.getLocalizedMessage(), e);
          }
          String configType = m.group(1);
          String propertyName = m.group(2);
          if (configType == null) {
            configType = "";
          }
          // Add existing auth_to_local configuration, if set
          Map<String, String> existingConfiguration = existingConfigurations.get(configType);
          if (existingConfiguration != null) {
            builder.addRules(existingConfiguration.get(propertyName));
          }
          // Add/update descriptor auth_to_local configuration, if set
          Map<String, String> kerberosConfiguration = kerberosConfigurations.get(configType);
          if (kerberosConfiguration != null) {
            builder.addRules(kerberosConfiguration.get(propertyName));
          } else {
            kerberosConfiguration = new HashMap<>();
            kerberosConfigurations.put(configType, kerberosConfiguration);
          }
          kerberosConfiguration.put(propertyName,
              builder.generate(AuthToLocalBuilder.ConcatenationType.translate(m.group(3))));
        }
      }
    }
  }
}
/**
 * Finds the {@link ServiceComponentHost}s to process, applying the supplied host and
 * service/component filters against the Kerberos descriptor.
 * <p>
 * In each filter, a null value or a {@code "*"} entry matches everything. A host matches
 * only when its service is present in the Kerberos descriptor.
 *
 * @param cluster                the cluster
 * @param kerberosDescriptor     the Kerberos descriptor used to recognize relevant services
 * @param serviceComponentFilter service names mapped to component names to include, or null for all
 * @param hostFilter             host names to include, or null for all
 * @return the filtered list of {@link ServiceComponentHost}s
 * @throws AmbariException if an error occurs while collecting hosts
 */
@Override
public List<ServiceComponentHost> getServiceComponentHostsToProcess(final Cluster cluster,
                                                                    final KerberosDescriptor kerberosDescriptor,
                                                                    final Map<String, ? extends Collection<String>> serviceComponentFilter,
                                                                    final Collection<String> hostFilter)
    throws AmbariException {
  return getServiceComponentHosts(cluster, new Command<Boolean, ServiceComponentHost>() {
    @Override
    public Boolean invoke(ServiceComponentHost sch) throws AmbariException {
      if (sch == null) {
        return false;
      }
      // Host filter: null or "*" matches every host.
      boolean hostMatches = (hostFilter == null) || hostFilter.contains("*") || hostFilter.contains(sch.getHostName());
      if (!hostMatches) {
        return false;
      }
      String serviceName = sch.getServiceName();
      // Service filter: null or "*" matches every service.
      boolean serviceMatches = (serviceComponentFilter == null) || serviceComponentFilter.containsKey("*") || serviceComponentFilter.containsKey(serviceName);
      if (!serviceMatches) {
        return false;
      }
      // Only services known to the Kerberos descriptor are eligible.
      KerberosServiceDescriptor serviceDescriptor = kerberosDescriptor.getService(serviceName);
      if (serviceDescriptor == null) {
        return false;
      }
      // Component filter: a wildcard service filter means no per-component restriction.
      Collection<String> componentFilter = ((serviceComponentFilter == null) || serviceComponentFilter.containsKey("*")) ? null : serviceComponentFilter.get(serviceName);
      return (componentFilter == null) || componentFilter.contains("*") || componentFilter.contains(sch.getServiceComponentName());
    }
  });
}
/**
 * Find the {@link ServiceComponentHost}s for the cluster, filtering using the
 * supplied "should include" command (<code>shouldIncludeCommand</code>).
 * <p>
 * If <code>shouldIncludeCommand</code> is <code>null</code>, no filtering will be performed
 * causing all found {@link ServiceComponentHost}s to be returned.
 *
 * @param cluster              the cluster
 * @param shouldIncludeCommand the filtering logic
 * @return a list of (filtered) {@link ServiceComponentHost}s
 * @throws AmbariException if an error occurs
 */
private List<ServiceComponentHost> getServiceComponentHosts(Cluster cluster,
                                                            Command<Boolean, ServiceComponentHost> shouldIncludeCommand)
    throws AmbariException {
  List<ServiceComponentHost> result = new ArrayList<>();
  Collection<Host> hosts = cluster.getHosts();
  if ((hosts == null) || hosts.isEmpty()) {
    return result;
  }
  // Visit every host and collect the components installed on it that pass the filter.
  for (Host host : hosts) {
    List<ServiceComponentHost> onHost = cluster.getServiceComponentHosts(host.getHostName());
    if ((onHost == null) || onHost.isEmpty()) {
      continue;
    }
    for (ServiceComponentHost sch : onHost) {
      // A null command means "include everything".
      if ((shouldIncludeCommand == null) || shouldIncludeCommand.invoke(sch)) {
        result.add(sch);
      }
    }
  }
  return result;
}
/**
 * Collects the names of hosts whose KERBEROS_CLIENT component is in the INSTALLED state.
 *
 * @param cluster the cluster to inspect
 * @return the set of host names with a usable Kerberos client
 * @throws AmbariException if the component hosts cannot be retrieved
 */
@Override
public Set<String> getHostsWithValidKerberosClient(Cluster cluster)
    throws AmbariException {
  Set<String> result = new HashSet<>();
  List<ServiceComponentHost> kerberosClients = cluster.getServiceComponentHosts(Service.Type.KERBEROS.name(), Role.KERBEROS_CLIENT.name());
  if (kerberosClients != null) {
    for (ServiceComponentHost client : kerberosClients) {
      // Only fully installed clients count as valid.
      if (client.getState() == State.INSTALLED) {
        result.add(client.getHostName());
      }
    }
  }
  return result;
}
/**
 * Gets the composite (stack plus user) Kerberos descriptor for the cluster.
 * <p>
 * Delegates to the five-argument overload with when-clause evaluation disabled and
 * no additional services.
 *
 * @param cluster                 the cluster
 * @param includePreconfigureData true to include preconfigure data in the descriptor
 * @return the composite Kerberos descriptor
 * @throws AmbariException if the descriptor cannot be built
 */
@Override
public KerberosDescriptor getKerberosDescriptor(Cluster cluster, boolean includePreconfigureData) throws AmbariException {
  return getKerberosDescriptor(KerberosDescriptorType.COMPOSITE, cluster, false, null, includePreconfigureData);
}
/**
 * Gets the Kerberos descriptor of the requested type for the cluster, optionally evaluating
 * when-clauses to prune identities that do not apply to the cluster's current state.
 * <p>
 * The cluster's services must all come from a single stack; otherwise an exception is thrown.
 *
 * @param kerberosDescriptorType  the type of descriptor to build (stack, user, or composite)
 * @param cluster                 the cluster
 * @param evaluateWhenClauses     true to evaluate identity when-clauses and prune non-matching identities
 * @param additionalServices      extra service names to include in the evaluation context, or null
 * @param includePreconfigureData true to include preconfigure data in the descriptor
 * @return the (possibly pruned) Kerberos descriptor
 * @throws AmbariException if the services span multiple stacks or the descriptor cannot be built
 */
@Override
public KerberosDescriptor getKerberosDescriptor(KerberosDescriptorType kerberosDescriptorType, Cluster cluster,
                                                boolean evaluateWhenClauses, Collection<String> additionalServices,
                                                boolean includePreconfigureData)
    throws AmbariException {
  // !!! FIXME in a per-service view, what does this become?
  Set<StackId> stackIds = new HashSet<>();
  for (Service service : cluster.getServices().values()) {
    stackIds.add(service.getDesiredStackId());
  }
  // A single, unambiguous stack is required to resolve the stack descriptor.
  if (1 != stackIds.size()) {
    throw new AmbariException("Services are deployed from multiple stacks and cannot determine a unique one.");
  }
  StackId stackId = stackIds.iterator().next();
  KerberosDescriptor kerberosDescriptor = getKerberosDescriptor(kerberosDescriptorType, cluster, stackId, includePreconfigureData);
  if (evaluateWhenClauses) {
    Set<String> services = new HashSet<>(cluster.getServices().keySet());
    if (additionalServices != null) {
      services.addAll(additionalServices);
    }
    // Build the context needed to filter out Kerberos identities...
    // This includes the current set of configurations for the cluster and the set of installed services
    Map<String, Object> context = new HashMap<>();
    context.put("configurations", calculateConfigurations(cluster, null, kerberosDescriptor, false, false));
    context.put("services", services);
    // Get the Kerberos identities that need to be pruned
    Map<String, Set<String>> identitiesToRemove = processWhenClauses("", kerberosDescriptor, context, new HashMap<>());
    // Prune off the Kerberos identities that need to be removed due to the evaluation of its _when_ clause
    for (Map.Entry<String, Set<String>> identity : identitiesToRemove.entrySet()) {
      // The key is a "/"-separated path from the descriptor root to the owning container.
      String[] path = identity.getKey().split("/");
      AbstractKerberosDescriptorContainer container = null;
      // Follow the path to the container that contains the identities to remove
      for (String name : path) {
        if (container == null) {
          container = kerberosDescriptor;
        } else {
          container = container.getChildContainer(name);
          if (container == null) {
            break;
          }
        }
      }
      // Remove the relevant identities from the found container
      if (container != null) {
        for (String identityName : identity.getValue()) {
          container.removeIdentity(identityName);
        }
      }
    }
  }
  return kerberosDescriptor;
}
/**
 * Gets the Kerberos descriptor of the requested type for the cluster and stack,
 * combining the stack-defined descriptor with the user-supplied updates as needed.
 *
 * @param kerberosDescriptorType  which descriptor parts to include (stack, user, or composite)
 * @param cluster                 the cluster (used to look up user descriptor updates)
 * @param stackId                 the stack to read the stack descriptor from
 * @param includePreconfigureData true to include preconfigure data in the stack descriptor
 * @return the combined Kerberos descriptor
 * @throws AmbariException if either descriptor cannot be retrieved
 */
@Override
public KerberosDescriptor getKerberosDescriptor(KerberosDescriptorType kerberosDescriptorType, Cluster cluster,
                                                StackId stackId, boolean includePreconfigureData) throws AmbariException {
  // Include the stack-defined descriptor unless only the user descriptor was requested.
  KerberosDescriptor stackDescriptor = null;
  if ((kerberosDescriptorType == KerberosDescriptorType.STACK) || (kerberosDescriptorType == KerberosDescriptorType.COMPOSITE)) {
    stackDescriptor = getKerberosDescriptorFromStack(stackId, includePreconfigureData);
  }
  // Include the user-supplied updates unless only the stack descriptor was requested.
  KerberosDescriptor userDescriptor = null;
  if ((kerberosDescriptorType == KerberosDescriptorType.USER) || (kerberosDescriptorType == KerberosDescriptorType.COMPOSITE)) {
    userDescriptor = getKerberosDescriptorUpdates(cluster);
  }
  return combineKerberosDescriptors(stackDescriptor, userDescriptor);
}
/**
 * Merges the property updates from the supplied configuration descriptors into the
 * configurations map.
 * <p>
 * When {@code configurationTypeFilter} is non-null, only descriptors whose config type
 * appears in the filter are merged. A null {@code configurations} map is replaced with a
 * new one only when there is something to merge.
 *
 * @param configurations          config-type to name/value map to merge into; may be null
 * @param updates                 config-type to configuration descriptor updates; may be null or empty
 * @param replacements            configurations used for variable replacement
 * @param configurationTypeFilter config types to merge, or null for all
 * @return the (possibly newly created) configurations map
 * @throws AmbariException if merging a configuration fails
 */
@Override
public Map<String, Map<String, String>> mergeConfigurations(Map<String, Map<String, String>> configurations,
                                                            Map<String, KerberosConfigurationDescriptor> updates,
                                                            Map<String, Map<String, String>> replacements,
                                                            Set<String> configurationTypeFilter)
    throws AmbariException {
  if ((updates == null) || updates.isEmpty()) {
    // Nothing to merge; hand back the caller's map untouched (possibly null).
    return configurations;
  }
  if (configurations == null) {
    configurations = new HashMap<>();
  }
  for (Map.Entry<String, KerberosConfigurationDescriptor> update : updates.entrySet()) {
    String type = update.getKey();
    // Honor the optional config-type filter.
    if ((configurationTypeFilter != null) && !configurationTypeFilter.contains(type)) {
      continue;
    }
    KerberosConfigurationDescriptor descriptor = update.getValue();
    if (descriptor != null) {
      mergeConfigurations(configurations, type, descriptor.getProperties(), replacements);
    }
  }
  return configurations;
}
/**
 * Merges the default configurations of not-yet-installed services that are marked for
 * preconfiguration into the supplied configurations map.
 * <p>
 * Only services absent from the cluster whose service descriptor requests
 * preconfiguration contribute; both service- and component-level configurations are
 * merged, restricted to the config types already present in {@code replacements}.
 *
 * @param configurations     config-type to name/value map to merge into; created if null and needed
 * @param replacements       configurations used for variable replacement; its key set also acts
 *                           as the config-type filter for the merge
 * @param cluster            the cluster
 * @param kerberosDescriptor the Kerberos descriptor; looked up (with preconfigure data) if null
 * @return the (possibly newly created) configurations map
 * @throws AmbariException if the descriptor lookup or a merge fails
 */
@Override
public Map<String, Map<String, String>> processPreconfiguredServiceConfigurations(Map<String, Map<String, String>> configurations,
                                                                                  Map<String, Map<String, String>> replacements,
                                                                                  Cluster cluster,
                                                                                  KerberosDescriptor kerberosDescriptor)
    throws AmbariException {
  // Ensure the Kerberos descriptor exists....
  if (kerberosDescriptor == null) {
    kerberosDescriptor = getKerberosDescriptor(cluster, true);
  }
  Map<String, KerberosServiceDescriptor> serviceDescriptors = kerberosDescriptor.getServices();
  if (serviceDescriptors != null) {
    if (configurations == null) {
      configurations = new HashMap<>();
    }
    // Add in the default configurations for the services that need to be preconfigured. These
    // configurations may be needed while calculating the auth-to-local rules.
    // A deep copy keeps the caller's replacements map unchanged.
    Map<String, Map<String, String>> replacementsWithDefaults = addConfigurationsForPreProcessedServices(deepCopy(replacements), cluster, kerberosDescriptor, true);
    Map<String, Service> existingServices = cluster.getServices();
    for (KerberosServiceDescriptor serviceDescriptor : serviceDescriptors.values()) {
      String serviceName = serviceDescriptor.getName();
      boolean shouldPreconfigure = serviceDescriptor.shouldPreconfigure();
      // Only services that are NOT installed and ARE marked for preconfiguration contribute.
      if (!existingServices.containsKey(serviceName) && shouldPreconfigure) {
        configurations = mergeConfigurations(configurations, serviceDescriptor.getConfigurations(), replacementsWithDefaults, replacements.keySet());
        Map<String, KerberosComponentDescriptor> componentDescriptors = serviceDescriptor.getComponents();
        if (componentDescriptors != null) {
          for (KerberosComponentDescriptor componentDescriptor : componentDescriptors.values()) {
            configurations = mergeConfigurations(configurations, componentDescriptor.getConfigurations(), replacementsWithDefaults, replacements.keySet());
          }
        }
      }
    }
  }
  return configurations;
}
@Override
public int addIdentities(KerberosIdentityDataFileWriter kerberosIdentityDataFileWriter,
Collection<KerberosIdentityDescriptor> identities,
Collection<String> identityFilter, String hostname, Long hostId, String serviceName,
String componentName, Map<String, Map<String, String>> kerberosConfigurations,
Map<String, Map<String, String>> configurations,
Map<String, ResolvedKerberosKeytab> resolvedKeytabs, String realm)
throws IOException {
int identitiesAdded = 0;
if (identities != null) {
for (KerberosIdentityDescriptor identity : identities) {
// If there is no filter or the filter contains the current identity's path...
if ((identityFilter == null) || identityFilter.contains(identity.getPath())) {
KerberosPrincipalDescriptor principalDescriptor = identity.getPrincipalDescriptor();
String principal = null;
String principalType = null;
String principalConfiguration = null;
if (principalDescriptor != null) {
principal = variableReplacementHelper.replaceVariables(principalDescriptor.getValue(), configurations);
principalType = KerberosPrincipalType.translate(principalDescriptor.getType());
principalConfiguration = variableReplacementHelper.replaceVariables(principalDescriptor.getConfiguration(), configurations);
}
if (principal != null) {
KerberosKeytabDescriptor keytabDescriptor = identity.getKeytabDescriptor();
String keytabFilePath = null;
String keytabFileOwnerName = null;
String keytabFileOwnerAccess = null;
String keytabFileGroupName = null;
String keytabFileGroupAccess = null;
String keytabFileConfiguration = null;
if (keytabDescriptor != null) {
keytabFilePath = variableReplacementHelper.replaceVariables(keytabDescriptor.getFile(), configurations);
keytabFileOwnerName = variableReplacementHelper.replaceVariables(keytabDescriptor.getOwnerName(), configurations);
keytabFileOwnerAccess = variableReplacementHelper.replaceVariables(keytabDescriptor.getOwnerAccess(), configurations);
keytabFileGroupName = variableReplacementHelper.replaceVariables(keytabDescriptor.getGroupName(), configurations);
keytabFileGroupAccess = variableReplacementHelper.replaceVariables(keytabDescriptor.getGroupAccess(), configurations);
keytabFileConfiguration = variableReplacementHelper.replaceVariables(keytabDescriptor.getConfiguration(), configurations);
}
if (keytabFileOwnerName == null || keytabFileGroupName == null) {
LOG.warn("Missing owner ({}) or group name ({}) of kerberos descriptor {}", keytabFileOwnerName, keytabFileGroupName, keytabDescriptor.getName());
}
// Evaluate the principal "pattern" found in the record to generate the "evaluated principal"
// by replacing the _HOST and _REALM variables.
String evaluatedPrincipal = principal.replace("_HOST", hostname).replace("_REALM", realm);
ResolvedKerberosKeytab resolvedKeytab = new ResolvedKerberosKeytab(
keytabFilePath,
keytabFileOwnerName,
keytabFileOwnerAccess,
keytabFileGroupName,
keytabFileGroupAccess,
Sets.newHashSet(new ResolvedKerberosPrincipal(
hostId,
hostname,
evaluatedPrincipal,
"service".equalsIgnoreCase(principalType),
null,
serviceName,
componentName,
keytabFilePath
)
),
serviceName.equalsIgnoreCase(RootService.AMBARI.name()),
componentName.equalsIgnoreCase("AMBARI_SERVER_SELF")
);
if (resolvedKeytabs.containsKey(keytabFilePath)) {
ResolvedKerberosKeytab sameKeytab = resolvedKeytabs.get(keytabFilePath);
// validating owner and group
boolean differentOwners = false;
String warnTemplate = "Keytab '{}' on host '{}' has different {}, originally set to '{}' and '{}:{}' has '{}', using '{}'";
if (!resolvedKeytab.getOwnerName().equals(sameKeytab.getOwnerName())) {
LOG.warn(warnTemplate,
keytabFilePath, hostname, "owners", sameKeytab.getOwnerName(),
serviceName, componentName, resolvedKeytab.getOwnerName(),
sameKeytab.getOwnerName());
differentOwners = true;
}
if (!resolvedKeytab.getOwnerAccess().equals(sameKeytab.getOwnerAccess())) {
LOG.warn(warnTemplate,
keytabFilePath, hostname, "owner access", sameKeytab.getOwnerAccess(),
serviceName, componentName, resolvedKeytab.getOwnerAccess(),
sameKeytab.getOwnerAccess());
}
// TODO probably fail on group difference. Some services can inject its principals to same keytab, but
// TODO with different owners, so make sure that keytabs are accessible through group acls
// TODO this includes same group name and group 'r' mode
if (!StringUtils.equals(resolvedKeytab.getGroupName(), sameKeytab.getGroupName())) {
if (differentOwners) {
LOG.error(warnTemplate,
keytabFilePath, hostname, "groups", sameKeytab.getGroupName(),
serviceName, componentName, resolvedKeytab.getGroupName(),
sameKeytab.getGroupName());
} else {
LOG.warn(warnTemplate,
keytabFilePath, hostname, "groups", sameKeytab.getGroupName(),
serviceName, componentName, resolvedKeytab.getGroupName(),
sameKeytab.getGroupName());
}
}
if (!StringUtils.equals(resolvedKeytab.getGroupAccess(), sameKeytab.getGroupAccess())) {
if (differentOwners) {
if (!sameKeytab.getGroupAccess().contains("r")) {
LOG.error("Keytab '{}' on host '{}' referenced by multiple identities which have different owners," +
"but 'r' attribute missing for group. Make sure all users (that need this keytab) are in '{}' +" +
"group and keytab can be read by this group",
keytabFilePath,
hostname,
sameKeytab.getGroupName()
);
}
LOG.error(warnTemplate,
keytabFilePath, hostname, "group access", sameKeytab.getGroupAccess(),
serviceName, componentName, resolvedKeytab.getGroupAccess(),
sameKeytab.getGroupAccess());
} else {
LOG.warn(warnTemplate,
keytabFilePath, hostname, "group access", sameKeytab.getGroupAccess(),
serviceName, componentName, resolvedKeytab.getGroupAccess(),
sameKeytab.getGroupAccess());
}
}
// end validating
// merge principal to keytab
sameKeytab.mergePrincipals(resolvedKeytab);
// ensure that keytab file on ambari-server host creating jass file
if (sameKeytab.isMustWriteAmbariJaasFile() || resolvedKeytab.isMustWriteAmbariJaasFile()) {
sameKeytab.setMustWriteAmbariJaasFile(true);
}
// ensure that this keytab is ambari-keytab, server will distribute it manually
if (sameKeytab.isAmbariServerKeytab() || resolvedKeytab.isAmbariServerKeytab()) {
sameKeytab.setAmbariServerKeytab(true);
}
} else {
resolvedKeytabs.put(keytabFilePath, resolvedKeytab);
LOG.info("Keytab {} owner:'{}:{}', group:'{}:{}' is defined", keytabFilePath,
keytabFileOwnerName, keytabFileOwnerAccess, keytabFileGroupName, keytabFileGroupAccess);
}
// Append an entry to the action data file builder...
// TODO obsolete, move to ResolvedKerberosKeytab
if (kerberosIdentityDataFileWriter != null) {
kerberosIdentityDataFileWriter.writeRecord(
hostname,
serviceName,
componentName,
evaluatedPrincipal,
principalType,
keytabFilePath,
keytabFileOwnerName,
keytabFileOwnerAccess,
keytabFileGroupName,
keytabFileGroupAccess,
"true");
}
// Add the principal-related configuration to the map of configurations
mergeConfiguration(kerberosConfigurations, principalConfiguration, principal, null);
// Add the keytab-related configuration to the map of configurations
mergeConfiguration(kerberosConfigurations, keytabFileConfiguration, keytabFilePath, null);
identitiesAdded++;
}
}
}
}
return identitiesAdded;
}
/**
 * Builds the full configuration map used for variable replacement within Kerberos
 * descriptor data: the cluster's existing configurations plus any additional
 * (derived) configurations, optionally augmented with data for pre-configured services.
 *
 * @param cluster                  the relevant cluster
 * @param hostname                 the host whose host-specific configurations to include (nullable)
 * @param kerberosDescriptor       the Kerberos descriptor supplying descriptor-level properties (nullable)
 * @param includePreconfigureData  true to also add configurations for pre-processed services
 * @param calculateClusterHostInfo passed through when calculating pre-processed service data
 * @return the calculated map of config-type to property name/value pairs
 * @throws AmbariException if the configurations cannot be calculated
 */
@Override
public Map<String, Map<String, String>> calculateConfigurations(Cluster cluster, String hostname,
                                                                KerberosDescriptor kerberosDescriptor,
                                                                boolean includePreconfigureData,
                                                                boolean calculateClusterHostInfo)
    throws AmbariException {
  // Descriptor-level properties are optional; pass null when no descriptor was supplied.
  Map<String, String> descriptorProperties = (kerberosDescriptor == null)
      ? null
      : kerberosDescriptor.getProperties();

  Map<String, Map<String, String>> configurations = addAdditionalConfigurations(
      cluster,
      calculateExistingConfigurations(cluster, hostname),
      hostname,
      descriptorProperties);

  if (!includePreconfigureData) {
    return configurations;
  }
  // Fold in configurations for services that are pre-configured for Kerberos.
  return addConfigurationsForPreProcessedServices(configurations, cluster, kerberosDescriptor, calculateClusterHostInfo);
}
/**
 * Resolves the principal name templates declared in the cluster's Kerberos descriptor
 * by applying variable replacement against the supplied configurations.
 *
 * @param cluster       the relevant cluster (used to look up the Kerberos descriptor)
 * @param configuration the configurations used for variable replacement
 * @return a map of principal identifier to its resolved principal name
 * @throws AmbariException if the descriptor cannot be obtained or replacement fails
 */
private Map<String, String> principalNames(Cluster cluster, Map<String, Map<String, String>> configuration) throws AmbariException {
  Map<String, String> resolved = new HashMap<>();
  Map<String, String> templates = getKerberosDescriptor(cluster, false).principals();
  for (Map.Entry<String, String> entry : templates.entrySet()) {
    resolved.put(entry.getKey(), variableReplacementHelper.replaceVariables(entry.getValue(), configuration));
  }
  return resolved;
}
/**
 * Calculates the active (expected) Kerberos identities for the named cluster, optionally
 * narrowed to a single host, service, and/or component.
 * <p/>
 * For each considered host, principal and keytab values from the Kerberos descriptor are
 * resolved using host-specific configurations; duplicate (principal, keytab) pairs are
 * collapsed, keeping the first occurrence. When {@code replaceHostNames} is true, the
 * {@code _HOST} placeholder in each principal is replaced with the concrete hostname.
 *
 * @return a map of hostname to the resolved identity descriptors for that host; empty if
 *         the kerberos-env configuration is unavailable
 */
@Override
public Map<String, Collection<KerberosIdentityDescriptor>> getActiveIdentities(String clusterName,
String hostName,
String serviceName,
String componentName,
boolean replaceHostNames)
throws AmbariException {
// A cluster name is mandatory; host/service/component may be null meaning "all".
if ((clusterName == null) || clusterName.isEmpty()) {
throw new IllegalArgumentException("Invalid argument, cluster name is required");
}
Cluster cluster = clusters.getCluster(clusterName);
if (cluster == null) {
throw new AmbariException(String.format("The cluster object for the cluster name %s is not available", clusterName));
}
Map<String, Collection<KerberosIdentityDescriptor>> activeIdentities = new HashMap<>();
// Only calculate the active identities if the kerberos-env configuration is available. Else
// important information like the realm will be missing (kerberos-env/realm)
Config kerberosEnvConfig = cluster.getDesiredConfigByType(KERBEROS_ENV);
if (kerberosEnvConfig == null) {
// NOTE(review): this message has one {} placeholder but is passed three arguments;
// SLF4J ignores the extras, so only the cluster name appears in the log line.
LOG.debug("Calculating the active identities for {} is being skipped since the kerberos-env configuration is not available",
clusterName, cluster.getSecurityType().name(), SecurityType.KERBEROS.name());
} else {
Collection<String> hosts;
String ambariServerHostname = StageUtils.getHostName();
if (hostName == null) {
// No host filter: consider every host in the cluster...
Map<String, Host> hostMap = clusters.getHostsForCluster(clusterName);
if (hostMap == null) {
hosts = Collections.emptySet();
} else {
hosts = hostMap.keySet();
}
// ...plus the Ambari server host, which may not be a cluster member.
if (!hosts.contains(ambariServerHostname)) {
Collection<String> extendedHosts = new ArrayList<>(hosts.size() + 1);
extendedHosts.addAll(hosts);
extendedHosts.add(ambariServerHostname);
hosts = extendedHosts;
}
} else {
hosts = Collections.singleton(hostName);
}
if (!hosts.isEmpty()) {
KerberosDescriptor kerberosDescriptor = getKerberosDescriptor(cluster, false);
if (kerberosDescriptor != null) {
Set<String> existingServices = cluster.getServices().keySet();
for (String hostname : hosts) {
// Calculate the current host-specific configurations. These will be used to replace
// variables within the Kerberos descriptor data
Map<String, Map<String, String>> configurations = calculateConfigurations(cluster,
hostname,
kerberosDescriptor,
false,
false);
// Create the context to use for filtering Kerberos Identities based on the state of the cluster
Map<String, Object> filterContext = new HashMap<>();
filterContext.put("configurations", configurations);
filterContext.put("services", existingServices);
Map<String, KerberosIdentityDescriptor> hostActiveIdentities = new HashMap<>();
List<KerberosIdentityDescriptor> identities = getActiveIdentities(cluster, hostname,
serviceName, componentName, kerberosDescriptor, filterContext);
if (hostname.equals(ambariServerHostname)) {
// Determine if we should _calculate_ the Ambari service identities.
// If kerberos-env/create_ambari_principal is not set to false the identity should be calculated.
if (createAmbariIdentities(kerberosEnvConfig.getProperties())) {
List<KerberosIdentityDescriptor> ambariIdentities = getAmbariServerIdentities(kerberosDescriptor);
if (ambariIdentities != null) {
identities.addAll(ambariIdentities);
}
}
}
if (!identities.isEmpty()) {
for (KerberosIdentityDescriptor identity : identities) {
KerberosPrincipalDescriptor principalDescriptor = identity.getPrincipalDescriptor();
String principal = null;
if (principalDescriptor != null) {
principal = variableReplacementHelper.replaceVariables(principalDescriptor.getValue(), configurations);
}
// Identities with no resolvable principal are skipped entirely.
if (principal != null) {
KerberosKeytabDescriptor keytabDescriptor = identity.getKeytabDescriptor();
String keytabFile = null;
if (keytabDescriptor != null) {
keytabFile = variableReplacementHelper.replaceVariables(keytabDescriptor.getFile(), configurations);
}
if (replaceHostNames) {
principal = principal.replace("_HOST", hostname);
}
// De-duplicate on the (principal, keytab file) pair; the first occurrence wins.
String uniqueKey = String.format("%s|%s", principal, (keytabFile == null) ? "" : keytabFile);
if (!hostActiveIdentities.containsKey(uniqueKey)) {
KerberosPrincipalType principalType = principalDescriptor.getType();
// Assume the principal is a service principal if not specified
if (principalType == null) {
principalType = KerberosPrincipalType.SERVICE;
}
KerberosPrincipalDescriptor resolvedPrincipalDescriptor =
new KerberosPrincipalDescriptor(principal,
principalType,
variableReplacementHelper.replaceVariables(principalDescriptor.getConfiguration(), configurations),
variableReplacementHelper.replaceVariables(principalDescriptor.getLocalUsername(), configurations));
KerberosKeytabDescriptor resolvedKeytabDescriptor;
if (keytabFile == null) {
resolvedKeytabDescriptor = null;
} else {
// Resolve every keytab attribute with the same host-specific configurations.
resolvedKeytabDescriptor =
new KerberosKeytabDescriptor(
keytabFile,
variableReplacementHelper.replaceVariables(keytabDescriptor.getOwnerName(), configurations),
variableReplacementHelper.replaceVariables(keytabDescriptor.getOwnerAccess(), configurations),
variableReplacementHelper.replaceVariables(keytabDescriptor.getGroupName(), configurations),
variableReplacementHelper.replaceVariables(keytabDescriptor.getGroupAccess(), configurations),
variableReplacementHelper.replaceVariables(keytabDescriptor.getConfiguration(), configurations),
keytabDescriptor.isCachable());
}
hostActiveIdentities.put(uniqueKey, new KerberosIdentityDescriptor(
identity.getName(),
identity.getReference(),
resolvedPrincipalDescriptor,
resolvedKeytabDescriptor,
identity.getWhen()));
}
}
}
}
activeIdentities.put(hostname, hostActiveIdentities.values());
}
}
}
}
return activeIdentities;
}
/**
 * Collects the Kerberos identities declared for the AMBARI service in the supplied
 * descriptor: the service-level identities plus, when present, those of the
 * AMBARI_SERVER component.
 *
 * @param kerberosDescriptor the Kerberos descriptor to read from
 * @return the (possibly empty) list of Ambari server identity descriptors
 * @throws AmbariException if the identities cannot be obtained
 */
@Override
public List<KerberosIdentityDescriptor> getAmbariServerIdentities(KerberosDescriptor kerberosDescriptor) throws AmbariException {
  List<KerberosIdentityDescriptor> identities = new ArrayList<>();

  KerberosServiceDescriptor ambariService = kerberosDescriptor.getService(RootService.AMBARI.name());
  if (ambariService == null) {
    // No AMBARI service entry in the descriptor - nothing to collect.
    return identities;
  }

  List<KerberosIdentityDescriptor> serviceLevel = ambariService.getIdentities(true, null);
  if (serviceLevel != null) {
    identities.addAll(serviceLevel);
  }

  KerberosComponentDescriptor serverComponent = ambariService.getComponent(RootComponent.AMBARI_SERVER.name());
  if (serverComponent != null) {
    List<KerberosIdentityDescriptor> componentLevel = serverComponent.getIdentities(true, null);
    if (componentLevel != null) {
      identities.addAll(componentLevel);
    }
  }

  return identities;
}
/**
 * Determines whether Ambari server identities should be created.
 * <p/>
 * Returns true unless the kerberos-env properties explicitly set the
 * create-Ambari-principal property to "false" (case-insensitive); a missing
 * property map also yields true.
 *
 * @param kerberosEnvProperties the kerberos-env properties (nullable)
 * @return true if the Ambari identities should be created; false otherwise
 */
@Override
public boolean createAmbariIdentities(Map<String, String> kerberosEnvProperties) {
  if (kerberosEnvProperties == null) {
    return true;
  }
  return !"false".equalsIgnoreCase(kerberosEnvProperties.get(CREATE_AMBARI_PRINCIPAL));
}
/**
 * Gets the previously stored KDC administrator credential.
 * <p/>
 * This implementation accesses the secure CredentialStoreService instance to get the data.
 *
 * @param clusterName the name of the relevant cluster
 * @return a PrincipalKeyCredential or null, if the KDC administrator credential is not available
 * @throws AmbariException if an error occurs while retrieving the credentials
 */
@Override
public PrincipalKeyCredential getKDCAdministratorCredentials(String clusterName) throws AmbariException {
  Credential stored = credentialStoreService.getCredential(clusterName, KDC_ADMINISTRATOR_CREDENTIAL_ALIAS);
  // Only principal/key credentials are usable here; anything else is treated as absent.
  return (stored instanceof PrincipalKeyCredential) ? (PrincipalKeyCredential) stored : null;
}
/**
 * Creates and saves underlying {@link org.apache.ambari.server.orm.entities.KerberosPrincipalEntity},
 * {@link org.apache.ambari.server.orm.entities.KerberosKeytabEntity} entities in JPA storage.
 * <p/>
 * The keytab entity is created only if one does not already exist for the keytab's file
 * path; each resolved principal is then created (if missing) and linked to the keytab
 * with its service/component mappings.
 *
 * @param resolvedKerberosKeytab kerberos keytab to be persisted
 */
@Override
public void createResolvedKeytab(ResolvedKerberosKeytab resolvedKerberosKeytab) {
// Create the keytab entity only when no entity exists yet for this file path.
if (kerberosKeytabDAO.find(resolvedKerberosKeytab.getFile()) == null) {
KerberosKeytabEntity kke = new KerberosKeytabEntity(resolvedKerberosKeytab.getFile());
kke.setAmbariServerKeytab(resolvedKerberosKeytab.isAmbariServerKeytab());
kke.setWriteAmbariJaasFile(resolvedKerberosKeytab.isMustWriteAmbariJaasFile());
kke.setOwnerName(resolvedKerberosKeytab.getOwnerName());
kke.setOwnerAccess(resolvedKerberosKeytab.getOwnerAccess());
kke.setGroupName(resolvedKerberosKeytab.getGroupName());
kke.setGroupAccess(resolvedKerberosKeytab.getGroupAccess());
kerberosKeytabDAO.create(kke);
}
for (ResolvedKerberosPrincipal principal : resolvedKerberosKeytab.getPrincipals()) {
// Ensure the principal entity exists before linking it to the keytab.
if (!kerberosPrincipalDAO.exists(principal.getPrincipal())) {
kerberosPrincipalDAO.create(principal.getPrincipal(), principal.isService());
}
// Record each service/component mapping of this principal against the keytab.
for (Map.Entry<String, String> mappingEntry : principal.getServiceMapping().entries()) {
String serviceName = mappingEntry.getKey();
// Host association is optional; principals may be host-agnostic.
HostEntity hostEntity = principal.getHostId() != null ? hostDAO.findById(principal.getHostId()) : null;
KerberosKeytabEntity kke = kerberosKeytabDAO.find(resolvedKerberosKeytab.getFile());
KerberosKeytabPrincipalEntity kkp = kerberosKeytabPrincipalDAO.findOrCreate(kke, hostEntity, kerberosPrincipalDAO.find(principal.getPrincipal()));
// Merge the link entity only when the mapping actually changed.
if(kkp.putServiceMapping(serviceName, mappingEntry.getValue())) {
kerberosKeytabPrincipalDAO.merge(kkp);
}
kerberosKeytabDAO.merge(kke);
}
}
}
/**
 * No-op in this implementation: stale keytabs are not removed here.
 * <p/>
 * NOTE(review): confirm whether stale-keytab cleanup is intentionally deferred
 * or handled elsewhere before relying on this method.
 *
 * @param expectedKeytabs the keytabs expected to remain (currently unused)
 */
@Override
public void removeStaleKeytabs(Collection<ResolvedKerberosKeytab> expectedKeytabs) {
}
/**
 * Translates a collection of configuration specifications (for example
 * {@code config-type/property_name}) into a map of config type to the set of
 * property names requested for that type.
 * <p/>
 * Specifications that do not match
 * {@code KerberosDescriptor.AUTH_TO_LOCAL_PROPERTY_SPECIFICATION_PATTERN} are
 * silently ignored. A specification whose config-type group is absent is filed
 * under the empty-string key.
 *
 * @param configurationSpecifications the specifications to translate; may be null
 * @return the translated map; null if {@code configurationSpecifications} was null
 */
@Override
public Map<String, Set<String>> translateConfigurationSpecifications(Collection<String> configurationSpecifications) {
  // Preserve the null-in/null-out contract of the original implementation.
  if (configurationSpecifications == null) {
    return null;
  }
  Map<String, Set<String>> translation = new HashMap<>();
  for (String configurationSpecification : configurationSpecifications) {
    Matcher m = KerberosDescriptor.AUTH_TO_LOCAL_PROPERTY_SPECIFICATION_PATTERN.matcher(configurationSpecification);
    if (m.matches()) {
      // group(1): optional config type; group(2): property name.
      String configType = m.group(1);
      String propertyName = m.group(2);
      if (configType == null) {
        configType = "";
      }
      // computeIfAbsent replaces the manual get/put-if-missing bookkeeping.
      translation.computeIfAbsent(configType, k -> new HashSet<>()).add(propertyName);
    }
  }
  return translation;
}
/**
 * Creates the principal and cached keytab file for the specified identity, if it is determined to
 * be of the expected type - user (headless) or service.
 * <p/>
 * If the identity is not of the expected type, it will be skipped.
 *
 * @param identityDescriptor the Kerberos identity to process
 * @param expectedType the expected principal type
 * @param kerberosEnvProperties the kerberos-env properties
 * @param kerberosOperationHandler the relevant KerberosOperationHandler
 * @param configurations the existing configurations for the cluster
 * @param hostname the hostname of the host to create the identity for (nullable)
 * @return the relevant keytab data, if successful; otherwise null
 * @throws AmbariException if the principal account or keytab could not be created
 */
private Keytab createIdentity(KerberosIdentityDescriptor identityDescriptor,
KerberosPrincipalType expectedType, Map<String, String> kerberosEnvProperties,
KerberosOperationHandler kerberosOperationHandler,
Map<String, Map<String, String>> configurations, String hostname)
throws AmbariException {
Keytab keytab = null;
if (identityDescriptor != null) {
KerberosPrincipalDescriptor principalDescriptor = identityDescriptor.getPrincipalDescriptor();
if (principalDescriptor != null) {
// If this principal type is expected, continue, else skip it.
if (expectedType == principalDescriptor.getType()) {
// Resolve the principal template against the cluster configurations.
String principal = variableReplacementHelper.replaceVariables(principalDescriptor.getValue(), configurations);
// Replace _HOST with the supplied hostname, if one was provided
if (!StringUtils.isEmpty(hostname)) {
principal = principal.replace("_HOST", hostname);
}
// If this principal is already in the Ambari database, then don't try to recreate it or its
// keytab file.
if (!kerberosPrincipalDAO.exists(principal)) {
CreatePrincipalsServerAction.CreatePrincipalResult result;
// Delegate account creation to the server action; a null result means failure.
result = injector.getInstance(CreatePrincipalsServerAction.class).createPrincipal(
principal,
KerberosPrincipalType.SERVICE.equals(expectedType),
kerberosEnvProperties,
kerberosOperationHandler,
false,
null);
if (result == null) {
throw new AmbariException("Failed to create the account for " + principal);
} else {
KerberosKeytabDescriptor keytabDescriptor = identityDescriptor.getKeytabDescriptor();
// A keytab is only produced when the identity declares a keytab descriptor.
if (keytabDescriptor != null) {
keytab = injector.getInstance(CreateKeytabFilesServerAction.class).createKeytab(
principal,
result.getPassword(),
result.getKeyNumber(),
kerberosOperationHandler,
true,
true,
null);
if (keytab == null) {
throw new AmbariException("Failed to create the keytab for " + principal);
}
}
}
}
}
}
}
return keytab;
}
/**
 * Validate the KDC admin credentials.
 * <p/>
 * When identities are managed by Ambari, opens a KerberosOperationHandler with the stored
 * KDC administrator credential and tests it, translating low-level Kerberos operation
 * failures into configuration/authentication exceptions with actionable messages.
 *
 * @param kerberosDetails the KerberosDetails containing information about the Kerberos configuration
 *                        for the cluster, if null, a new KerberosDetails will be created based on
 *                        information found in the associated cluster
 * @param cluster         associated cluster
 * @throws KerberosMissingAdminCredentialsException if no credential is stored or the stored one fails the test
 * @throws KerberosAdminAuthenticationException     if the stored credential is rejected by the KDC
 * @throws KerberosInvalidConfigurationException    if the KDC cannot be reached or is misconfigured
 * @throws AmbariException if any other error occurs while trying to validate the credentials
 */
private void validateKDCCredentials(KerberosDetails kerberosDetails, Cluster cluster) throws KerberosMissingAdminCredentialsException,
KerberosAdminAuthenticationException,
KerberosInvalidConfigurationException,
AmbariException {
if (kerberosDetails == null) {
kerberosDetails = getKerberosDetails(cluster, null);
}
// Nothing to validate when Ambari does not manage the Kerberos identities.
if (kerberosDetails.manageIdentities()) {
PrincipalKeyCredential credentials = getKDCAdministratorCredentials(cluster.getClusterName());
if (credentials == null) {
throw new KerberosMissingAdminCredentialsException();
} else {
KerberosOperationHandler operationHandler = kerberosOperationHandlerFactory.getKerberosOperationHandler(kerberosDetails.getKdcType());
if (operationHandler == null) {
throw new AmbariException("Failed to get an appropriate Kerberos operation handler.");
} else {
boolean missingCredentials = false;
try {
operationHandler.open(credentials, kerberosDetails.getDefaultRealm(), kerberosDetails.getKerberosEnvProperties());
// todo: this is really odd that open doesn't throw an exception if the credentials are missing
missingCredentials = !operationHandler.testAdministratorCredentials();
} catch (KerberosAdminAuthenticationException e) {
// Wrap with guidance on how to (re)set the KDC admin credential via the API.
throw new KerberosAdminAuthenticationException(
"Invalid KDC administrator credentials.\n" +
"The KDC administrator credentials must be set as a persisted or temporary credential resource." +
"This may be done by issuing a POST (or PUT for updating) to the /api/v1/clusters/:clusterName/credentials/kdc.admin.credential API entry point with the following payload:\n" +
"{\n" +
"  \"Credential\" : {\n" +
"    \"principal\" : \"(PRINCIPAL)\", \"key\" : \"(PASSWORD)\", \"type\" : \"(persisted|temporary)\"}\n" +
"  }\n" +
"}", e);
} catch (KerberosKDCConnectionException e) {
throw new KerberosInvalidConfigurationException(
"Failed to connect to KDC - " + e.getMessage() + "\n" +
"Update the KDC settings in krb5-conf and kerberos-env configurations to correct this issue.",
e);
} catch (KerberosKDCSSLConnectionException e) {
throw new KerberosInvalidConfigurationException(
"Failed to connect to KDC - " + e.getMessage() + "\n" +
"Make sure the server's SSL certificate or CA certificates have been imported into Ambari's truststore.",
e);
} catch (KerberosRealmException e) {
throw new KerberosInvalidConfigurationException(
"Failed to find a KDC for the specified realm - " + e.getMessage() + "\n" +
"Update the KDC settings in krb5-conf and kerberos-env configurations to correct this issue.",
e);
} catch (KerberosLDAPContainerException e) {
throw new KerberosInvalidConfigurationException(
"The principal container was not specified\n" +
"Set the 'container_dn' value in the kerberos-env configuration to correct this issue.",
e);
} catch (KerberosOperationException e) {
// Any other Kerberos operation failure is surfaced as a generic AmbariException.
throw new AmbariException(e.getMessage(), e);
} finally {
// Always release the handler; close failures are deliberately swallowed.
try {
operationHandler.close();
} catch (KerberosOperationException e) {
// Ignore this...
}
}
// need to throw this outside of the try/catch so it isn't caught
if (missingCredentials) {
throw new KerberosMissingAdminCredentialsException();
}
}
}
}
}
/**
 * Performs operations needed to process Kerberos related tasks on the relevant cluster.
 * <p/>
 * Iterates through the components installed on the relevant cluster to determine if work
 * need to be done. Calls into the Handler implementation to provide guidance and set up stages
 * to perform the work needed to complete the relative action.
 *
 * @param cluster the relevant Cluster
 * @param kerberosDetails a KerberosDetails containing information about relevant Kerberos configuration
 * @param serviceComponentFilter a Map of service names to component names indicating the relevant
 *                               set of services and components - if null, no filter is relevant;
 *                               if empty, the filter indicates no relevant services or components
 * @param hostFilter a set of hostname indicating the set of hosts to process -
 *                   if null, no filter is relevant; if empty, the filter indicates no
 *                   relevant hosts
 * @param identityFilter a Collection of identity names indicating the relevant identities -
 *                       if null, no filter is relevant; if empty, the filter indicates no
 *                       relevant identities
 * @param hostsToForceKerberosOperations a set of host names on which it is expected that the
 *                                       Kerberos client is or will be in the INSTALLED state by
 *                                       the time the operations targeted for them are to be
 *                                       executed - if empty or null, this no hosts will be
 *                                       "forced"
 * @param requestStageContainer a RequestStageContainer to place generated stages, if needed -
 *                              if null a new RequestStageContainer will be created.
 * @param handler a Handler to use to provide guidance and set up stages
 *                to perform the work needed to complete the relative action
 * @return the updated or a new RequestStageContainer containing the stages that need to be
 *         executed to complete this task; or null if no stages need to be executed.
 * @throws AmbariException
 * @throws KerberosInvalidConfigurationException if an issue occurs trying to get the
 *                                               Kerberos-specific configuration details
 */
@Transactional
RequestStageContainer handle(Cluster cluster,
KerberosDetails kerberosDetails,
Map<String, ? extends Collection<String>> serviceComponentFilter,
Set<String> hostFilter, Collection<String> identityFilter,
Set<String> hostsToForceKerberosOperations,
RequestStageContainer requestStageContainer,
final Handler handler)
throws AmbariException, KerberosOperationException {
final KerberosDescriptor kerberosDescriptor = getKerberosDescriptor(cluster, false);
// Determine which ServiceComponentHosts fall within the service/component and host filters.
List<ServiceComponentHost> schToProcess = getServiceComponentHostsToProcess(
cluster,
kerberosDescriptor,
serviceComponentFilter,
hostFilter);
// While iterating over all the ServiceComponentHosts find hosts that have KERBEROS_CLIENT
// components in the INSTALLED state and add them to the hostsWithValidKerberosClient Set.
// This is needed to help determine which hosts to perform actions for and create tasks for.
Set<String> hostsWithValidKerberosClient = null;
// Create a temporary directory to store metadata needed to complete this task. Information
// such as which principals and keytabs files to create as well as what configurations need
// to be update are stored in data files in this directory. Any keytab files are stored in
// this directory until they are distributed to their appropriate hosts.
File dataDirectory = null;
// If there are ServiceComponentHosts to process...
if (!schToProcess.isEmpty()) {
// Fail fast when the KDC administrator credentials are missing or invalid.
validateKDCCredentials(kerberosDetails, cluster);
// Create a temporary directory to store metadata needed to complete this task. Information
// such as which principals and keytabs files to create as well as what configurations need
// to be update are stored in data files in this directory. Any keytab files are stored in
// this directory until they are distributed to their appropriate hosts.
dataDirectory = createTemporaryDirectory();
hostsWithValidKerberosClient = getHostsWithValidKerberosClient(cluster);
// Ensure that that hosts that should be assumed to be in the correct state when needed are
// in the hostsWithValidKerberosClient collection.
if (hostsToForceKerberosOperations != null) {
hostsWithValidKerberosClient.addAll(hostsToForceKerberosOperations);
}
}
// Always set up the necessary stages to perform the tasks needed to complete the operation.
// Some stages may be no-ops, this is expected.
// Gather data needed to create stages and tasks...
Map<String, Set<String>> clusterHostInfo = StageUtils.getClusterHostInfo(cluster);
String clusterHostInfoJson = StageUtils.getGson().toJson(clusterHostInfo)
;
@Experimental(feature = ExperimentalFeature.MULTI_SERVICE, comment = "The cluster stack id is deprecated")
Map<String, String> hostParams = customCommandExecutionHelper.createDefaultHostParams(cluster, cluster.getDesiredStackVersion());
String hostParamsJson = StageUtils.getGson().toJson(hostParams);
String ambariServerHostname = StageUtils.getHostName();
ServiceComponentHostServerActionEvent event = new ServiceComponentHostServerActionEvent(
RootComponent.AMBARI_SERVER.name(),
ambariServerHostname, // TODO: Choose a random hostname from the cluster. All tasks for the AMBARI_SERVER service will be executed on this Ambari server
System.currentTimeMillis());
RoleCommandOrder roleCommandOrder = ambariManagementController.getRoleCommandOrder(cluster);
// If a RequestStageContainer does not already exist, create a new one...
if (requestStageContainer == null) {
requestStageContainer = new RequestStageContainer(
actionManager.getNextRequestId(),
null,
requestFactory,
actionManager);
}
// Use the handler implementation to setup the relevant stages.
handler.createStages(cluster, clusterHostInfoJson,
hostParamsJson, event, roleCommandOrder, kerberosDetails, dataDirectory,
requestStageContainer, schToProcess, serviceComponentFilter, hostFilter, identityFilter,
hostsWithValidKerberosClient);
// Add the finalize stage...
handler.addFinalizeOperationStage(cluster, clusterHostInfoJson, hostParamsJson, event,
dataDirectory, roleCommandOrder, requestStageContainer, kerberosDetails);
return requestStageContainer;
}
/**
* Performs operations needed to process Kerberos related tasks to manage a (unique) test identity
* on the relevant cluster.
* <p/>
* If Ambari is not managing Kerberos identities, than this method does nothing.
*
* @param cluster the relevant Cluster
* @param kerberosDetails a KerberosDetails containing information about relevant Kerberos
* configuration
* @param commandParameters the command parameters map used to read and/or write attributes
* related to this operation
* @param requestStageContainer a RequestStageContainer to place generated stages, if needed -
* if null a new RequestStageContainer will be created.
* @param handler a Handler to use to provide guidance and set up stages
* to perform the work needed to complete the relative action
* @return the updated or a new RequestStageContainer containing the stages that need to be
* executed to complete this task; or null if no stages need to be executed.
* @throws AmbariException
* @throws KerberosOperationException
*/
private RequestStageContainer handleTestIdentity(Cluster cluster,
KerberosDetails kerberosDetails,
Map<String, String> commandParameters, RequestStageContainer requestStageContainer,
Handler handler) throws AmbariException, KerberosOperationException {
if (kerberosDetails.manageIdentities()) {
if (commandParameters == null) {
throw new AmbariException("The properties map must not be null. It is needed to store data related to the service check identity");
}
List<ServiceComponentHost> serviceComponentHostsToProcess = new ArrayList<>();
KerberosDescriptor kerberosDescriptor = getKerberosDescriptor(cluster, false);
// This is needed to help determine which hosts to perform actions for and create tasks for.
Set<String> hostsWithValidKerberosClient = getHostsWithValidKerberosClient(cluster);
// Create a temporary directory to store metadata needed to complete this task. Information
// such as which principals and keytabs files to create as well as what configurations need
// to be update are stored in data files in this directory. Any keytab files are stored in
// this directory until they are distributed to their appropriate hosts.
File dataDirectory = createTemporaryDirectory();
// Calculate the current non-host-specific configurations. These will be used to replace
// variables within the Kerberos descriptor data
Map<String, Map<String, String>> configurations = calculateConfigurations(cluster, null, kerberosDescriptor, false, false);
String principal = variableReplacementHelper.replaceVariables("${kerberos-env/service_check_principal_name}@${realm}", configurations);
String keytabFilePath = variableReplacementHelper.replaceVariables("${keytab_dir}/kerberos.service_check.${short_date}.keytab", configurations);
String keytabFileOwnerName = variableReplacementHelper.replaceVariables("${cluster-env/smokeuser}", configurations);
String keytabFileOwnerAccess = "rw";
String keytabFileGroupName = variableReplacementHelper.replaceVariables("${cluster-env/user_group}", configurations);
String keytabFileGroupAccess = "r";
// Add the relevant principal name and keytab file data to the command params state
commandParameters.put("principal_name", principal);
commandParameters.put("keytab_file", keytabFilePath);
try {
// Get a list KERBEROS/KERBEROS_CLIENT ServiceComponentHost objects
List<ServiceComponentHost> serviceComponentHosts = cluster.getServiceComponentHosts(Service.Type.KERBEROS.name(), Role.KERBEROS_CLIENT.name());
if ((serviceComponentHosts != null) && !serviceComponentHosts.isEmpty()) {
// Iterate over the KERBEROS_CLIENT service component hosts to get the service and
// component-level Kerberos descriptors in order to determine which principals,
// keytab files needed to be created or updated.
for (ServiceComponentHost sch : serviceComponentHosts) {
if (sch.getState() == State.INSTALLED) {
String hostname = sch.getHostName();
KerberosKeytabEntity kke = kerberosKeytabDAO.find(keytabFilePath);
if (kke == null) {
kke = new KerberosKeytabEntity();
kke.setKeytabPath(keytabFilePath);
kke.setOwnerName(keytabFileOwnerName);
kke.setOwnerAccess(keytabFileOwnerAccess);
kke.setGroupName(keytabFileGroupName);
kke.setGroupAccess(keytabFileGroupAccess);
kerberosKeytabDAO.create(kke);
}
// create principals
if (!kerberosPrincipalDAO.exists(principal)) {
kerberosPrincipalDAO.create(principal, false);
}
KerberosKeytabPrincipalEntity kkp = kerberosKeytabPrincipalDAO.findOrCreate(kke, hostDAO.findById(sch.getHost().getHostId()), kerberosPrincipalDAO.find(principal));
if(kkp.putServiceMapping(sch.getServiceName(), sch.getServiceComponentName())) {
kerberosKeytabPrincipalDAO.merge(kkp);
}
kerberosKeytabDAO.merge(kke);
hostsWithValidKerberosClient.add(hostname);
serviceComponentHostsToProcess.add(sch);
}
}
}
} catch (Exception e) {
// make sure to log what is going wrong
LOG.error("Failed " + e);
throw e;
}
// If there are ServiceComponentHosts to process, make sure the administrator credential
// are available
if (!serviceComponentHostsToProcess.isEmpty()) {
try {
validateKDCCredentials(kerberosDetails, cluster);
} catch (Exception e) {
LOG.error("Cannot validate credentials: " + e);
try {
FileUtils.deleteDirectory(dataDirectory);
} catch (Throwable t) {
LOG.warn(String.format("The data directory (%s) was not deleted due to an error condition - {%s}",
dataDirectory.getAbsolutePath(), t.getMessage()), t);
}
throw e;
}
}
// Always set up the necessary stages to perform the tasks needed to complete the operation.
// Some stages may be no-ops, this is expected.
// Gather data needed to create stages and tasks...
Map<String, Set<String>> clusterHostInfo = StageUtils.getClusterHostInfo(cluster);
String clusterHostInfoJson = StageUtils.getGson().toJson(clusterHostInfo);
@Experimental(feature = ExperimentalFeature.MULTI_SERVICE, comment = "The cluster stack id is deprecated")
Map<String, String> hostParams = customCommandExecutionHelper.createDefaultHostParams(cluster, cluster.getDesiredStackVersion());
String hostParamsJson = StageUtils.getGson().toJson(hostParams);
String ambariServerHostname = StageUtils.getHostName();
ServiceComponentHostServerActionEvent event = new ServiceComponentHostServerActionEvent(
RootComponent.AMBARI_SERVER.name(),
ambariServerHostname, // TODO: Choose a random hostname from the cluster. All tasks for the AMBARI_SERVER service will be executed on this Ambari server
System.currentTimeMillis());
RoleCommandOrder roleCommandOrder = ambariManagementController.getRoleCommandOrder(cluster);
// If a RequestStageContainer does not already exist, create a new one...
if (requestStageContainer == null) {
requestStageContainer = new RequestStageContainer(
actionManager.getNextRequestId(),
null,
requestFactory,
actionManager);
}
// Use the handler implementation to setup the relevant stages.
// Set the service/component filter to an empty map since the service/component processing
// was done above.
handler.createStages(cluster,
clusterHostInfoJson, hostParamsJson, event, roleCommandOrder, kerberosDetails,
dataDirectory, requestStageContainer, serviceComponentHostsToProcess,
Collections.singletonMap("KERBEROS", Lists.newArrayList("KERBEROS_CLIENT")),
null, Sets.newHashSet(principal), hostsWithValidKerberosClient);
handler.addFinalizeOperationStage(cluster, clusterHostInfoJson, hostParamsJson, event,
dataDirectory, roleCommandOrder, requestStageContainer, kerberosDetails);
}
return requestStageContainer;
}
/**
* Gathers the Kerberos-related data from configurations and stores it in a new KerberosDetails
* instance.
*
* @param cluster the relevant Cluster
* @param manageIdentities a Boolean value indicating how to override the configured behavior
* of managing Kerberos identities; if null the configured behavior
* will not be overridden
* @return a new KerberosDetails with the collected configuration data
* @throws AmbariException
*/
private KerberosDetails getKerberosDetails(Cluster cluster, Boolean manageIdentities)
throws KerberosInvalidConfigurationException, AmbariException {
KerberosDetails kerberosDetails = new KerberosDetails();
if (cluster == null) {
String message = "The cluster object is not available";
LOG.error(message);
throw new AmbariException(message);
}
Config configKrb5Conf = cluster.getDesiredConfigByType("krb5-conf");
if (configKrb5Conf == null) {
String message = "The 'krb5-conf' configuration is not available";
LOG.error(message);
throw new AmbariException(message);
}
Map<String, String> krb5ConfProperties = configKrb5Conf.getProperties();
if (krb5ConfProperties == null) {
String message = "The 'krb5-conf' configuration properties are not available";
LOG.error(message);
throw new AmbariException(message);
}
Config configKerberosEnv = cluster.getDesiredConfigByType(KERBEROS_ENV);
if (configKerberosEnv == null) {
String message = "The 'kerberos-env' configuration is not available";
LOG.error(message);
throw new AmbariException(message);
}
Map<String, String> kerberosEnvProperties = configKerberosEnv.getProperties();
if (kerberosEnvProperties == null) {
String message = "The 'kerberos-env' configuration properties are not available";
LOG.error(message);
throw new AmbariException(message);
}
kerberosDetails.setSecurityType(cluster.getSecurityType());
kerberosDetails.setDefaultRealm(kerberosEnvProperties.get(DEFAULT_REALM));
kerberosDetails.setKerberosEnvProperties(kerberosEnvProperties);
// If set, override the manage identities behavior
kerberosDetails.setManageIdentities(manageIdentities);
String kdcTypeProperty = kerberosEnvProperties.get(KDC_TYPE);
if ((kdcTypeProperty == null) && kerberosDetails.manageIdentities()) {
String message = "The 'kerberos-env/kdc_type' value must be set to a valid KDC type";
LOG.error(message);
throw new KerberosInvalidConfigurationException(message);
}
KDCType kdcType;
try {
kdcType = KDCType.translate(kdcTypeProperty);
} catch (IllegalArgumentException e) {
String message = String.format("Invalid 'kdc_type' value: %s", kdcTypeProperty);
LOG.error(message);
throw new AmbariException(message);
}
// Set the KDCType to the the MIT_KDC as a fallback.
kerberosDetails.setKdcType((kdcType == null) ? KDCType.MIT_KDC : kdcType);
return kerberosDetails;
}
/**
* Creates a temporary directory within the system temporary directory
* <p/>
* The resulting directory is to be removed by the caller when desired.
*
* @return a File pointing to the new temporary directory, or null if one was not created
* @throws AmbariException if a new temporary directory cannot be created
*/
@Override
public File createTemporaryDirectory() throws AmbariException {
try {
File temporaryDirectory = getConfiguredTemporaryDirectory();
File directory;
int tries = 0;
long now = System.currentTimeMillis();
do {
directory = new File(temporaryDirectory, String.format("%s%d-%d.d",
KerberosServerAction.DATA_DIRECTORY_PREFIX, now, tries));
if ((directory.exists()) || !directory.mkdirs()) {
directory = null; // Rest and try again...
} else {
LOG.debug("Created temporary directory: {}", directory.getAbsolutePath());
}
} while ((directory == null) && (++tries < 100));
if (directory == null) {
throw new IOException(String.format("Failed to create a temporary directory in %s", temporaryDirectory));
}
return directory;
} catch (IOException e) {
String message = "Failed to create the temporary data directory.";
LOG.error(message, e);
throw new AmbariException(message, e);
}
}
/**
* Merges the specified configuration property in a map of configuration types.
* The supplied property is processed to replace variables using the replacement Map.
* <p/>
* See {@link VariableReplacementHelper#replaceVariables(String, java.util.Map)}
* for information on variable replacement.
*
* @param configurations the Map of configuration types to update
* @param configurationSpecification the config-type/property_name value specifying the property to set
* @param value the value of the property to set
* @param replacements a Map of (grouped) replacement values
* @throws AmbariException
*/
private void mergeConfiguration(Map<String, Map<String, String>> configurations,
String configurationSpecification,
String value,
Map<String, Map<String, String>> replacements) throws AmbariException {
if (configurationSpecification != null) {
String[] parts = configurationSpecification.split("/");
if (parts.length == 2) {
String type = parts[0];
String property = parts[1];
mergeConfigurations(configurations, type, Collections.singletonMap(property, value), replacements);
}
}
}
/**
* Merges configuration from a Map of configuration updates into a main configurations Map. Each
* property in the updates Map is processed to replace variables using the replacement Map.
* <p/>
* See {@link VariableReplacementHelper#replaceVariables(String, java.util.Map)}
* for information on variable replacement.
*
* @param configurations a Map of configurations
* @param type the configuration type
* @param updates a Map of property updates
* @param replacements a Map of (grouped) replacement values
* @throws AmbariException
*/
private void mergeConfigurations(Map<String, Map<String, String>> configurations, String type,
Map<String, String> updates,
Map<String, Map<String, String>> replacements) throws AmbariException {
if (updates != null) {
Map<String, String> existingProperties = configurations.get(type);
if (existingProperties == null) {
existingProperties = new HashMap<>();
configurations.put(type, existingProperties);
}
for (Map.Entry<String, String> property : updates.entrySet()) {
existingProperties.put(
variableReplacementHelper.replaceVariables(property.getKey(), replacements),
variableReplacementHelper.replaceVariables(property.getValue(), replacements)
);
}
}
}
/**
* Adds identities to the AuthToLocalBuilder.
*
* @param authToLocalBuilder the AuthToLocalBuilder to use to build the auth_to_local mapping
* @param identities a List of KerberosIdentityDescriptors to process
* @param identityFilter a Collection of identity names indicating the relevant identities -
* if null, no filter is relevant; if empty, the filter indicates no
* relevant identities
* @param configurations a Map of configurations to use a replacements for variables
* in identity fields
* @throws org.apache.ambari.server.AmbariException
*/
private void addIdentities(AuthToLocalBuilder authToLocalBuilder,
List<KerberosIdentityDescriptor> identities, Collection<String> identityFilter,
Map<String, Map<String, String>> configurations) throws AmbariException {
if (identities != null) {
for (KerberosIdentityDescriptor identity : identities) {
// If there is no filter or the filter contains the current identity's name...
if ((identityFilter == null) || identityFilter.contains(identity.getName())) {
KerberosPrincipalDescriptor principalDescriptor = identity.getPrincipalDescriptor();
if (principalDescriptor != null) {
authToLocalBuilder.addRule(
variableReplacementHelper.replaceVariables(principalDescriptor.getValue(), configurations),
variableReplacementHelper.replaceVariables(principalDescriptor.getLocalUsername(), configurations));
}
}
}
}
}
/**
* Creates a temporary file within the system temporary directory
* <p/>
* The resulting file is to be removed by the caller when desired.
*
* @return a File pointing to the new temporary file, or null if one was not created
* @throws AmbariException if a new temporary directory cannot be created
*/
protected File createTemporaryFile() throws AmbariException {
try {
return File.createTempFile("tmp", ".tmp", getConfiguredTemporaryDirectory());
} catch (IOException e) {
String message = "Failed to create a temporary file.";
LOG.error(message, e);
throw new AmbariException(message, e);
}
}
/**
* Gets the configured temporary directory.
*
* @return a File pointing to the configured temporary directory
* @throws IOException
*/
protected File getConfiguredTemporaryDirectory() throws IOException {
String tempDirectoryPath = configuration.getServerTempDir();
if (StringUtils.isEmpty(tempDirectoryPath)) {
tempDirectoryPath = System.getProperty("java.io.tmpdir");
}
if (tempDirectoryPath == null) {
throw new IOException("The System property 'java.io.tmpdir' does not specify a temporary directory");
}
return new File(tempDirectoryPath);
}
/**
* Creates a new stage
*
* @param id the new stage's id
* @param cluster the relevant Cluster
* @param requestId the relevant request Id
* @param requestContext a String describing the stage
* @param commandParams JSON-encoded command parameters
* @param hostParams JSON-encoded host parameters
* @return a newly created Stage
*/
private Stage createNewStage(long id, Cluster cluster, long requestId,
String requestContext, String commandParams, String hostParams) {
Stage stage = stageFactory.createNew(requestId,
BASE_LOG_DIR + File.pathSeparator + requestId,
cluster.getClusterName(),
cluster.getClusterId(),
requestContext,
commandParams,
hostParams);
stage.setStageId(id);
return stage;
}
/**
* Given a Collection of ServiceComponentHosts generates a unique list of hosts.
*
* @param serviceComponentHosts a Collection of ServiceComponentHosts from which to to retrieve host names
* @param allowedStates a Set of HostStates to use to filter the list of hosts, if null, no filter is applied
* @return a List of (unique) host names
* @throws org.apache.ambari.server.AmbariException
*/
private List<String> createUniqueHostList(Collection<ServiceComponentHost> serviceComponentHosts, Set<HostState> allowedStates)
throws AmbariException {
Set<String> hostNames = new HashSet<>();
Set<String> visitedHostNames = new HashSet<>();
if (serviceComponentHosts != null) {
for (ServiceComponentHost sch : serviceComponentHosts) {
String hostname = sch.getHostName();
if (!visitedHostNames.contains(hostname)) {
// If allowedStates is null, assume the caller doesnt care about the state of the host
// so skip the call to get the relevant Host data and just add the host to the list
if (allowedStates == null) {
hostNames.add(hostname);
} else {
Host host = clusters.getHost(hostname);
if (allowedStates.contains(host.getState())) {
hostNames.add(hostname);
}
}
visitedHostNames.add(hostname);
}
}
}
return new ArrayList<>(hostNames);
}
@Override
public boolean isClusterKerberosEnabled(Cluster cluster) {
return cluster.getSecurityType() == SecurityType.KERBEROS;
}
@Override
public boolean shouldExecuteCustomOperations(SecurityType requestSecurityType, Map<String, String> requestProperties) {
if (((requestSecurityType == SecurityType.KERBEROS) || (requestSecurityType == SecurityType.NONE)) &&
(requestProperties != null) && !requestProperties.isEmpty()) {
for (SupportedCustomOperation type : SupportedCustomOperation.values()) {
if (requestProperties.containsKey(type.name().toLowerCase())) {
return true;
}
}
}
return false;
}
@Override
public Boolean getManageIdentitiesDirective(Map<String, String> requestProperties) {
String value = (requestProperties == null) ? null : requestProperties.get(DIRECTIVE_MANAGE_KERBEROS_IDENTITIES);
return (value == null)
? null
: !"false".equalsIgnoreCase(value);
}
@Override
public boolean getForceToggleKerberosDirective(Map<String, String> requestProperties) {
return (requestProperties != null) && "true".equalsIgnoreCase(requestProperties.get(DIRECTIVE_FORCE_TOGGLE_KERBEROS));
}
@Override
public Map<String, Map<String, String>> getIdentityConfigurations(List<KerberosIdentityDescriptor> identityDescriptors) {
Map<String, Map<String, String>> map = new HashMap<>();
if (identityDescriptors != null) {
for (KerberosIdentityDescriptor identityDescriptor : identityDescriptors) {
KerberosPrincipalDescriptor principalDescriptor = identityDescriptor.getPrincipalDescriptor();
if (principalDescriptor != null) {
putConfiguration(map, principalDescriptor.getConfiguration(), principalDescriptor.getValue());
}
KerberosKeytabDescriptor keytabDescriptor = identityDescriptor.getKeytabDescriptor();
if (keytabDescriptor != null) {
putConfiguration(map, keytabDescriptor.getConfiguration(), keytabDescriptor.getFile());
}
}
}
return map;
}
/**
* Inserts a configuration property and value into a map of configuration types to property
* name/value pair maps.
*
* @param map the Map to insert into
* @param configuration a configuration property in the form of config-type/property_name
* @param value the value of the configuration property
*/
private void putConfiguration(Map<String, Map<String, String>> map, String configuration, String value) {
if (configuration != null) {
String[] principalTokens = configuration.split("/");
if (principalTokens.length == 2) {
String type = principalTokens[0];
String propertyName = principalTokens[1];
Map<String, String> properties = map.get(type);
if (properties == null) {
properties = new HashMap<>();
map.put(type, properties);
}
properties.put(propertyName, value);
}
}
}
/**
* Returns the active identities for the named service component in the cluster.
*
* @param cluster the relevant cluster (mandatory)
* @param hostname the name of a host for which to find results, null indicates all hosts
* @param serviceName the name of a service for which to find results, null indicates all
* services
* @param componentName the name of a component for which to find results, null indicates all
* components
* @param kerberosDescriptor the relevant Kerberos Descriptor
* requested service component
* @param filterContext the context to use for filtering identities based on the state of the cluster
* @return a list of KerberosIdentityDescriptors representing the active identities for the
* @throws AmbariException if an error occurs processing the cluster's active identities
*/
private List<KerberosIdentityDescriptor> getActiveIdentities(Cluster cluster,
String hostname,
String serviceName,
String componentName,
KerberosDescriptor kerberosDescriptor,
Map<String, Object> filterContext)
throws AmbariException {
List<KerberosIdentityDescriptor> identities = new ArrayList<>();
List<ServiceComponentHost> serviceComponentHosts = cluster.getServiceComponentHosts(hostname);
if (serviceComponentHosts != null) {
for (ServiceComponentHost serviceComponentHost : serviceComponentHosts) {
String schServiceName = serviceComponentHost.getServiceName();
String schComponentName = serviceComponentHost.getServiceComponentName();
if (((serviceName == null) || serviceName.equals(schServiceName)) &&
((componentName == null) || componentName.equals(schComponentName))) {
KerberosServiceDescriptor serviceDescriptor = kerberosDescriptor.getService(schServiceName);
if (serviceDescriptor != null) {
List<KerberosIdentityDescriptor> serviceIdentities = serviceDescriptor.getIdentities(true, filterContext);
if (serviceIdentities != null) {
identities.addAll(serviceIdentities);
}
KerberosComponentDescriptor componentDescriptor = serviceDescriptor.getComponent(schComponentName);
if (componentDescriptor != null) {
List<KerberosIdentityDescriptor> componentIdentities = componentDescriptor.getIdentities(true, filterContext);
if (componentIdentities != null) {
identities.addAll(componentIdentities);
}
}
}
}
}
}
return identities;
}
  /**
   * Determines the existing configurations for the cluster, related to a given hostname (if provided)
   *
   * @param cluster  the cluster
   * @param hostname a hostname
   * @return a map of the existing configurations
   * @throws AmbariException
   */
  private Map<String, Map<String, String>> calculateExistingConfigurations(Cluster cluster, String hostname) throws AmbariException {
    // For a configuration type, both tag and an actual configuration can be stored
    // Configurations from the tag is always expanded and then over-written by the actual
    // global:version1:{a1:A1,b1:B1,d1:D1} + global:{a1:A2,c1:C1,DELETED_d1:x} ==>
    // global:{a1:A2,b1:B1,c1:C1}
    Map<String, Map<String, String>> configurations = new HashMap<>();
    // Resolve the effective configuration tags for the cluster (including host-level overrides
    // when a hostname is supplied) and expand them into actual property maps.
    Map<String, Map<String, String>> configurationTags = ambariManagementController.findConfigurationTagsWithOverrides(cluster, hostname);
    Map<String, Map<String, String>> configProperties = configHelper.getEffectiveConfigProperties(cluster, configurationTags);
    // Apply the configurations saved with the Execution Cmd on top of
    // derived configs - This will take care of all the hacks
    for (Map.Entry<String, Map<String, String>> entry : configProperties.entrySet()) {
      String type = entry.getKey();
      Map<String, String> allLevelMergedConfig = entry.getValue();
      Map<String, String> configuration = configurations.get(type);
      if (configuration == null) {
        // NOTE(review): `configurations` starts empty and each type appears at most once in
        // `configProperties`, so this branch appears to always be taken and the merge branch
        // below looks unreachable — confirm before simplifying.
        configuration = new HashMap<>(allLevelMergedConfig);
      } else {
        // Merge the newly expanded properties over any previously collected ones for this type.
        Map<String, String> mergedConfig = configHelper.getMergedConfig(allLevelMergedConfig, configuration);
        configuration.clear();
        configuration.putAll(mergedConfig);
      }
      configurations.put(type, configuration);
    }
    return configurations;
  }
/**
* Add configurations related to Kerberos, to a previously created map of configurations.
* <p/>
* The supplied map of configurations is expected to be mutable and will be altered.
*
* @param cluster the cluster
* @param configurations a map of configurations
* @param hostname a hostname
* @param kerberosDescriptorProperties the Kerberos descriptor properties
* @return the supplied map of configurations with updates applied
* @throws AmbariException
*/
private Map<String, Map<String, String>> addAdditionalConfigurations(Cluster cluster, Map<String, Map<String, String>> configurations,
String hostname, Map<String, String> kerberosDescriptorProperties)
throws AmbariException {
// A map to hold un-categorized properties. This may come from the KerberosDescriptor
// and will also contain a value for the current host
Map<String, String> generalProperties = configurations.get("");
if (generalProperties == null) {
generalProperties = new HashMap<>();
configurations.put("", generalProperties);
}
// If any properties are set in the calculated KerberosDescriptor, add them into the
// Map of configurations as an un-categorized type (using an empty string)
if (kerberosDescriptorProperties != null) {
generalProperties.putAll(kerberosDescriptorProperties);
}
if (!StringUtils.isEmpty(hostname)) {
// Add the current hostname under "host" and "hostname"
generalProperties.put("host", hostname);
generalProperties.put("hostname", hostname);
}
// Add the current cluster's name
generalProperties.put("cluster_name", cluster.getClusterName());
// Add the current date in short format (MMddyy)
generalProperties.put("short_date", new SimpleDateFormat("MMddyy").format(new Date()));
// add clusterHostInfo config
if (configurations.get("clusterHostInfo") == null) {
Map<String, Set<String>> clusterHostInfo = StageUtils.getClusterHostInfo(cluster);
if (clusterHostInfo != null) {
Map<String, String> componentHosts = new HashMap<>();
clusterHostInfo = StageUtils.substituteHostIndexes(clusterHostInfo);
for (Map.Entry<String, Set<String>> entry : clusterHostInfo.entrySet()) {
componentHosts.put(entry.getKey(), StringUtils.join(entry.getValue(), ","));
}
configurations.put("clusterHostInfo", componentHosts);
}
}
configurations.put("principals", principalNames(cluster, configurations));
return configurations;
}
/**
* Creates a deep copy of a map of maps, typically used to copy configuration sets.
*
* @param map the map to copy
* @return a deep copy of the supplied map
*/
private Map<String, Map<String, String>> deepCopy(Map<String, Map<String, String>> map) {
if (map == null) {
return null;
} else {
Map<String, Map<String, String>> copy = new HashMap<>();
for (Map.Entry<String, Map<String, String>> entry : map.entrySet()) {
Map<String, String> innerMap = entry.getValue();
copy.put(entry.getKey(), (innerMap == null) ? null : new HashMap<>(innerMap));
}
return copy;
}
}
/**
* Get the user-supplied Kerberos descriptor from the set of cluster artifacts
*
* @param cluster the cluster
* @return a Kerberos descriptor
*/
private KerberosDescriptor getKerberosDescriptorUpdates(Cluster cluster) throws AmbariException {
// find instance using name and foreign keys
TreeMap<String, String> foreignKeys = new TreeMap<>();
foreignKeys.put("cluster", String.valueOf(cluster.getClusterId()));
ArtifactEntity entity = artifactDAO.findByNameAndForeignKeys("kerberos_descriptor", foreignKeys);
return (entity == null) ? null : kerberosDescriptorFactory.createInstance(entity.getArtifactData());
}
/**
* Get the default Kerberos descriptor from the specified stack.
*
* @param stackId the relevant stack ID
* @param includePreconfigureData <code>true</code> to include the preconfigure data; otherwise false
* @return a Kerberos Descriptor
* @throws AmbariException if an error occurs while retrieving the Kerberos descriptor
*/
private KerberosDescriptor getKerberosDescriptorFromStack(StackId stackId, boolean includePreconfigureData) throws AmbariException {
return ambariMetaInfo.getKerberosDescriptor(stackId.getStackName(), stackId.getStackVersion(), includePreconfigureData);
// -------------------------------
}
/**
* Recursively walk the Kerberos descriptor tree to find all Kerberos identity definitions and
* determine which should be filtered out.
* <p>
* No actual filtering is performed while processing since any referenced Kerberos identities need
* to be accessible throughout the process. So a map of container path to a list of identities is
* created an returned
*
* @param currentPath
* @param container
* @param context
* @param identitiesToRemove
* @return
* @throws AmbariException
*/
private Map<String, Set<String>> processWhenClauses(String currentPath, AbstractKerberosDescriptorContainer container, Map<String, Object> context, Map<String, Set<String>> identitiesToRemove) throws AmbariException {
// Get the list of this container's identities.
// Do not filter these identities using KerberosIdentityDescriptor#shouldInclude since we will do
// that later.
List<KerberosIdentityDescriptor> identities = container.getIdentities(true, null);
if ((identities != null) && !identities.isEmpty()) {
Set<String> set = null;
for (KerberosIdentityDescriptor identity : identities) {
if (!identity.shouldInclude(context)) {
if (set == null) {
set = new HashSet<>();
identitiesToRemove.put(currentPath, set);
}
set.add(identity.getName());
}
}
}
Collection<? extends AbstractKerberosDescriptorContainer> children = container.getChildContainers();
if (children != null) {
for (AbstractKerberosDescriptorContainer child : children) {
identitiesToRemove = processWhenClauses(currentPath + "/" + child.getName(), child, context, identitiesToRemove);
}
}
return identitiesToRemove;
}
/**
* Processes the configuration values related to a particular Kerberos descriptor identity definition
* by:
* <ol>
* <li>
* merging the declared properties and their values from <code>identityConfigurations</code> with the set of
* Kerberos-related configuration updates in <code>kerberosConfigurations</code>, using the existing cluster
* configurations in <code>configurations</code>
* </li>
* <li>
* ensuring that these properties are not overwritten by recommendations by the stack advisor later
* in the workflow by adding them to the <code>propertiesToIgnore</code> map
* </li>
* </ol>
*
* @param identityConfigurations a map of config-types to property name/value pairs to process
* @param kerberosConfigurations a map of config-types to property name/value pairs to be applied
* as configuration updates
* @param configurations a map of config-types to property name/value pairs representing
* the existing configurations for the cluster
* @param propertiesToIgnore a map of config-types to property names to be ignored while
* processing stack advisor recommendations
* @throws AmbariException
*/
private void processIdentityConfigurations(Map<String, Map<String, String>> identityConfigurations,
Map<String, Map<String, String>> kerberosConfigurations,
Map<String, Map<String, String>> configurations,
Map<String, Set<String>> propertiesToIgnore)
throws AmbariException {
if (identityConfigurations != null) {
for (Map.Entry<String, Map<String, String>> identitiyEntry : identityConfigurations.entrySet()) {
String configType = identitiyEntry.getKey();
Map<String, String> properties = identitiyEntry.getValue();
mergeConfigurations(kerberosConfigurations, configType, identitiyEntry.getValue(), configurations);
if ((properties != null) && !properties.isEmpty()) {
Set<String> propertyNames = propertiesToIgnore.get(configType);
if (propertyNames == null) {
propertyNames = new HashSet<>();
propertiesToIgnore.put(configType, propertyNames);
}
propertyNames.addAll(properties.keySet());
}
}
}
}
  /**
   * Gathers the Kerberos-related configurations for services not yet installed, but flagged to be
   * preconfigured.
   * <p>
   * Only existing configuration types will be updated, new types will not be added since they are
   * expected only when the relevant service has been installed. This is to help reduce the number
   * of service restarts when new services are added to clusters where Kerberos has been enabled.
   * <p>
   * If desired, the Stack Advisor will be invoked to request recommended hosts for the component.
   * This is needed to fill out the clusterHostInfo structure in the configuration map. For example,
   * <code>clusterHostInfo/knox_gateway_hosts</code>
   *
   * @param configurations           the existing configurations (updated in-place)
   * @param cluster                  the cluster
   * @param kerberosDescriptor       the kerberos descriptor
   * @param calculateClusterHostInfo true, to query the Stack Advisor for recommended hosts for the
   *                                 preconfigured services and components; false, otherwise
   * @return the updated configuration map
   * @throws AmbariException if an error occurs
   */
  private Map<String, Map<String, String>> addConfigurationsForPreProcessedServices(Map<String, Map<String, String>> configurations,
                                                                                    Cluster cluster,
                                                                                    KerberosDescriptor kerberosDescriptor,
                                                                                    boolean calculateClusterHostInfo)
      throws AmbariException {
    Map<String, KerberosServiceDescriptor> serviceDescriptorMap = kerberosDescriptor.getServices();
    if (serviceDescriptorMap != null) {
      Map<String, Service> existingServices = cluster.getServices();
      // Seed with the installed services; preconfigured services are added below as they are found.
      Set<String> allServices = new HashSet<>(existingServices.keySet());
      Set<String> componentFilter = new HashSet<>();
      StackId stackVersion = cluster.getCurrentStackVersion();
      for (KerberosServiceDescriptor serviceDescriptor : serviceDescriptorMap.values()) {
        String serviceName = serviceDescriptor.getName();
        boolean shouldPreconfigure = serviceDescriptor.shouldPreconfigure();
        // Only consider services flagged for preconfiguration that are not already installed.
        if (shouldPreconfigure && !existingServices.containsKey(serviceName)) {
          if (ambariMetaInfo.isValidService(stackVersion.getStackName(), stackVersion.getStackVersion(), serviceName)) {
            ServiceInfo serviceInfo = ambariMetaInfo.getService(stackVersion.getStackName(), stackVersion.getStackVersion(), serviceName);
            Collection<PropertyInfo> servicePropertiesInfos = serviceInfo.getProperties();
            if (servicePropertiesInfos != null) {
              // Group the service's default properties by their config type.
              Map<String, Map<String, String>> propertiesToAdd = new HashMap<>();
              for (PropertyInfo propertyInfo : servicePropertiesInfos) {
                String type = ConfigHelper.fileNameToConfigType(propertyInfo.getFilename());
                Map<String, String> map = propertiesToAdd.get(type);
                if (map == null) {
                  map = new HashMap<>();
                  propertiesToAdd.put(type, map);
                }
                map.put(propertyInfo.getName(), propertyInfo.getValue());
              }
              // Only add whole config types that are not already present; existing types are
              // left untouched (see the method-level documentation).
              for (Map.Entry<String, Map<String, String>> entry : propertiesToAdd.entrySet()) {
                if (!configurations.containsKey(entry.getKey())) {
                  configurations.put(entry.getKey(), entry.getValue());
                }
              }
            }
            // This is only needed if the Stack Advisor is being called to get recommended host
            // for components
            if (calculateClusterHostInfo) {
              // Add the service to preconfigure to the all services set for use later
              allServices.add(serviceName);
              // Add the components for the service to preconfigure to the component filter
              List<ComponentInfo> componentInfos = serviceInfo.getComponents();
              if (componentInfos != null) {
                for (ComponentInfo componentInfo : componentInfos) {
                  componentFilter.add(componentInfo.getName());
                }
              }
            }
          }
        }
      }
      // Only consult the Stack Advisor when at least one service was added for preconfiguration.
      if (calculateClusterHostInfo && (allServices.size() > existingServices.size())) {
        applyStackAdvisorHostRecommendations(cluster, allServices, componentFilter, configurations);
      }
    }
    return configurations;
  }
/**
 * Combines a stack-level Kerberos descriptor with a user-supplied Kerberos descriptor to create
 * a composite {@link KerberosDescriptor} using the following logic:
 * <p>
 * <ul>
 * <li>
 * If both the stack-level and the user-supplied Kerberos descriptors are <code>null</code>,
 * return an empty {@link KerberosDescriptor}.
 * </li>
 * <li>
 * If the stack-level Kerberos descriptor is <code>null</code> and the user-supplied Kerberos
 * descriptor is <code>non-null</code>, return the user-supplied Kerberos descriptor.
 * </li>
 * <li>
 * If the stack-level Kerberos descriptor is <code>non-null</code> and the user-supplied
 * Kerberos descriptor is <code>null</code>, return the stack-level Kerberos descriptor.
 * </li>
 * <li>
 * If neither descriptor is <code>null</code>, return the stack-level Kerberos descriptor
 * that has been updated using data from the user-supplied Kerberos descriptor.
 * </li>
 * </ul>
 *
 * @param stackDescriptor the stack-level Kerberos descriptor (may be null)
 * @param userDescriptor  the user-supplied Kerberos descriptor (may be null)
 * @return a non-null KerberosDescriptor
 */
private KerberosDescriptor combineKerberosDescriptors(KerberosDescriptor stackDescriptor, KerberosDescriptor userDescriptor) {
  if (stackDescriptor == null) {
    // No stack-level data: fall back to the user-supplied descriptor, or an empty
    // descriptor when neither source is available.
    return (userDescriptor == null) ? new KerberosDescriptor() : userDescriptor;
  }

  if (userDescriptor != null) {
    // Overlay the user-supplied data onto the stack-level descriptor (mutates stackDescriptor).
    stackDescriptor.update(userDescriptor);
  }

  return stackDescriptor;
}
/* ********************************************************************************************
 * Helper classes and enums
 * ******************************************************************************************** */
/**
 * An enumeration of the supported custom operations.
 */
public enum SupportedCustomOperation {
  REGENERATE_KEYTABS
}
/**
 * Handler is an interface that needs to be implemented by toggle handler classes to do the
 * "right" thing for the task at hand.
 */
private abstract class Handler {
  /**
   * If {@code true}, allows stages and tasks created with the handler to be
   * retried instead of outright failing a task.
   *
   * @see KerberosHelper#ALLOW_RETRY
   */
  protected boolean retryAllowed = false;

  /**
   * Sets whether tasks created as part of this handler can be retried if they fail. If a task
   * cannot be retried it will fail the entire request.
   *
   * @param retryAllowed true if failed tasks may be retried; false if a task failure should
   *                     fail the entire request
   */
  void setRetryAllowed(boolean retryAllowed) {
    this.retryAllowed = retryAllowed;
  }
  /**
   * Creates the necessary stages to complete the relevant task and stores them in the supplied
   * or a newly created RequestStageContainer.
   * <p/>
   * If the supplied RequestStageContainer is null, a new one must be created and filled.
   * {@link org.apache.ambari.server.controller.internal.RequestStageContainer#persist()} should
   * not be called since it is not known if the set of states for this container is complete.
   *
   * @param cluster                the relevant Cluster
   * @param clusterHostInfo        JSON-encoded clusterHostInfo structure
   * @param hostParams             JSON-encoded host parameters
   * @param event                  a ServiceComponentHostServerActionEvent to pass to any created tasks
   * @param roleCommandOrder       the RoleCommandOrder to use to generate the RoleGraph for any newly created Stages
   * @param kerberosDetails        a KerberosDetails containing the information about the relevant Kerberos configuration
   * @param dataDirectory          a File pointing to the (temporary) data directory
   * @param requestStageContainer  a RequestStageContainer to store the new stages in, if null a
   *                               new RequestStageContainer will be created
   * @param serviceComponentHosts  a List of ServiceComponentHosts that need to be updated as part of this operation
   * @param serviceComponentFilter a Map of service names to component names indicating the relevant
   *                               set of services and components - if null, no filter is relevant;
   *                               if empty, the filter indicates no relevant services or components
   * @param hostFilter             a set of hostnames indicating the set of hosts to process -
   *                               if null, no filter is relevant; if empty, the filter indicates no
   *                               relevant hosts
   * @param identityFilter         a Collection of identity names indicating the relevant identities -
   *                               if null, no filter is relevant; if empty, the filter indicates no
   *                               relevant identities
   * @param hostsWithValidKerberosClient a Set of hostnames used by implementations to narrow agent-side
   *                               work — presumably hosts with a usable KERBEROS_CLIENT; TODO confirm
   * @return the last stage id generated, or -1 if no stages were created
   * @throws AmbariException if an error occurs while creating the relevant stages
   */
  abstract long createStages(Cluster cluster,
      String clusterHostInfo, String hostParams,
      ServiceComponentHostServerActionEvent event,
      RoleCommandOrder roleCommandOrder,
      KerberosDetails kerberosDetails, File dataDirectory,
      RequestStageContainer requestStageContainer,
      List<ServiceComponentHost> serviceComponentHosts,
      Map<String, ? extends Collection<String>> serviceComponentFilter,
      Set<String> hostFilter, Collection<String> identityFilter,
      Set<String> hostsWithValidKerberosClient)
      throws AmbariException;
  /**
   * Adds a server-side stage running {@link PrepareEnableKerberosServerAction} to the supplied
   * request stage container.
   * <p>
   * Note: {@code clusterHostInfoJson} is accepted for signature consistency with the other
   * stage-creation helpers but is not used by this method.
   */
  public void addPrepareEnableKerberosOperationsStage(Cluster cluster, String clusterHostInfoJson,
      String hostParamsJson, ServiceComponentHostServerActionEvent event,
      Map<String, String> commandParameters,
      RoleCommandOrder roleCommandOrder, RequestStageContainer requestStageContainer)
      throws AmbariException {
    Stage stage = createServerActionStage(requestStageContainer.getLastStageId(),
        cluster,
        requestStageContainer.getId(),
        "Preparing Operations",
        "{}",
        hostParamsJson,
        PrepareEnableKerberosServerAction.class,
        event,
        commandParameters,
        "Preparing Operations",
        configuration.getDefaultServerTaskTimeout());

    RoleGraph roleGraph = roleGraphFactory.createNew(roleCommandOrder);
    roleGraph.build(stage);
    requestStageContainer.addStages(roleGraph.getStages());
  }
  /**
   * Adds a server-side stage running {@link PrepareKerberosIdentitiesServerAction} to the supplied
   * request stage container.
   * <p>
   * Note: {@code clusterHostInfoJson} is accepted for signature consistency with the other
   * stage-creation helpers but is not used by this method.
   */
  public void addPrepareKerberosIdentitiesStage(Cluster cluster, String clusterHostInfoJson,
      String hostParamsJson, ServiceComponentHostServerActionEvent event,
      Map<String, String> commandParameters,
      RoleCommandOrder roleCommandOrder, RequestStageContainer requestStageContainer)
      throws AmbariException {
    Stage stage = createServerActionStage(requestStageContainer.getLastStageId(),
        cluster,
        requestStageContainer.getId(),
        "Preparing Operations",
        "{}",
        hostParamsJson,
        PrepareKerberosIdentitiesServerAction.class,
        event,
        commandParameters,
        "Preparing Operations",
        configuration.getDefaultServerTaskTimeout());

    RoleGraph roleGraph = roleGraphFactory.createNew(roleCommandOrder);
    roleGraph.build(stage);
    requestStageContainer.addStages(roleGraph.getStages());
  }
  /**
   * Adds a server-side stage running {@link PrepareDisableKerberosServerAction} to the supplied
   * request stage container.
   * <p>
   * Note: {@code clusterHostInfoJson} is accepted for signature consistency with the other
   * stage-creation helpers but is not used by this method.
   */
  public void addPrepareDisableKerberosOperationsStage(Cluster cluster, String clusterHostInfoJson,
      String hostParamsJson, ServiceComponentHostServerActionEvent event,
      Map<String, String> commandParameters,
      RoleCommandOrder roleCommandOrder, RequestStageContainer requestStageContainer)
      throws AmbariException {
    Stage stage = createServerActionStage(requestStageContainer.getLastStageId(),
        cluster,
        requestStageContainer.getId(),
        "Preparing Operations",
        "{}",
        hostParamsJson,
        PrepareDisableKerberosServerAction.class,
        event,
        commandParameters,
        "Preparing Operations",
        configuration.getDefaultServerTaskTimeout());

    RoleGraph roleGraph = roleGraphFactory.createNew(roleCommandOrder);
    roleGraph.build(stage);
    requestStageContainer.addStages(roleGraph.getStages());
  }
  /**
   * Adds a server-side stage running {@link CreatePrincipalsServerAction}. Uses the larger of the
   * default server task timeout and the long-running-task timeout since principal creation may be slow.
   * <p>
   * Note: {@code clusterHostInfoJson} is accepted for signature consistency with the other
   * stage-creation helpers but is not used by this method.
   */
  public void addCreatePrincipalsStage(Cluster cluster, String clusterHostInfoJson,
      String hostParamsJson, ServiceComponentHostServerActionEvent event,
      Map<String, String> commandParameters,
      RoleCommandOrder roleCommandOrder, RequestStageContainer requestStageContainer)
      throws AmbariException {
    Stage stage = createServerActionStage(requestStageContainer.getLastStageId(),
        cluster,
        requestStageContainer.getId(),
        "Create Principals",
        "{}",
        hostParamsJson,
        CreatePrincipalsServerAction.class,
        event,
        commandParameters,
        "Create Principals",
        Math.max(ServerAction.DEFAULT_LONG_RUNNING_TASK_TIMEOUT_SECONDS, configuration.getDefaultServerTaskTimeout()));

    RoleGraph roleGraph = roleGraphFactory.createNew(roleCommandOrder);
    roleGraph.build(stage);
    requestStageContainer.addStages(roleGraph.getStages());
  }
  /**
   * Adds a server-side stage running {@link DestroyPrincipalsServerAction}. Uses the larger of the
   * default server task timeout and the long-running-task timeout.
   * <p>
   * Note: {@code clusterHostInfoJson} is accepted for signature consistency with the other
   * stage-creation helpers but is not used by this method.
   */
  public void addDestroyPrincipalsStage(Cluster cluster, String clusterHostInfoJson,
      String hostParamsJson, ServiceComponentHostServerActionEvent event,
      Map<String, String> commandParameters,
      RoleCommandOrder roleCommandOrder, RequestStageContainer requestStageContainer)
      throws AmbariException {
    Stage stage = createServerActionStage(requestStageContainer.getLastStageId(),
        cluster,
        requestStageContainer.getId(),
        "Destroy Principals",
        "{}",
        hostParamsJson,
        DestroyPrincipalsServerAction.class,
        event,
        commandParameters,
        "Destroy Principals",
        Math.max(ServerAction.DEFAULT_LONG_RUNNING_TASK_TIMEOUT_SECONDS, configuration.getDefaultServerTaskTimeout()));

    RoleGraph roleGraph = roleGraphFactory.createNew(roleCommandOrder);
    roleGraph.build(stage);
    requestStageContainer.addStages(roleGraph.getStages());
  }
  /**
   * Adds a server-side stage running {@link ConfigureAmbariIdentitiesServerAction} to set up the
   * Ambari server's own Kerberos identity.
   * <p>
   * Note: {@code clusterHostInfoJson} is accepted for signature consistency with the other
   * stage-creation helpers but is not used by this method.
   */
  public void addConfigureAmbariIdentityStage(Cluster cluster, String clusterHostInfoJson,
      String hostParamsJson, ServiceComponentHostServerActionEvent event,
      Map<String, String> commandParameters,
      RoleCommandOrder roleCommandOrder, RequestStageContainer requestStageContainer)
      throws AmbariException {
    Stage stage = createServerActionStage(requestStageContainer.getLastStageId(),
        cluster,
        requestStageContainer.getId(),
        "Configure Ambari Identity",
        "{}",
        hostParamsJson,
        ConfigureAmbariIdentitiesServerAction.class,
        event,
        commandParameters,
        "Configure Ambari Identity",
        configuration.getDefaultServerTaskTimeout());

    RoleGraph roleGraph = roleGraphFactory.createNew(roleCommandOrder);
    roleGraph.build(stage);
    requestStageContainer.addStages(roleGraph.getStages());
  }
  /**
   * Adds a server-side stage running {@link CreateKeytabFilesServerAction}. Uses the larger of the
   * default server task timeout and the long-running-task timeout since keytab generation may be slow.
   * <p>
   * Note: {@code clusterHostInfoJson} is accepted for signature consistency with the other
   * stage-creation helpers but is not used by this method.
   */
  public void addCreateKeytabFilesStage(Cluster cluster, String clusterHostInfoJson,
      String hostParamsJson, ServiceComponentHostServerActionEvent event,
      Map<String, String> commandParameters,
      RoleCommandOrder roleCommandOrder, RequestStageContainer requestStageContainer)
      throws AmbariException {
    Stage stage = createServerActionStage(requestStageContainer.getLastStageId(),
        cluster,
        requestStageContainer.getId(),
        "Create Keytabs",
        "{}",
        hostParamsJson,
        CreateKeytabFilesServerAction.class,
        event,
        commandParameters,
        "Create Keytabs",
        Math.max(ServerAction.DEFAULT_LONG_RUNNING_TASK_TIMEOUT_SECONDS, configuration.getDefaultServerTaskTimeout()));

    RoleGraph roleGraph = roleGraphFactory.createNew(roleCommandOrder);
    roleGraph.build(stage);
    requestStageContainer.addStages(roleGraph.getStages());
  }
  /**
   * Adds an agent-side stage that issues the SET_KEYTAB custom command to the given hosts to
   * distribute generated keytab files. If the host list is empty the command is skipped (with a
   * warning) but the stage is still created and added.
   * <p>
   * Note: {@code clusterHostInfoJson} is accepted for signature consistency with the other
   * stage-creation helpers but is not used by this method.
   */
  void addDistributeKeytabFilesStage(Cluster cluster, String clusterHostInfoJson,
      String hostParamsJson, Map<String, String> commandParameters,
      RoleCommandOrder roleCommandOrder,
      RequestStageContainer requestStageContainer,
      List<String> hosts)
      throws AmbariException {
    Stage stage = createNewStage(requestStageContainer.getLastStageId(),
        cluster,
        requestStageContainer.getId(),
        "Distribute Keytabs",
        StageUtils.getGson().toJson(commandParameters),
        hostParamsJson);

    if (!hosts.isEmpty()) {
      Map<String, String> requestParams = new HashMap<>();

      ActionExecutionContext actionExecContext = createActionExecutionContext(
          cluster.getClusterName(),
          SET_KEYTAB,
          createRequestResourceFilters(hosts),
          requestParams,
          retryAllowed);

      customCommandExecutionHelper.addExecutionCommandsToStage(actionExecContext, stage,
          requestParams, null);
    } else {
      LOG.warn("Skipping {} command. No suitable hosts found", SET_KEYTAB);
    }

    RoleGraph roleGraph = roleGraphFactory.createNew(roleCommandOrder);
    roleGraph.build(stage);
    requestStageContainer.addStages(roleGraph.getStages());
  }
  /**
   * Adds an agent-side stage that sends the CHECK_KEYTABS custom command to the KERBEROS_CLIENT on
   * each of the given hosts to check for missing keytabs. If the host list is empty, the stage is
   * still created and added, but without any commands.
   * <p>
   * Note: {@code clusterHostInfoJson} is accepted for signature consistency with the other
   * stage-creation helpers but is not used by this method.
   */
  void addCheckMissingKeytabsStage(Cluster cluster, String clusterHostInfoJson,
      String hostParamsJson, Map<String, String> commandParameters,
      RoleCommandOrder roleCommandOrder,
      RequestStageContainer requestStageContainer,
      List<String> hostsToInclude)
      throws AmbariException {
    Stage stage = createNewStage(requestStageContainer.getLastStageId(),
        cluster,
        requestStageContainer.getId(),
        "Checking keytabs",
        StageUtils.getGson().toJson(commandParameters),
        hostParamsJson);

    if (!hostsToInclude.isEmpty()) {
      Map<String, String> requestParams = new HashMap<>();

      ActionExecutionContext actionExecContext = createActionExecutionContext(
          cluster.getClusterName(),
          CHECK_KEYTABS,
          createRequestResourceFilters(hostsToInclude),
          requestParams,
          retryAllowed);

      customCommandExecutionHelper.addExecutionCommandsToStage(actionExecContext, stage, requestParams, null);
    }

    RoleGraph roleGraph = roleGraphFactory.createNew(roleCommandOrder);
    roleGraph.build(stage);
    requestStageContainer.addStages(roleGraph.getStages());
  }
  /**
   * Adds a stage that sends a DISABLE_SECURITY custom command to every service in the cluster
   * (one host per component) before Kerberos is disabled.
   * <p>
   * Note: {@code clusterHostInfoJson} is accepted for signature consistency with the other
   * stage-creation helpers but is not used by this method.
   */
  void addDisableSecurityHookStage(Cluster cluster,
      String clusterHostInfoJson,
      String hostParamsJson,
      Map<String, String> commandParameters,
      RoleCommandOrder roleCommandOrder,
      RequestStageContainer requestStageContainer)
      throws AmbariException {
    Stage stage = createNewStage(requestStageContainer.getLastStageId(),
        cluster,
        requestStageContainer.getId(),
        "Disable security",
        StageUtils.getGson().toJson(commandParameters),
        hostParamsJson);
    addDisableSecurityCommandToAllServices(cluster, stage);

    RoleGraph roleGraph = roleGraphFactory.createNew(roleCommandOrder);
    roleGraph.build(stage);
    requestStageContainer.addStages(roleGraph.getStages());
  }
  /**
   * Adds a DISABLE_SECURITY execution command to the given stage for every component of every
   * service in the cluster. The command is sent to only one (arbitrary) host per component,
   * since one recipient per component is sufficient.
   *
   * @param cluster the relevant Cluster
   * @param stage   the Stage to add the execution commands to
   * @throws AmbariException if adding the execution commands fails
   */
  private void addDisableSecurityCommandToAllServices(Cluster cluster, Stage stage) throws AmbariException {
    for (Service service : cluster.getServices().values()) {
      for (ServiceComponent component : service.getServiceComponents().values()) {
        if (!component.getServiceComponentHosts().isEmpty()) {
          String firstHost = component.getServiceComponentHosts().keySet().iterator().next(); // it is only necessary to send it to one host
          ActionExecutionContext exec = new ActionExecutionContext(
              cluster.getClusterName(),
              "DISABLE_SECURITY",
              singletonList(new RequestResourceFilter(service.getName(), component.getName(), singletonList(firstHost))),
              Collections.emptyMap());
          customCommandExecutionHelper.addExecutionCommandsToStage(exec, stage, Collections.emptyMap(), null);
        }
      }
    }
  }
  /**
   * Adds a stage that stops all ZooKeeper components on all of their hosts. If the cluster has
   * no ZOOKEEPER service, the method returns without adding anything.
   * <p>
   * Note: {@code clusterHostInfoJson} is accepted for signature consistency with the other
   * stage-creation helpers but is not used by this method.
   */
  void addStopZookeeperStage(Cluster cluster,
      String clusterHostInfoJson,
      String hostParamsJson,
      Map<String, String> commandParameters,
      RoleCommandOrder roleCommandOrder,
      RequestStageContainer requestStageContainer)
      throws AmbariException {
    Service zookeeper;
    try {
      zookeeper = cluster.getService("ZOOKEEPER");
    } catch (ServiceNotFoundException e) {
      // No ZooKeeper in this cluster — nothing to stop.
      return;
    }
    Stage stage = createNewStage(requestStageContainer.getLastStageId(),
        cluster,
        requestStageContainer.getId(),
        "Stopping ZooKeeper",
        StageUtils.getGson().toJson(commandParameters),
        hostParamsJson);
    for (ServiceComponent component : zookeeper.getServiceComponents().values()) {
      Set<String> hosts = component.getServiceComponentHosts().keySet();
      ActionExecutionContext exec = new ActionExecutionContext(
          cluster.getClusterName(),
          "STOP",
          singletonList(new RequestResourceFilter(zookeeper.getName(), component.getName(), new ArrayList<>(hosts))),
          Collections.emptyMap());
      customCommandExecutionHelper.addExecutionCommandsToStage(exec, stage, Collections.emptyMap(), null);
    }

    RoleGraph roleGraph = roleGraphFactory.createNew(roleCommandOrder);
    roleGraph.build(stage);
    requestStageContainer.addStages(roleGraph.getStages());
  }
  /**
   * Adds an agent-side stage that issues the REMOVE_KEYTAB custom command to the KERBEROS_CLIENT
   * on the relevant hosts. Only ServiceComponentHosts on hosts contained in
   * {@code hostsWithValidKerberosClient} are considered, and of those only HEALTHY hosts are sent
   * the command. The stage is created and added even if no commands end up being attached.
   * <p>
   * Note: {@code clusterHostInfoJson} is accepted for signature consistency with the other
   * stage-creation helpers but is not used by this method.
   */
  public void addDeleteKeytabFilesStage(Cluster cluster, List<ServiceComponentHost> serviceComponentHosts,
      String clusterHostInfoJson, String hostParamsJson,
      Map<String, String> commandParameters,
      RoleCommandOrder roleCommandOrder,
      RequestStageContainer requestStageContainer,
      Set<String> hostsWithValidKerberosClient)
      throws AmbariException {
    Stage stage = createNewStage(requestStageContainer.getLastStageId(),
        cluster,
        requestStageContainer.getId(),
        "Delete Keytabs",
        StageUtils.getGson().toJson(commandParameters),
        hostParamsJson);

    // Narrow the set of components to those residing on hosts with a valid KERBEROS_CLIENT.
    Collection<ServiceComponentHost> filteredComponents = filterServiceComponentHostsForHosts(
        new ArrayList<>(serviceComponentHosts), hostsWithValidKerberosClient);

    if (!filteredComponents.isEmpty()) {
      List<String> hostsToUpdate = createUniqueHostList(filteredComponents, Collections.singleton(HostState.HEALTHY));

      if (!hostsToUpdate.isEmpty()) {
        Map<String, String> requestParams = new HashMap<>();
        List<RequestResourceFilter> requestResourceFilters = new ArrayList<>();
        RequestResourceFilter reqResFilter = new RequestResourceFilter("KERBEROS", "KERBEROS_CLIENT", hostsToUpdate);
        requestResourceFilters.add(reqResFilter);

        ActionExecutionContext actionExecContext = new ActionExecutionContext(
            cluster.getClusterName(),
            REMOVE_KEYTAB,
            requestResourceFilters,
            requestParams);
        customCommandExecutionHelper.addExecutionCommandsToStage(actionExecContext, stage,
            requestParams, null);
      }
    }

    RoleGraph roleGraph = roleGraphFactory.createNew(roleCommandOrder);
    roleGraph.build(stage);
    requestStageContainer.addStages(roleGraph.getStages());
  }
  /**
   * Adds a server-side stage running {@link UpdateKerberosConfigsServerAction} to apply the
   * computed service configuration changes.
   * <p>
   * Note: {@code clusterHostInfoJson} is accepted for signature consistency with the other
   * stage-creation helpers but is not used by this method.
   */
  public void addUpdateConfigurationsStage(Cluster cluster, String clusterHostInfoJson,
      String hostParamsJson, ServiceComponentHostServerActionEvent event,
      Map<String, String> commandParameters,
      RoleCommandOrder roleCommandOrder, RequestStageContainer requestStageContainer)
      throws AmbariException {
    Stage stage = createServerActionStage(requestStageContainer.getLastStageId(),
        cluster,
        requestStageContainer.getId(),
        "Update Configurations",
        "{}",
        hostParamsJson,
        UpdateKerberosConfigsServerAction.class,
        event,
        commandParameters,
        "Update Service Configurations",
        configuration.getDefaultServerTaskTimeout());

    RoleGraph roleGraph = roleGraphFactory.createNew(roleCommandOrder);
    roleGraph.build(stage);
    requestStageContainer.addStages(roleGraph.getStages());
  }
  /**
   * Adds a server-side stage running {@link FinalizeKerberosServerAction}, building its command
   * parameters (realm, KDC type, authenticated user, optional data directory) from the supplied
   * KerberosDetails.
   * <p>
   * Note: {@code clusterHostInfoJson} is accepted for signature consistency with the other
   * stage-creation helpers but is not used by this method.
   */
  public void addFinalizeOperationStage(Cluster cluster, String clusterHostInfoJson,
      String hostParamsJson, ServiceComponentHostServerActionEvent event,
      File dataDirectory, RoleCommandOrder roleCommandOrder,
      RequestStageContainer requestStageContainer,
      KerberosDetails kerberosDetails)
      throws AmbariException {

    // Add the finalize stage...
    Map<String, String> commandParameters = new HashMap<>();
    commandParameters.put(KerberosServerAction.DEFAULT_REALM, kerberosDetails.getDefaultRealm());
    commandParameters.put(KerberosServerAction.KDC_TYPE, kerberosDetails.getKdcType().name());
    commandParameters.put(KerberosServerAction.AUTHENTICATED_USER_NAME, ambariManagementController.getAuthName());
    if (dataDirectory != null) {
      commandParameters.put(KerberosServerAction.DATA_DIRECTORY, dataDirectory.getAbsolutePath());
    }

    Stage stage = createServerActionStage(requestStageContainer.getLastStageId(),
        cluster,
        requestStageContainer.getId(),
        "Finalize Operations",
        "{}",
        hostParamsJson,
        FinalizeKerberosServerAction.class,
        event,
        commandParameters,
        // 300-second task timeout — hard-coded, unlike the other stages which use
        // configuration.getDefaultServerTaskTimeout().
        "Finalize Operations", 300);

    RoleGraph roleGraph = roleGraphFactory.createNew(roleCommandOrder);
    roleGraph.build(stage);
    requestStageContainer.addStages(roleGraph.getStages());
  }
  /**
   * Adds a server-side stage running {@link CleanupServerAction} to perform Kerberization
   * clean-up work.
   * <p>
   * Note: {@code clusterHostInfoJson} is accepted for signature consistency with the other
   * stage-creation helpers but is not used by this method.
   */
  public void addCleanupStage(Cluster cluster, String clusterHostInfoJson,
      String hostParamsJson, ServiceComponentHostServerActionEvent event,
      Map<String, String> commandParameters,
      RoleCommandOrder roleCommandOrder, RequestStageContainer requestStageContainer)
      throws AmbariException {
    Stage stage = createServerActionStage(requestStageContainer.getLastStageId(),
        cluster,
        requestStageContainer.getId(),
        "Kerberization Clean Up",
        "{}",
        hostParamsJson,
        CleanupServerAction.class,
        event,
        commandParameters,
        "Kerberization Clean Up",
        configuration.getDefaultServerTaskTimeout());

    RoleGraph roleGraph = roleGraphFactory.createNew(roleCommandOrder);
    roleGraph.build(stage);
    requestStageContainer.addStages(roleGraph.getStages());
  }
private List<RequestResourceFilter> createRequestResourceFilters(List<String> hostsToInclude) {
List<RequestResourceFilter> requestResourceFilters = new ArrayList<>();
RequestResourceFilter reqResFilter = new RequestResourceFilter(Service.Type.KERBEROS.name(), Role.KERBEROS_CLIENT.name(), hostsToInclude);
requestResourceFilters.add(reqResFilter);
return requestResourceFilters;
}
  /**
   * Creates a new stage with a single task describing the ServerAction class to invoke and the
   * other task-related information.
   *
   * @param id                the new stage's id
   * @param cluster           the relevant Cluster
   * @param requestId         the relevant request Id
   * @param requestContext    a String describing the stage
   * @param commandParams     JSON-encoded command parameters
   * @param hostParams        JSON-encoded host parameters
   * @param actionClass       the ServerAction class that implements the action to invoke
   * @param event             the relevant ServiceComponentHostServerActionEvent
   * @param commandParameters a Map of command parameters to attach to the task added to the new
   *                          stage
   * @param commandDetail     a String declaring a descriptive name to pass to the action - null or an
   *                          empty string indicates no value is to be set
   * @param timeout           the timeout for the task/action
   * @return a newly created Stage
   * @throws AmbariException if an error occurs while creating the stage
   */
  private Stage createServerActionStage(long id, Cluster cluster, long requestId,
      String requestContext,
      String commandParams, String hostParams,
      Class<? extends ServerAction> actionClass,
      ServiceComponentHostServerActionEvent event,
      Map<String, String> commandParameters, String commandDetail,
      Integer timeout) throws AmbariException {

    Stage stage = createNewStage(id, cluster, requestId, requestContext, commandParams, hostParams);
    // The command runs as the AMBARI_SERVER_ACTION role on the server itself; the handler's
    // retryAllowed flag controls whether a failed task may be retried.
    stage.addServerActionCommand(actionClass.getName(), null, Role.AMBARI_SERVER_ACTION,
        RoleCommand.EXECUTE, cluster.getClusterName(), event, commandParameters, commandDetail,
        ambariManagementController.findConfigurationTagsWithOverrides(cluster, null), timeout,
        retryAllowed, false);

    return stage;
  }
/**
* Creates an {@link ActionExecutionContext} where some of the common values are pre-initialized.
*
* @param clusterName
* @param commandName
* @param resourceFilters
* @param parameters
* @param retryAllowed
* @return
*/
private ActionExecutionContext createActionExecutionContext(String clusterName,
String commandName, List<RequestResourceFilter> resourceFilters,
Map<String, String> parameters, boolean retryAllowed) {
ActionExecutionContext actionExecContext = new ActionExecutionContext(clusterName, SET_KEYTAB,
resourceFilters, parameters);
actionExecContext.setRetryAllowed(retryAllowed);
return actionExecContext;
}
}
/**
 * EnableKerberosHandler is an implementation of the Handler interface used to enable Kerberos
 * on the relevant cluster.
 * <p/>
 * To complete the process, this implementation creates the following stages:
 * <ol>
 * <li>create principals</li>
 * <li>create keytab files</li>
 * <li>distribute keytab files to the appropriate hosts</li>
 * <li>update relevant configurations</li>
 * </ol>
 */
private class EnableKerberosHandler extends Handler {
  @Override
  public long createStages(Cluster cluster,
      String clusterHostInfoJson, String hostParamsJson,
      ServiceComponentHostServerActionEvent event,
      RoleCommandOrder roleCommandOrder, KerberosDetails kerberosDetails,
      File dataDirectory, RequestStageContainer requestStageContainer,
      List<ServiceComponentHost> serviceComponentHosts,
      Map<String, ? extends Collection<String>> serviceComponentFilter,
      Set<String> hostFilter, Collection<String> identityFilter, Set<String> hostsWithValidKerberosClient)
      throws AmbariException {
    // If there are principals, keytabs, and configurations to process, set up the following stages:
    //  1) prepare identities
    //  2) generate principals
    //  3) generate keytab files
    //  4) distribute keytab files
    //  5) update configurations

    // If a RequestStageContainer does not already exist, create a new one...
    if (requestStageContainer == null) {
      requestStageContainer = new RequestStageContainer(
          actionManager.getNextRequestId(),
          null,
          requestFactory,
          actionManager);
    }

    // Common command parameters shared by all stages created below.
    Map<String, String> commandParameters = new HashMap<>();
    commandParameters.put(KerberosServerAction.AUTHENTICATED_USER_NAME, ambariManagementController.getAuthName());
    commandParameters.put(KerberosServerAction.UPDATE_CONFIGURATION_NOTE, "Enabling Kerberos");
    commandParameters.put(KerberosServerAction.UPDATE_CONFIGURATIONS, "true");
    commandParameters.put(KerberosServerAction.DEFAULT_REALM, kerberosDetails.getDefaultRealm());
    commandParameters.put(KerberosServerAction.INCLUDE_AMBARI_IDENTITY, (kerberosDetails.createAmbariPrincipal()) ? "true" : "false");
    commandParameters.put(KerberosServerAction.PRECONFIGURE_SERVICES, kerberosDetails.getPreconfigureServices());

    if (dataDirectory != null) {
      commandParameters.put(KerberosServerAction.DATA_DIRECTORY, dataDirectory.getAbsolutePath());
    }
    // Filters are JSON-encoded into the command parameters only when supplied (null means "no filter").
    if (serviceComponentFilter != null) {
      commandParameters.put(KerberosServerAction.SERVICE_COMPONENT_FILTER, StageUtils.getGson().toJson(serviceComponentFilter));
    }
    if (hostFilter != null) {
      commandParameters.put(KerberosServerAction.HOST_FILTER, StageUtils.getGson().toJson(hostFilter));
    }
    if (identityFilter != null) {
      commandParameters.put(KerberosServerAction.IDENTITY_FILTER, StageUtils.getGson().toJson(identityFilter));
    }

    // *****************************************************************
    // Create stage to prepare operations
    addPrepareEnableKerberosOperationsStage(cluster, clusterHostInfoJson, hostParamsJson, event, commandParameters,
        roleCommandOrder, requestStageContainer);

    // Principal/keytab stages only make sense when Ambari manages the Kerberos identities.
    if (kerberosDetails.manageIdentities()) {
      List<String> hostsToInclude = calculateHosts(cluster, serviceComponentHosts, hostsWithValidKerberosClient, false);

      commandParameters.put(KerberosServerAction.KDC_TYPE, kerberosDetails.getKdcType().name());

      // *****************************************************************
      // Create stage to create principals
      addCreatePrincipalsStage(cluster, clusterHostInfoJson, hostParamsJson, event, commandParameters,
          roleCommandOrder, requestStageContainer);

      // *****************************************************************
      // Create stage to generate keytabs
      addCreateKeytabFilesStage(cluster, clusterHostInfoJson, hostParamsJson, event, commandParameters,
          roleCommandOrder, requestStageContainer);

      // *****************************************************************
      // Create stage to distribute and configure keytab for Ambari server and configure JAAS
      if (kerberosDetails.createAmbariPrincipal()) {
        addConfigureAmbariIdentityStage(cluster, clusterHostInfoJson, hostParamsJson, event, commandParameters,
            roleCommandOrder, requestStageContainer);
      }

      // *****************************************************************
      // Create stage to distribute keytabs
      addDistributeKeytabFilesStage(cluster, clusterHostInfoJson, hostParamsJson, commandParameters,
          roleCommandOrder, requestStageContainer, hostsToInclude);
    }

    // *****************************************************************
    // Create stage to update configurations of services
    addUpdateConfigurationsStage(cluster, clusterHostInfoJson, hostParamsJson, event, commandParameters,
        roleCommandOrder, requestStageContainer);

    return requestStageContainer.getLastStageId();
  }
}
/**
 * DisableKerberosHandler is an implementation of the Handler interface used to disable Kerberos
 * on the relevant cluster.
 * <p/>
 * To complete the process, this implementation creates the following stages:
 * <ol>
 * <li>update relevant configurations</li>
 * <li>delete keytab files</li>
 * <li>remove principals</li>
 * <li>restart services</li>
 * </ol>
 */
private class DisableKerberosHandler extends Handler {
  @Override
  public long createStages(Cluster cluster,
      String clusterHostInfoJson, String hostParamsJson,
      ServiceComponentHostServerActionEvent event,
      RoleCommandOrder roleCommandOrder, KerberosDetails kerberosDetails,
      File dataDirectory, RequestStageContainer requestStageContainer,
      List<ServiceComponentHost> serviceComponentHosts,
      Map<String, ? extends Collection<String>> serviceComponentFilter, Set<String> hostFilter, Collection<String> identityFilter, Set<String> hostsWithValidKerberosClient) throws AmbariException {
    // 1) revert configurations

    // If a RequestStageContainer does not already exist, create a new one...
    if (requestStageContainer == null) {
      requestStageContainer = new RequestStageContainer(
          actionManager.getNextRequestId(),
          null,
          requestFactory,
          actionManager);
    }

    // Common command parameters shared by all stages created below.
    Map<String, String> commandParameters = new HashMap<>();
    commandParameters.put(KerberosServerAction.AUTHENTICATED_USER_NAME, ambariManagementController.getAuthName());
    commandParameters.put(KerberosServerAction.UPDATE_CONFIGURATION_NOTE, "Disabling Kerberos");
    commandParameters.put(KerberosServerAction.UPDATE_CONFIGURATIONS, "true");
    commandParameters.put(KerberosServerAction.DEFAULT_REALM, kerberosDetails.getDefaultRealm());
    if (dataDirectory != null) {
      commandParameters.put(KerberosServerAction.DATA_DIRECTORY, dataDirectory.getAbsolutePath());
    }
    // Filters are JSON-encoded into the command parameters only when supplied (null means "no filter").
    if (serviceComponentFilter != null) {
      commandParameters.put(KerberosServerAction.SERVICE_COMPONENT_FILTER, StageUtils.getGson().toJson(serviceComponentFilter));
    }
    if (hostFilter != null) {
      commandParameters.put(KerberosServerAction.HOST_FILTER, StageUtils.getGson().toJson(hostFilter));
    }
    if (identityFilter != null) {
      commandParameters.put(KerberosServerAction.IDENTITY_FILTER, StageUtils.getGson().toJson(identityFilter));
    }

    // Run the per-service DISABLE_SECURITY hooks, then stop ZooKeeper, before disabling.
    addDisableSecurityHookStage(cluster, clusterHostInfoJson, hostParamsJson, commandParameters,
        roleCommandOrder, requestStageContainer);

    addStopZookeeperStage(cluster, clusterHostInfoJson, hostParamsJson, commandParameters,
        roleCommandOrder, requestStageContainer);

    // *****************************************************************
    // Create stage to prepare operations
    addPrepareDisableKerberosOperationsStage(cluster, clusterHostInfoJson, hostParamsJson, event, commandParameters,
        roleCommandOrder, requestStageContainer);

    // *****************************************************************
    // Create stage to update configurations of services
    addUpdateConfigurationsStage(cluster, clusterHostInfoJson, hostParamsJson, event, commandParameters,
        roleCommandOrder, requestStageContainer);

    // Principal/keytab removal only applies when Ambari manages the Kerberos identities.
    if (kerberosDetails.manageIdentities()) {
      commandParameters.put(KerberosServerAction.KDC_TYPE, kerberosDetails.getKdcType().name());

      // *****************************************************************
      // Create stage to remove principals
      addDestroyPrincipalsStage(cluster, clusterHostInfoJson, hostParamsJson, event, commandParameters,
          roleCommandOrder, requestStageContainer);

      // *****************************************************************
      // Create stage to delete keytabs
      addDeleteKeytabFilesStage(cluster, serviceComponentHosts, clusterHostInfoJson,
          hostParamsJson, commandParameters, roleCommandOrder, requestStageContainer, hostsWithValidKerberosClient);
    }

    // *****************************************************************
    // Create stage to perform data cleanups (e.g. kerberos descriptor artifact database leftovers)
    addCleanupStage(cluster, clusterHostInfoJson, hostParamsJson, event, commandParameters,
        roleCommandOrder, requestStageContainer);

    return requestStageContainer.getLastStageId();
  }
}
/**
 * Converts a service/component filter into a JsonObject mapping each service name to a JsonArray
 * of its component names. A service whose component collection is null is mapped to a null value.
 *
 * @param serviceComponentFilter a Map of service names to collections of component names; may be null
 * @return a JsonObject representation of the filter, or null if the supplied filter is null
 */
private JsonObject serviceFilterToJsonObject(Map<String, ? extends Collection<String>> serviceComponentFilter) {
  // Removed an unused local (a dead StageUtils.getGson().toJson(...) call whose result was
  // never read) that previously ran before this null check.
  if (serviceComponentFilter == null) {
    return null;
  }

  JsonObject serviceFilter = new JsonObject();
  for (Map.Entry<String, ? extends Collection<String>> filterEntry : serviceComponentFilter.entrySet()) {
    Collection<String> componentNames = filterEntry.getValue();
    if (componentNames != null) {
      JsonArray components = new JsonArray();
      for (String component : componentNames) {
        components.add(new JsonPrimitive(component));
      }
      serviceFilter.add(filterEntry.getKey(), components);
    } else {
      serviceFilter.add(filterEntry.getKey(), null);
    }
  }
  return serviceFilter;
}
/**
 * CreatePrincipalsAndKeytabsHandler is an implementation of the Handler interface used to create
 * principals and keytabs and distribute them throughout the cluster. This is similar to enabling
 * Kerberos however no states or configurations will be updated.
 * <p/>
 * To complete the process, this implementation creates the following stages:
 * <ol>
 * <li>create principals</li>
 * <li>create keytab files</li>
 * <li>distribute keytab files to the appropriate hosts</li>
 * </ol>
 */
private class CreatePrincipalsAndKeytabsHandler extends Handler {
  /**
   * The type of Kerberos operation being performed.
   *
   * @see org.apache.ambari.server.serveraction.kerberos.KerberosServerAction.OperationType
   */
  private KerberosServerAction.OperationType operationType;

  /**
   * A boolean value indicating whether to update service configurations (<code>true</code>)
   * or ignore any potential configuration changes (<code>false</code>).
   */
  private boolean updateConfigurations;

  /**
   * A boolean value indicating whether to include all hosts (<code>true</code>) when setting up
   * agent-side tasks or to select only the hosts found to be relevant (<code>false</code>).
   * <p>
   * This is useful if we do not know beforehand, which hosts need to be involved in the operation.
   */
  private boolean forceAllHosts;

  /**
   * A boolean value indicating whether to include Ambari server identity (<code>true</code>)
   * or ignore it (<code>false</code>).
   */
  private boolean includeAmbariIdentity;

  /**
   * CreatePrincipalsAndKeytabsHandler constructor to set whether this instance should be used to
   * regenerate all keytabs or just the ones that have not been distributed.
   *
   * @param operationType         the type of Kerberos operation being performed
   * @param updateConfigurations  a boolean value indicating whether to update service configurations
   *                              (<code>true</code>) or ignore any potential configuration changes
   *                              (<code>false</code>)
   * @param forceAllHosts         a boolean value indicating whether to include all hosts (<code>true</code>)
   *                              when setting up agent-side tasks or to select only the hosts found to be
   *                              relevant (<code>false</code>)
   * @param includeAmbariIdentity a boolean value indicating whether to include Ambari server
   *                              identity (<code>true</code>) or ignore it (<code>false</code>)
   */
  CreatePrincipalsAndKeytabsHandler(KerberosServerAction.OperationType operationType, boolean updateConfigurations,
      boolean forceAllHosts, boolean includeAmbariIdentity) {
    this.operationType = operationType;
    this.updateConfigurations = updateConfigurations;
    this.forceAllHosts = forceAllHosts;
    this.includeAmbariIdentity = includeAmbariIdentity;
  }
@Override
public long createStages(Cluster cluster,
String clusterHostInfoJson, String hostParamsJson,
ServiceComponentHostServerActionEvent event,
RoleCommandOrder roleCommandOrder, KerberosDetails kerberosDetails,
File dataDirectory, RequestStageContainer requestStageContainer,
List<ServiceComponentHost> serviceComponentHosts,
Map<String, ? extends Collection<String>> serviceComponentFilter,
Set<String> hostFilter, Collection<String> identityFilter, Set<String> hostsWithValidKerberosClient)
throws AmbariException {
// If there are principals and keytabs to process, setup the following sages:
// 1) prepare identities
// 2) generate principals
// 3) generate keytab files
// 4) distribute keytab files
// 5) update configurations (optional)
// If a RequestStageContainer does not already exist, create a new one...
if (requestStageContainer == null) {
requestStageContainer = new RequestStageContainer(
actionManager.getNextRequestId(),
null,
requestFactory,
actionManager);
}
boolean processAmbariIdentity = includeAmbariIdentity;
Map<String, String> commandParameters = new HashMap<>();
commandParameters.put(KerberosServerAction.AUTHENTICATED_USER_NAME, ambariManagementController.getAuthName());
commandParameters.put(KerberosServerAction.DEFAULT_REALM, kerberosDetails.getDefaultRealm());
if (dataDirectory != null) {
commandParameters.put(KerberosServerAction.DATA_DIRECTORY, dataDirectory.getAbsolutePath());
}
if (serviceComponentFilter != null) {
commandParameters.put(KerberosServerAction.SERVICE_COMPONENT_FILTER, StageUtils.getGson().toJson(serviceComponentFilter));
processAmbariIdentity = serviceComponentFilter.containsKey(RootService.AMBARI.name()) &&
((serviceComponentFilter.get(RootService.AMBARI.name()) == null) || serviceComponentFilter.get(RootService.AMBARI.name()).contains("*") || serviceComponentFilter.get("AMBARI").contains(RootComponent.AMBARI_SERVER.name()));
}
if (hostFilter != null) {
commandParameters.put(KerberosServerAction.HOST_FILTER, StageUtils.getGson().toJson(hostFilter));
processAmbariIdentity = hostFilter.contains("*") || hostFilter.contains(StageUtils.getHostName());
}
if (identityFilter != null) {
commandParameters.put(KerberosServerAction.IDENTITY_FILTER, StageUtils.getGson().toJson(identityFilter));
}
commandParameters.put(KerberosServerAction.OPERATION_TYPE, (operationType == null) ? KerberosServerAction.OperationType.DEFAULT.name() : operationType.name());
commandParameters.put(KerberosServerAction.INCLUDE_AMBARI_IDENTITY, (processAmbariIdentity) ? "true" : "false");
if (updateConfigurations) {
commandParameters.put(KerberosServerAction.UPDATE_CONFIGURATION_NOTE, "Updated Kerberos-related configurations");
commandParameters.put(KerberosServerAction.UPDATE_CONFIGURATIONS, "true");
}
List<String> hostsToInclude = calculateHosts(cluster, serviceComponentHosts, hostsWithValidKerberosClient, forceAllHosts);
// *****************************************************************
// Create stage to create principals
addPrepareKerberosIdentitiesStage(cluster, clusterHostInfoJson, hostParamsJson, event,
commandParameters, roleCommandOrder, requestStageContainer);
if (kerberosDetails.manageIdentities()) {
commandParameters.put(KerberosServerAction.KDC_TYPE, kerberosDetails.getKdcType().name());
if (operationType != KerberosServerAction.OperationType.RECREATE_ALL) {
addCheckMissingKeytabsStage(cluster, clusterHostInfoJson, hostParamsJson,
commandParameters, roleCommandOrder, requestStageContainer, hostsToInclude);
}
// *****************************************************************
// Create stage to create principals
addCreatePrincipalsStage(cluster, clusterHostInfoJson, hostParamsJson, event,
commandParameters, roleCommandOrder, requestStageContainer);
// *****************************************************************
// Create stage to generate keytabs
addCreateKeytabFilesStage(cluster, clusterHostInfoJson, hostParamsJson, event,
commandParameters, roleCommandOrder, requestStageContainer);
// *****************************************************************
// Create stage to distribute and configure keytab for Ambari server and configure JAAS
if (processAmbariIdentity && kerberosDetails.createAmbariPrincipal()) {
addConfigureAmbariIdentityStage(cluster, clusterHostInfoJson, hostParamsJson, event, commandParameters,
roleCommandOrder, requestStageContainer);
}
// *****************************************************************
// Create stage to distribute keytabs
addDistributeKeytabFilesStage(cluster, clusterHostInfoJson, hostParamsJson, commandParameters,
roleCommandOrder, requestStageContainer, hostsToInclude);
}
if (updateConfigurations) {
// *****************************************************************
// Create stage to update configurations of services
addUpdateConfigurationsStage(cluster, clusterHostInfoJson, hostParamsJson, event, commandParameters,
roleCommandOrder, requestStageContainer);
}
return requestStageContainer.getLastStageId();
}
}
/**
 * Retains only the ServiceComponentHosts that reside on hosts in the specified set of host
 * names; entries on any other host are removed. (Note: the previous Javadoc described the
 * inverse of the actual behavior.)
 * <p/>
 * It is expected that the supplied collection is modifiable. It will be modified inplace.
 *
 * @param serviceComponentHosts a collection of ServiceComponentHost items to test; may be null
 * @param hosts                 a set of host names indicating valid hosts; may be null, in which
 *                              case no filtering is performed
 * @return the supplied (possibly filtered) collection of ServiceComponentHost items
 */
private Collection<ServiceComponentHost> filterServiceComponentHostsForHosts(Collection<ServiceComponentHost> serviceComponentHosts,
                                                                             Set<String> hosts) {
  if ((serviceComponentHosts != null) && (hosts != null)) {
    // removeIf is equivalent to the explicit Iterator loop but cannot accidentally skip
    // Iterator.remove(), and makes the retention predicate obvious.
    serviceComponentHosts.removeIf(sch -> !hosts.contains(sch.getHostName()));
  }

  return serviceComponentHosts;
}
/**
 * Calculate the hosts to include when issuing agent-side commands.
 * <p>
 * If forcing all hosts, select only the healthy hosts in the cluster else select only the healthy
 * hosts from the set of hosts specified in the collection of relevant {@link ServiceComponentHost}.
 *
 * @param cluster                     the cluster
 * @param serviceComponentHosts       a collection of {@link ServiceComponentHost}s that are
 *                                    relevant to the current operation
 * @param hostsWithValidKerberosClient the collection of hosts known to have the Kerberos client
 *                                    component installed
 * @param forceAllHosts               true to process all hosts from the cluster rather than use
 *                                    the hosts parsed from the set of {@link ServiceComponentHost}s
 * @return a filtered list of host names
 * @throws AmbariException
 */
private List<String> calculateHosts(Cluster cluster, List<ServiceComponentHost> serviceComponentHosts, Set<String> hostsWithValidKerberosClient, boolean forceAllHosts) throws AmbariException {
  if (!forceAllHosts) {
    // Only the hosts carrying a relevant component AND a valid Kerberos client are candidates.
    Collection<ServiceComponentHost> relevantComponents = filterServiceComponentHostsForHosts(
        new ArrayList<>(serviceComponentHosts), hostsWithValidKerberosClient);

    if (relevantComponents.isEmpty()) {
      return Collections.emptyList();
    }

    return createUniqueHostList(relevantComponents, Collections.singleton(HostState.HEALTHY));
  }

  // Forcing all hosts: take every healthy host in the cluster.
  List<String> healthyHostNames = new ArrayList<>();
  Collection<Host> clusterHosts = cluster.getHosts();

  if (!CollectionUtils.isEmpty(clusterHosts)) {
    for (Host clusterHost : clusterHosts) {
      if (HostState.HEALTHY == clusterHost.getState()) {
        healthyHostNames.add(clusterHost.getHostName());
      }
    }
  }

  return healthyHostNames;
}
/**
 * DeletePrincipalsAndKeytabsHandler is an implementation of the Handler interface used to delete
 * principals and keytabs throughout the cluster.
 * <p/>
 * To complete the process, this implementation creates the following stages:
 * <ol>
 * <li>delete principals</li>
 * <li>remove keytab files</li>
 * </ol>
 */
private class DeletePrincipalsAndKeytabsHandler extends Handler {

  /**
   * Builds the ordered set of stages that remove Kerberos identities: prepare the identity
   * metadata, destroy the principals in the KDC, then delete the keytab files from the hosts.
   * All stages are skipped when identity management is disabled via kerberos-env.
   *
   * @return the id of the last stage added to the request
   */
  @Override
  public long createStages(Cluster cluster,
                           String clusterHostInfoJson, String hostParamsJson,
                           ServiceComponentHostServerActionEvent event,
                           RoleCommandOrder roleCommandOrder, KerberosDetails kerberosDetails,
                           File dataDirectory, RequestStageContainer requestStageContainer,
                           List<ServiceComponentHost> serviceComponentHosts,
                           Map<String, ? extends Collection<String>> serviceComponentFilter, Set<String> hostFilter, Collection<String> identityFilter, Set<String> hostsWithValidKerberosClient)
      throws AmbariException {

    // If a RequestStageContainer does not already exist, create a new one...
    if (requestStageContainer == null) {
      requestStageContainer = new RequestStageContainer(
          actionManager.getNextRequestId(),
          null,
          requestFactory,
          actionManager);
    }

    if (kerberosDetails.manageIdentities()) {
      // If there are principals and keytabs to process, setup the following stages:
      //  1) prepare
      //  2) delete principals
      //  3) delete keytab files

      // Command parameters shared by all server actions in the stages below; the optional
      // filters scope the operation to a subset of services, hosts, or identities.
      Map<String, String> commandParameters = new HashMap<>();
      commandParameters.put(KerberosServerAction.AUTHENTICATED_USER_NAME, ambariManagementController.getAuthName());
      commandParameters.put(KerberosServerAction.DEFAULT_REALM, kerberosDetails.getDefaultRealm());
      if (dataDirectory != null) {
        commandParameters.put(KerberosServerAction.DATA_DIRECTORY, dataDirectory.getAbsolutePath());
      }
      if (serviceComponentFilter != null) {
        commandParameters.put(KerberosServerAction.SERVICE_COMPONENT_FILTER, StageUtils.getGson().toJson(serviceComponentFilter));
      }
      if (hostFilter != null) {
        commandParameters.put(KerberosServerAction.HOST_FILTER, StageUtils.getGson().toJson(hostFilter));
      }
      if (identityFilter != null) {
        commandParameters.put(KerberosServerAction.IDENTITY_FILTER, StageUtils.getGson().toJson(identityFilter));
      }
      commandParameters.put(KerberosServerAction.KDC_TYPE, kerberosDetails.getKdcType().name());

      // *****************************************************************
      // Create stage to prepare the Kerberos identities
      addPrepareKerberosIdentitiesStage(cluster, clusterHostInfoJson, hostParamsJson, event,
          commandParameters, roleCommandOrder, requestStageContainer);

      // *****************************************************************
      // Create stage to delete principals
      addDestroyPrincipalsStage(cluster, clusterHostInfoJson, hostParamsJson, event,
          commandParameters, roleCommandOrder, requestStageContainer);

      // *****************************************************************
      // Create stage to delete keytabs
      addDeleteKeytabFilesStage(cluster, serviceComponentHosts, clusterHostInfoJson,
          hostParamsJson, commandParameters, roleCommandOrder, requestStageContainer, hostsWithValidKerberosClient);
    }

    return requestStageContainer.getLastStageId();
  }
}
/**
 * KerberosDetails is a helper class to hold the details of the relevant Kerberos-specific
 * configurations so they may be passed around more easily.
 */
private static class KerberosDetails {
  // Snapshot of the Kerberos-relevant cluster state; populated via setters.
  private String defaultRealm;
  private KDCType kdcType;
  private Map<String, String> kerberosEnvProperties;
  private SecurityType securityType;
  // Tri-state: null means "derive from kerberos-env", otherwise an explicit override.
  private Boolean manageIdentities;

  public void setDefaultRealm(String defaultRealm) {
    this.defaultRealm = defaultRealm;
  }

  public String getDefaultRealm() {
    return defaultRealm;
  }

  public void setKdcType(KDCType kdcType) {
    this.kdcType = kdcType;
  }

  public KDCType getKdcType() {
    return kdcType;
  }

  public void setKerberosEnvProperties(Map<String, String> kerberosEnvProperties) {
    this.kerberosEnvProperties = kerberosEnvProperties;
  }

  public Map<String, String> getKerberosEnvProperties() {
    return kerberosEnvProperties;
  }

  public void setSecurityType(SecurityType securityType) {
    this.securityType = securityType;
  }

  public SecurityType getSecurityType() {
    return securityType;
  }

  /**
   * Whether Ambari should manage Kerberos identities: an explicit override wins; otherwise
   * identities are managed unless kerberos-env explicitly says "false".
   */
  public boolean manageIdentities() {
    if (manageIdentities != null) {
      return manageIdentities;
    }
    return (kerberosEnvProperties == null) ||
        !"false".equalsIgnoreCase(kerberosEnvProperties.get(MANAGE_IDENTITIES));
  }

  public void setManageIdentities(Boolean manageIdentities) {
    this.manageIdentities = manageIdentities;
  }

  /**
   * Whether an Ambari server principal should be created: true unless kerberos-env
   * explicitly says "false".
   */
  public boolean createAmbariPrincipal() {
    return (kerberosEnvProperties == null) ||
        !"false".equalsIgnoreCase(kerberosEnvProperties.get(CREATE_AMBARI_PRINCIPAL));
  }

  /**
   * The configured pre-configure-services value, or "" when no kerberos-env is available.
   */
  public String getPreconfigureServices() {
    if (kerberosEnvProperties == null) {
      return "";
    }
    return kerberosEnvProperties.get(PRECONFIGURE_SERVICES);
  }
}
}
|
ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelperImpl.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ambari.server.controller;
import static java.util.Collections.singletonList;
import java.io.File;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import java.util.TreeSet;
import java.util.regex.Matcher;
import org.apache.ambari.annotations.Experimental;
import org.apache.ambari.annotations.ExperimentalFeature;
import org.apache.ambari.server.AmbariException;
import org.apache.ambari.server.Role;
import org.apache.ambari.server.RoleCommand;
import org.apache.ambari.server.ServiceNotFoundException;
import org.apache.ambari.server.actionmanager.ActionManager;
import org.apache.ambari.server.actionmanager.RequestFactory;
import org.apache.ambari.server.actionmanager.Stage;
import org.apache.ambari.server.actionmanager.StageFactory;
import org.apache.ambari.server.api.services.AmbariMetaInfo;
import org.apache.ambari.server.api.services.stackadvisor.StackAdvisorException;
import org.apache.ambari.server.api.services.stackadvisor.StackAdvisorHelper;
import org.apache.ambari.server.api.services.stackadvisor.StackAdvisorRequest;
import org.apache.ambari.server.api.services.stackadvisor.recommendations.RecommendationResponse;
import org.apache.ambari.server.configuration.Configuration;
import org.apache.ambari.server.controller.internal.RequestResourceFilter;
import org.apache.ambari.server.controller.internal.RequestStageContainer;
import org.apache.ambari.server.controller.utilities.KerberosChecker;
import org.apache.ambari.server.metadata.RoleCommandOrder;
import org.apache.ambari.server.orm.dao.ArtifactDAO;
import org.apache.ambari.server.orm.dao.HostDAO;
import org.apache.ambari.server.orm.dao.KerberosKeytabDAO;
import org.apache.ambari.server.orm.dao.KerberosKeytabPrincipalDAO;
import org.apache.ambari.server.orm.dao.KerberosPrincipalDAO;
import org.apache.ambari.server.orm.entities.ArtifactEntity;
import org.apache.ambari.server.orm.entities.HostEntity;
import org.apache.ambari.server.orm.entities.KerberosKeytabEntity;
import org.apache.ambari.server.orm.entities.KerberosKeytabPrincipalEntity;
import org.apache.ambari.server.security.credential.Credential;
import org.apache.ambari.server.security.credential.PrincipalKeyCredential;
import org.apache.ambari.server.security.encryption.CredentialStoreService;
import org.apache.ambari.server.serveraction.ServerAction;
import org.apache.ambari.server.serveraction.kerberos.CleanupServerAction;
import org.apache.ambari.server.serveraction.kerberos.Component;
import org.apache.ambari.server.serveraction.kerberos.ConfigureAmbariIdentitiesServerAction;
import org.apache.ambari.server.serveraction.kerberos.CreateKeytabFilesServerAction;
import org.apache.ambari.server.serveraction.kerberos.CreatePrincipalsServerAction;
import org.apache.ambari.server.serveraction.kerberos.DestroyPrincipalsServerAction;
import org.apache.ambari.server.serveraction.kerberos.FinalizeKerberosServerAction;
import org.apache.ambari.server.serveraction.kerberos.KDCType;
import org.apache.ambari.server.serveraction.kerberos.KerberosAdminAuthenticationException;
import org.apache.ambari.server.serveraction.kerberos.KerberosIdentityDataFileWriter;
import org.apache.ambari.server.serveraction.kerberos.KerberosInvalidConfigurationException;
import org.apache.ambari.server.serveraction.kerberos.KerberosKDCConnectionException;
import org.apache.ambari.server.serveraction.kerberos.KerberosKDCSSLConnectionException;
import org.apache.ambari.server.serveraction.kerberos.KerberosLDAPContainerException;
import org.apache.ambari.server.serveraction.kerberos.KerberosMissingAdminCredentialsException;
import org.apache.ambari.server.serveraction.kerberos.KerberosOperationException;
import org.apache.ambari.server.serveraction.kerberos.KerberosOperationHandler;
import org.apache.ambari.server.serveraction.kerberos.KerberosOperationHandlerFactory;
import org.apache.ambari.server.serveraction.kerberos.KerberosRealmException;
import org.apache.ambari.server.serveraction.kerberos.KerberosServerAction;
import org.apache.ambari.server.serveraction.kerberos.PrepareDisableKerberosServerAction;
import org.apache.ambari.server.serveraction.kerberos.PrepareEnableKerberosServerAction;
import org.apache.ambari.server.serveraction.kerberos.PrepareKerberosIdentitiesServerAction;
import org.apache.ambari.server.serveraction.kerberos.UpdateKerberosConfigsServerAction;
import org.apache.ambari.server.serveraction.kerberos.stageutils.ResolvedKerberosKeytab;
import org.apache.ambari.server.serveraction.kerberos.stageutils.ResolvedKerberosPrincipal;
import org.apache.ambari.server.stageplanner.RoleGraph;
import org.apache.ambari.server.stageplanner.RoleGraphFactory;
import org.apache.ambari.server.state.Cluster;
import org.apache.ambari.server.state.Clusters;
import org.apache.ambari.server.state.ComponentInfo;
import org.apache.ambari.server.state.Config;
import org.apache.ambari.server.state.ConfigHelper;
import org.apache.ambari.server.state.Host;
import org.apache.ambari.server.state.HostState;
import org.apache.ambari.server.state.PropertyInfo;
import org.apache.ambari.server.state.SecurityType;
import org.apache.ambari.server.state.Service;
import org.apache.ambari.server.state.ServiceComponent;
import org.apache.ambari.server.state.ServiceComponentHost;
import org.apache.ambari.server.state.ServiceInfo;
import org.apache.ambari.server.state.StackId;
import org.apache.ambari.server.state.State;
import org.apache.ambari.server.state.ValueAttributesInfo;
import org.apache.ambari.server.state.kerberos.AbstractKerberosDescriptorContainer;
import org.apache.ambari.server.state.kerberos.KerberosComponentDescriptor;
import org.apache.ambari.server.state.kerberos.KerberosConfigurationDescriptor;
import org.apache.ambari.server.state.kerberos.KerberosDescriptor;
import org.apache.ambari.server.state.kerberos.KerberosDescriptorFactory;
import org.apache.ambari.server.state.kerberos.KerberosIdentityDescriptor;
import org.apache.ambari.server.state.kerberos.KerberosKeytabDescriptor;
import org.apache.ambari.server.state.kerberos.KerberosPrincipalDescriptor;
import org.apache.ambari.server.state.kerberos.KerberosPrincipalType;
import org.apache.ambari.server.state.kerberos.KerberosServiceDescriptor;
import org.apache.ambari.server.state.kerberos.VariableReplacementHelper;
import org.apache.ambari.server.state.svccomphost.ServiceComponentHostServerActionEvent;
import org.apache.ambari.server.utils.StageUtils;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.directory.server.kerberos.shared.keytab.Keytab;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.google.gson.JsonArray;
import com.google.gson.JsonObject;
import com.google.gson.JsonPrimitive;
import com.google.inject.Inject;
import com.google.inject.Injector;
import com.google.inject.Singleton;
import com.google.inject.persist.Transactional;
@Singleton
public class KerberosHelperImpl implements KerberosHelper {
// Base directory under which Kerberos-related temporary data directories are created.
public static final String BASE_LOG_DIR = "/tmp/ambari";

private static final Logger LOG = LoggerFactory.getLogger(KerberosHelperImpl.class);

/**
 * The set of states a component may be in, indicating that is have been previously installed on
 * the cluster.
 * <p>
 * These values are important when trying to determine the state of the cluster when adding new components
 */
private static final Set<State> PREVIOUSLY_INSTALLED_STATES = EnumSet.of(State.INSTALLED, State.STARTED, State.DISABLED);

// Custom-command names for agent-side keytab operations.
public static final String CHECK_KEYTABS = "CHECK_KEYTABS";
public static final String SET_KEYTAB = "SET_KEYTAB";
public static final String REMOVE_KEYTAB = "REMOVE_KEYTAB";

// Framework collaborators and DAOs, injected by Guice.
@Inject
private AmbariCustomCommandExecutionHelper customCommandExecutionHelper;

@Inject
private AmbariManagementController ambariManagementController;

@Inject
private AmbariMetaInfo ambariMetaInfo;

@Inject
private ActionManager actionManager;

@Inject
private RequestFactory requestFactory;

@Inject
private StageFactory stageFactory;

@Inject
private RoleGraphFactory roleGraphFactory;

@Inject
private Clusters clusters;

@Inject
private ConfigHelper configHelper;

@Inject
private VariableReplacementHelper variableReplacementHelper;

@Inject
private Configuration configuration;

@Inject
private KerberosOperationHandlerFactory kerberosOperationHandlerFactory;

@Inject
private KerberosDescriptorFactory kerberosDescriptorFactory;

@Inject
private ArtifactDAO artifactDAO;

@Inject
private KerberosPrincipalDAO kerberosPrincipalDAO;

@Inject
private KerberosKeytabDAO kerberosKeytabDAO;

@Inject
private KerberosKeytabPrincipalDAO kerberosKeytabPrincipalDAO;

@Inject
private HostDAO hostDAO;

/**
 * The injector used to create new instances of helper classes like CreatePrincipalsServerAction
 * and CreateKeytabFilesServerAction.
 */
@Inject
private Injector injector;

/**
 * The secure storage facility to use to store KDC administrator credential.
 */
@Inject
private CredentialStoreService credentialStoreService;

@Inject
private StackAdvisorHelper stackAdvisorHelper;
/**
 * Toggles cluster security by building the enable- or disable-Kerberos stage sequence,
 * depending on the requested security type.
 */
@Override
public RequestStageContainer toggleKerberos(Cluster cluster, SecurityType securityType,
                                            RequestStageContainer requestStageContainer,
                                            Boolean manageIdentities)
    throws AmbariException, KerberosOperationException {

  KerberosDetails kerberosDetails = getKerberosDetails(cluster, manageIdentities);

  // Update KerberosDetails with the new security type - the current one in the cluster is the "old" value
  kerberosDetails.setSecurityType(securityType);

  // Pick the handler for the requested direction; anything else is an error.
  final Handler handler;
  if (securityType == SecurityType.KERBEROS) {
    LOG.info("Configuring Kerberos for realm {} on cluster, {}", kerberosDetails.getDefaultRealm(), cluster.getClusterName());
    handler = new EnableKerberosHandler();
  } else if (securityType == SecurityType.NONE) {
    LOG.info("Disabling Kerberos from cluster, {}", cluster.getClusterName());
    handler = new DisableKerberosHandler();
  } else {
    throw new AmbariException(String.format("Unexpected security type value: %s", securityType.name()));
  }

  return handle(cluster, kerberosDetails, null, null, null, null, requestStageContainer, handler);
}
/**
 * Executes any supported custom Kerberos operations found in the request properties.
 * Currently only REGENERATE_KEYTABS is supported; its directive value selects whether all
 * keytabs ("true"/"all") or only missing ones ("missing") are regenerated, optionally scoped
 * by host and service/component filters.
 */
@Override
public RequestStageContainer executeCustomOperations(Cluster cluster, Map<String, String> requestProperties,
                                                     RequestStageContainer requestStageContainer,
                                                     Boolean manageIdentities)
    throws AmbariException, KerberosOperationException {

  if (requestProperties != null) {

    for (SupportedCustomOperation operation : SupportedCustomOperation.values()) {
      // Directives arrive keyed by the lower-cased operation name.
      if (requestProperties.containsKey(operation.name().toLowerCase())) {
        String value = requestProperties.get(operation.name().toLowerCase());

        // The operation specific logic is kept in one place and described here
        switch (operation) {
          case REGENERATE_KEYTABS:
            // Regeneration only makes sense on an already-Kerberized cluster.
            if (cluster.getSecurityType() != SecurityType.KERBEROS) {
              throw new AmbariException(String.format("Custom operation %s can only be requested with the security type cluster property: %s", operation.name(), SecurityType.KERBEROS.name()));
            }

            boolean retryAllowed = false;
            if (requestProperties.containsKey(ALLOW_RETRY)) {
              String allowRetryString = requestProperties.get(ALLOW_RETRY);
              retryAllowed = Boolean.parseBoolean(allowRetryString);
            }

            CreatePrincipalsAndKeytabsHandler handler = null;

            // Optional scoping directives parsed from the request.
            Set<String> hostFilter = parseHostFilter(requestProperties);
            Map<String, Set<String>> serviceComponentFilter = parseComponentFilter(requestProperties);
            // Config updates happen unless explicitly suppressed via the ignore-configs directive.
            boolean updateConfigurations = !requestProperties.containsKey(DIRECTIVE_IGNORE_CONFIGS)
                || !"true".equalsIgnoreCase(requestProperties.get(DIRECTIVE_IGNORE_CONFIGS));
            // No host filter (or a "*" wildcard) means the operation targets every host.
            boolean forceAllHosts = (hostFilter == null) || (hostFilter.contains("*"));

            if ("true".equalsIgnoreCase(value) || "all".equalsIgnoreCase(value)) {
              handler = new CreatePrincipalsAndKeytabsHandler(KerberosServerAction.OperationType.RECREATE_ALL, updateConfigurations, forceAllHosts, true);
            } else if ("missing".equalsIgnoreCase(value)) {
              handler = new CreatePrincipalsAndKeytabsHandler(KerberosServerAction.OperationType.CREATE_MISSING, updateConfigurations, forceAllHosts, true);
            }

            if (handler != null) {
              handler.setRetryAllowed(retryAllowed);

              requestStageContainer = handle(cluster, getKerberosDetails(cluster, manageIdentities),
                  serviceComponentFilter, hostFilter, null, null, requestStageContainer, handler);
            } else {
              throw new AmbariException(String.format("Unexpected directive value: %s", value));
            }

            break;

          default: // No other operations are currently supported
            throw new AmbariException(String.format("Custom operation not supported: %s", operation.name()));
        }
      }
    }
  }

  return requestStageContainer;
}
/**
 * Parsing 'Kerberos/hosts' property to get list of hosts for 'regenerate_keytabs' request.
 * Must be a string with comma separated list of hosts. Absent or miss-spelled hosts must be silently ignored
 * by caller code.
 * <p>
 * Tokens are trimmed and empty tokens dropped so that values like {@code "host1, host2"} match
 * actual host names (previously the untrimmed {@code " host2"} could never match anything).
 *
 * @param requestProperties the request properties possibly containing the hosts directive
 * @return an immutable set of host names, or {@code null} when the directive is absent
 */
public static Set<String> parseHostFilter(final Map<String, String> requestProperties) {
  if (requestProperties.containsKey(DIRECTIVE_HOSTS)) {
    ImmutableSet.Builder<String> hosts = ImmutableSet.builder();
    for (String host : requestProperties.get(DIRECTIVE_HOSTS).split(",")) {
      String hostName = host.trim();
      if (!hostName.isEmpty()) {
        hosts.add(hostName);
      }
    }
    return hosts.build();
  }
  return null;
}
/**
 * Parsing 'Kerberos/components' property to get list of components for 'regenerate_keytabs' request.
 * Must be a comma separated list of strings that follow pattern 'SERVICENAME:COMPONENTNAME;ANOTHERCOMPONENTNAME'.
 * For example: HDFS:NAMENODE;DATANODE,YARN:RESOURCEMANAGER,ZOOKEEPER:ZOOKEEPER_SERVER;ZOOKEEPER_CLIENT.
 * Absent or miss-spelled components and services must be silently ignored by caller code.
 * <p>
 * Duplicate service entries are merged rather than rejected: the previous implementation used
 * {@code ImmutableMap.Builder}, whose {@code build()} throws {@code IllegalArgumentException}
 * on duplicate keys, turning a sloppy directive into a request failure.
 *
 * @param requestProperties the request properties possibly containing the components directive
 * @return an immutable map of service name to component names ("*" means all components),
 *         or {@code null} when the directive is absent
 */
public static Map<String, Set<String>> parseComponentFilter(final Map<String, String> requestProperties) {
  if (requestProperties.containsKey(DIRECTIVE_COMPONENTS)) {
    Map<String, Set<String>> serviceComponentFilter = new HashMap<>();
    for (String serviceString : requestProperties.get(DIRECTIVE_COMPONENTS).split(",")) {
      String[] serviceComponentsArray = serviceString.split(":");
      String serviceName = serviceComponentsArray[0];

      // "SERVICE:COMP1;COMP2" names specific components; a bare "SERVICE" means all of them.
      Set<String> components = (serviceComponentsArray.length == 2)
          ? ImmutableSet.copyOf(serviceComponentsArray[1].split(";"))
          : ImmutableSet.of("*");

      // Merge repeated service entries instead of failing on duplicate keys.
      serviceComponentFilter.merge(serviceName, components,
          (existing, added) -> Sets.union(existing, added).immutableCopy());
    }
    return ImmutableMap.copyOf(serviceComponentFilter);
  }
  return null;
}
/**
 * Ensures the requested Kerberos identities exist by delegating to {@code handle(...)} with a
 * {@link CreatePrincipalsAndKeytabsHandler} configured for the default operation type (no full
 * regeneration, no configuration updates, no forced all-hosts processing, no Ambari identity).
 */
@Override
public RequestStageContainer ensureIdentities(Cluster cluster, Map<String, ? extends Collection<String>> serviceComponentFilter,
                                              Set<String> hostFilter, Collection<String> identityFilter, Set<String> hostsToForceKerberosOperations,
                                              RequestStageContainer requestStageContainer, Boolean manageIdentities)
    throws AmbariException, KerberosOperationException {
  return handle(cluster, getKerberosDetails(cluster, manageIdentities), serviceComponentFilter, hostFilter, identityFilter,
      hostsToForceKerberosOperations, requestStageContainer, new CreatePrincipalsAndKeytabsHandler(KerberosServerAction.OperationType.DEFAULT, false, false,
          false));
}
/**
 * Deletes the Kerberos identities matching the supplied filters by delegating to
 * {@code handle(...)} with a {@link DeletePrincipalsAndKeytabsHandler}.
 */
@Override
public RequestStageContainer deleteIdentities(Cluster cluster, Map<String, ? extends Collection<String>> serviceComponentFilter,
                                              Set<String> hostFilter, Collection<String> identityFilter, RequestStageContainer requestStageContainer,
                                              Boolean manageIdentities)
    throws AmbariException, KerberosOperationException {
  return handle(cluster, getKerberosDetails(cluster, manageIdentities), serviceComponentFilter, hostFilter, identityFilter, null,
      requestStageContainer, new DeletePrincipalsAndKeytabsHandler());
}
/**
 * Deletes the kerberos identities of the given component, even if the component is already deleted.
 *
 * @param cluster    the cluster the components belong(ed) to
 * @param components the components whose identities are to be removed
 * @param identities the names of the identities to remove; a no-op when empty
 */
@Override
public void deleteIdentities(Cluster cluster, List<Component> components, Set<String> identities) throws AmbariException, KerberosOperationException {
  if (identities.isEmpty()) {
    return;
  }
  // Fixed: the original message lacked a "{}" placeholder, so the identity set was never logged.
  LOG.info("Deleting identities: {}", identities);
  KerberosDetails kerberosDetails = getKerberosDetails(cluster, null);
  validateKDCCredentials(kerberosDetails, cluster);
  // Scratch directory for the identity metadata consumed by the delete stages.
  File dataDirectory = createTemporaryDirectory();
  RoleCommandOrder roleCommandOrder = ambariManagementController.getRoleCommandOrder(cluster);
  DeleteIdentityHandler handler = new DeleteIdentityHandler(customCommandExecutionHelper, configuration.getDefaultServerTaskTimeout(), stageFactory, ambariManagementController);
  DeleteIdentityHandler.CommandParams commandParameters = new DeleteIdentityHandler.CommandParams(
      components,
      identities,
      ambariManagementController.getAuthName(),
      dataDirectory,
      kerberosDetails.getDefaultRealm(),
      kerberosDetails.getKdcType());
  OrderedRequestStageContainer stageContainer = new OrderedRequestStageContainer(
      roleGraphFactory,
      roleCommandOrder,
      new RequestStageContainer(actionManager.getNextRequestId(), null, requestFactory, actionManager));
  handler.addDeleteIdentityStages(cluster, stageContainer, commandParameters, kerberosDetails.manageIdentities());
  // Persist so the request and its stages are scheduled for execution.
  stageContainer.getRequestStageContainer().persist();
}
/**
 * Recomputes and stores the Kerberos-related configuration updates for the (optionally filtered)
 * set of services. The installed service-to-component map and the set of previously installed
 * services are gathered as a side effect of iterating the cluster's ServiceComponentHosts.
 */
@Override
public void configureServices(Cluster cluster, Map<String, Collection<String>> serviceFilter)
    throws AmbariException, KerberosInvalidConfigurationException {
  final Map<String, Set<String>> installedServices = new HashMap<>();
  final Set<String> previouslyExistingServices = new HashSet<>();

  // Calculate the map of installed services to installed components.
  // We can create the map in the "shouldIncludeCommand" Command to avoid having to iterate
  // over the returned ServiceComponentHost List.
  getServiceComponentHosts(cluster,
      new Command<Boolean, ServiceComponentHost>() {
        @Override
        public Boolean invoke(ServiceComponentHost sch) throws AmbariException {
          if (sch != null) {
            String serviceName = sch.getServiceName();

            // Accumulate the component names per service.
            Set<String> installedComponents = installedServices.get(serviceName);
            if (installedComponents == null) {
              installedComponents = new HashSet<>();
              installedServices.put(serviceName, installedComponents);
            }
            installedComponents.add(sch.getServiceComponentName());

            // Determine if this component was PREVIOUSLY installed, which implies that its containing service was PREVIOUSLY installed
            if (!previouslyExistingServices.contains(serviceName) && PREVIOUSLY_INSTALLED_STATES.contains(sch.getState())) {
              previouslyExistingServices.add(serviceName);
            }

            return true;
          }

          return false;
        }
      });

  Map<String, Map<String, String>> existingConfigurations = calculateExistingConfigurations(cluster, null);
  Map<String, Map<String, String>> updates = getServiceConfigurationUpdates(cluster,
      existingConfigurations, installedServices, serviceFilter, previouslyExistingServices, true, true);

  // Store the updates...
  for (Map.Entry<String, Map<String, String>> entry : updates.entrySet()) {
    configHelper.updateConfigType(cluster, cluster.getDesiredStackVersion(),
        ambariManagementController, entry.getKey(), entry.getValue(), null,
        ambariManagementController.getAuthName(), "Enabling Kerberos for added components");
  }
}
  /**
   * Computes the configuration updates required for the given installed services, driven by
   * the cluster's Kerberos descriptor.
   * <p>
   * Identity- and component-level configuration entries from the descriptor are merged into a
   * single config-type to property-name/value map; auth-to-local rules are then recalculated
   * and, optionally, Stack Advisor recommendations are applied on top.
   *
   * @param cluster                    the relevant cluster
   * @param existingConfigurations     the cluster's current configurations (config-type to properties)
   * @param installedServices          map of installed service names to installed component names
   * @param serviceFilter              optional map limiting which services/components are processed;
   *                                   null means process all installed services
   * @param previouslyExistingServices services that existed before this operation; affects which
   *                                   descriptor configurations are merged for their components
   * @param kerberosEnabled            whether Kerberos is (or will be) enabled
   * @param applyStackAdvisorUpdates   true to pass the result through the Stack Advisor
   * @return the map of config-type to property updates
   * @throws KerberosInvalidConfigurationException if the Kerberos configuration is invalid
   * @throws AmbariException                       if an error occurs while processing
   */
  @Override
  public Map<String, Map<String, String>> getServiceConfigurationUpdates(Cluster cluster,
                                                                         Map<String, Map<String, String>> existingConfigurations,
                                                                         Map<String, Set<String>> installedServices,
                                                                         Map<String, Collection<String>> serviceFilter,
                                                                         Set<String> previouslyExistingServices,
                                                                         boolean kerberosEnabled,
                                                                         boolean applyStackAdvisorUpdates)
    throws KerberosInvalidConfigurationException, AmbariException {
    Map<String, Map<String, String>> kerberosConfigurations = new HashMap<>();
    KerberosDetails kerberosDetails = getKerberosDetails(cluster, null);
    KerberosDescriptor kerberosDescriptor = getKerberosDescriptor(cluster, false);
    Map<String, String> kerberosDescriptorProperties = kerberosDescriptor.getProperties();
    // Work on a deep copy so the caller's existing configurations are not mutated
    Map<String, Map<String, String>> configurations = addAdditionalConfigurations(cluster,
      deepCopy(existingConfigurations), null, kerberosDescriptorProperties);
    Map<String, Set<String>> propertiesToIgnore = new HashMap<>();
    // If Ambari is managing it own identities then add AMBARI to the set of installed service so
    // that its Kerberos descriptor entries will be included.
    if (createAmbariIdentities(existingConfigurations.get(KERBEROS_ENV))) {
      installedServices = new HashMap<>(installedServices);
      installedServices.put(RootService.AMBARI.name(), Collections.singleton(RootComponent.AMBARI_SERVER.name()));
    }
    // Create the context to use for filtering Kerberos Identities based on the state of the cluster
    Map<String, Object> filterContext = new HashMap<>();
    filterContext.put("configurations", configurations);
    filterContext.put("services", installedServices.keySet());
    for (Map.Entry<String, Set<String>> installedServiceEntry : installedServices.entrySet()) {
      String installedService = installedServiceEntry.getKey();
      // A null service filter means "all services"
      if ((serviceFilter == null) || (serviceFilter.containsKey(installedService))) {
        Collection<String> componentFilter = (serviceFilter == null) ? null : serviceFilter.get(installedService);
        Set<String> installedComponents = installedServiceEntry.getValue();
        // Set properties...
        KerberosServiceDescriptor serviceDescriptor = kerberosDescriptor.getService(installedService);
        if (serviceDescriptor != null) {
          if (installedComponents != null) {
            boolean servicePreviouslyExisted = (previouslyExistingServices != null) && previouslyExistingServices.contains(installedService);
            for (String installedComponent : installedComponents) {
              if ((componentFilter == null) || componentFilter.contains(installedComponent)) {
                KerberosComponentDescriptor componentDescriptor = serviceDescriptor.getComponent(installedComponent);
                if (componentDescriptor != null) {
                  Map<String, Map<String, String>> identityConfigurations;
                  // Merge identity-driven configuration entries from both the service and
                  // component levels of the descriptor
                  identityConfigurations = getIdentityConfigurations(serviceDescriptor.getIdentities(true, filterContext));
                  processIdentityConfigurations(identityConfigurations, kerberosConfigurations, configurations, propertiesToIgnore);
                  identityConfigurations = getIdentityConfigurations(componentDescriptor.getIdentities(true, filterContext));
                  processIdentityConfigurations(identityConfigurations, kerberosConfigurations, configurations, propertiesToIgnore);
                  // Descriptor-declared configurations; defaults-only when the service previously existed
                  mergeConfigurations(kerberosConfigurations,
                    componentDescriptor.getConfigurations(!servicePreviouslyExisted), configurations, null);
                }
              }
            }
          }
        }
      }
    }
    // Recalculate the auth-to-local rules over the merged configurations
    setAuthToLocalRules(cluster, kerberosDescriptor, kerberosDetails.getDefaultRealm(), installedServices, configurations, kerberosConfigurations, false);
    return (applyStackAdvisorUpdates)
      ? applyStackAdvisorUpdates(cluster, installedServices.keySet(), configurations, kerberosConfigurations, propertiesToIgnore,
      new HashMap<>(), kerberosEnabled)
      : kerberosConfigurations;
  }
  /**
   * Adds host assignments, recommended by the Stack Advisor, to the configuration map (clusterHostInfo)
   * for the components specified in the component filter <code>componentFilter</code> (or all if the
   * component filter is <code>null</code>).
   * <p>
   * Existing comma-delimited host lists in clusterHostInfo are merged (not replaced) with the
   * recommended hosts; a TreeSet keeps the resulting list sorted and de-duplicated.
   *
   * @param cluster         the cluster
   * @param services        the relevant services to consider
   * @param componentFilter the set of components to add to the clusterHostInfo structure
   * @param configurations  the configurations map to update
   * @throws AmbariException if an error occurs
   */
  private void applyStackAdvisorHostRecommendations(Cluster cluster,
                                                    Set<String> services,
                                                    Set<String> componentFilter,
                                                    Map<String, Map<String, String>> configurations)
    throws AmbariException {
    StackId stackVersion = cluster.getCurrentStackVersion();
    // Collect the names of every host in the cluster for the Stack Advisor request
    List<String> hostNames = new ArrayList<>();
    Collection<Host> hosts = cluster.getHosts();
    if (hosts != null) {
      for (Host host : hosts) {
        hostNames.add(host.getHostName());
      }
    }
    // Ask the Stack Advisor for recommended host-group layouts for the given services
    StackAdvisorRequest request = StackAdvisorRequest.StackAdvisorRequestBuilder
      .forStack(stackVersion.getStackName(), stackVersion.getStackVersion())
      .forServices(services)
      .forHosts(hostNames)
      .withComponentHostsMap(cluster.getServiceComponentHostMap(null, services))
      .ofType(StackAdvisorRequest.StackAdvisorRequestType.HOST_GROUPS)
      .build();
    try {
      RecommendationResponse response = stackAdvisorHelper.recommend(request);
      // Defensively unwrap the nested response structure; any null level aborts processing
      RecommendationResponse.Recommendation recommendation = (response == null) ? null : response.getRecommendations();
      RecommendationResponse.Blueprint blueprint = (recommendation == null) ? null : recommendation.getBlueprint();
      Set<RecommendationResponse.HostGroup> hostGroups = (blueprint == null) ? null : blueprint.getHostGroups();
      if (hostGroups != null) {
        // Index the binding host groups by name so each recommended host group can be
        // matched to its concrete host assignments
        RecommendationResponse.BlueprintClusterBinding blueprintBinding = recommendation.getBlueprintClusterBinding();
        Map<String, RecommendationResponse.BindingHostGroup> bindingMap = new HashMap<>();
        if (blueprintBinding != null) {
          Set<RecommendationResponse.BindingHostGroup> bindingHostGroups = blueprintBinding.getHostGroups();
          if (bindingHostGroups != null) {
            for (RecommendationResponse.BindingHostGroup bindingHostGroup : bindingHostGroups) {
              bindingMap.put(bindingHostGroup.getName(), bindingHostGroup);
            }
          }
        }
        // Get (and created if needed) the clusterHostInfo map
        Map<String, String> clusterHostInfoMap = configurations.get("clusterHostInfo");
        if (clusterHostInfoMap == null) {
          clusterHostInfoMap = new HashMap<>();
          configurations.put("clusterHostInfo", clusterHostInfoMap);
        }
        // Iterate through the recommendations to find the recommended host assignments
        for (RecommendationResponse.HostGroup hostGroup : hostGroups) {
          Set<Map<String, String>> components = hostGroup.getComponents();
          if (components != null) {
            RecommendationResponse.BindingHostGroup binding = bindingMap.get(hostGroup.getName());
            if (binding != null) {
              Set<Map<String, String>> hostGroupHosts = binding.getHosts();
              if (hostGroupHosts != null) {
                for (Map<String, String> component : components) {
                  String componentName = component.get("name");
                  // If the component filter is null or the current component is found in the filter,
                  // include it in the map
                  if ((componentFilter == null) || componentFilter.contains(componentName)) {
                    String key = StageUtils.getClusterHostInfoKey(componentName);
                    Set<String> fqdns = new TreeSet<>();
                    // Values are a comma-delimited list of hosts.
                    // If a value exists, split it and add the tokens to the set
                    if (!StringUtils.isEmpty(clusterHostInfoMap.get(key))) {
                      fqdns.addAll(Arrays.asList(clusterHostInfoMap.get(key).split(",")));
                    }
                    // Add the set of hosts for the current host group
                    for (Map<String, String> hostGroupHost : hostGroupHosts) {
                      String fqdn = hostGroupHost.get("fqdn");
                      if (!StringUtils.isEmpty(fqdn)) {
                        fqdns.add(fqdn);
                      }
                    }
                    // create the comma-delimited list of hosts
                    clusterHostInfoMap.put(key, StringUtils.join(fqdns, ','));
                  }
                }
              }
            }
          }
        }
      }
    } catch (StackAdvisorException e) {
      LOG.error("Failed to obtain the recommended host groups for the preconfigured components.", e);
      throw new AmbariException(e.getMessage(), e);
    }
  }
@Override
public Map<String, Map<String, String>> applyStackAdvisorUpdates(Cluster cluster, Set<String> services,
Map<String, Map<String, String>> existingConfigurations,
Map<String, Map<String, String>> kerberosConfigurations,
Map<String, Set<String>> propertiesToIgnore,
Map<String, Set<String>> propertiesToRemove,
boolean kerberosEnabled) throws AmbariException {
List<String> hostNames = new ArrayList<>();
Collection<Host> hosts = cluster.getHosts();
if (hosts != null) {
for (Host host : hosts) {
hostNames.add(host.getHostName());
}
}
// Don't actually call the stack advisor if no hosts are in the cluster, else the stack advisor
// will throw a StackAdvisorException stating "Hosts and services must not be empty".
// This could happen when enabling Kerberos while installing a cluster via Blueprints due to the
// way hosts are discovered during the install process.
if (!hostNames.isEmpty()) {
Map<String, Map<String, Map<String, String>>> requestConfigurations = new HashMap<>();
if (existingConfigurations != null) {
for (Map.Entry<String, Map<String, String>> configuration : existingConfigurations.entrySet()) {
Map<String, Map<String, String>> properties = new HashMap<>();
String configType = configuration.getKey();
Map<String, String> configurationProperties = configuration.getValue();
if (configurationProperties == null) {
configurationProperties = Collections.emptyMap();
}
if ("cluster-env".equals(configType)) {
configurationProperties = new HashMap<>(configurationProperties);
configurationProperties.put("security_enabled", (kerberosEnabled) ? "true" : "false");
}
properties.put("properties", configurationProperties);
requestConfigurations.put(configType, properties);
}
}
// Apply the current Kerberos properties...
for (Map.Entry<String, Map<String, String>> configuration : kerberosConfigurations.entrySet()) {
String configType = configuration.getKey();
Map<String, String> configurationProperties = configuration.getValue();
if ((configurationProperties != null) && !configurationProperties.isEmpty()) {
Map<String, Map<String, String>> requestConfiguration = requestConfigurations.get(configType);
if (requestConfiguration == null) {
requestConfiguration = new HashMap<>();
requestConfigurations.put(configType, requestConfiguration);
}
Map<String, String> requestConfigurationProperties = requestConfiguration.get("properties");
if (requestConfigurationProperties == null) {
requestConfigurationProperties = new HashMap<>();
} else {
requestConfigurationProperties = new HashMap<>(requestConfigurationProperties);
}
requestConfigurationProperties.putAll(configurationProperties);
requestConfiguration.put("properties", requestConfigurationProperties);
}
}
Set<StackId> visitedStacks = new HashSet<>();
Map<String, Service> installedServices = cluster.getServices();
for (String serviceName : services) {
Service service = installedServices.get(serviceName);
// Skip services that are not really installed
if (service == null) {
continue;
}
StackId stackId = service.getDesiredStackId();
if (visitedStacks.contains(stackId)) {
continue;
}
for (Map.Entry<String, Map<String, Map<String, String>>> config : requestConfigurations.entrySet()) {
for (Map<String, String> properties : config.getValue().values()) {
for (Map.Entry<String, String> property : properties.entrySet()) {
String oldValue = property.getValue();
String updatedValue = variableReplacementHelper.replaceVariables(property.getValue(), existingConfigurations);
if (!StringUtils.equals(oldValue, updatedValue) && !config.getKey().isEmpty()) {
property.setValue(updatedValue);
if (kerberosConfigurations.containsKey(config.getKey())) {
kerberosConfigurations.get(config.getKey()).put(property.getKey(), updatedValue);
} else {
Map kerberosConfigProperties = new HashMap<>();
kerberosConfigProperties.put(property.getKey(), updatedValue);
kerberosConfigurations.put(config.getKey(), kerberosConfigProperties);
}
}
}
}
}
StackAdvisorRequest request = StackAdvisorRequest.StackAdvisorRequestBuilder
.forStack(stackId.getStackName(), stackId.getStackVersion())
.forServices(services)
.forHosts(hostNames)
.withComponentHostsMap(cluster.getServiceComponentHostMap(null, services))
.withConfigurations(requestConfigurations)
.ofType(StackAdvisorRequest.StackAdvisorRequestType.KERBEROS_CONFIGURATIONS)
.build();
try {
RecommendationResponse response = stackAdvisorHelper.recommend(request);
RecommendationResponse.Recommendation recommendation = (response == null) ? null : response.getRecommendations();
RecommendationResponse.Blueprint blueprint = (recommendation == null) ? null : recommendation.getBlueprint();
Map<String, RecommendationResponse.BlueprintConfigurations> configurations = (blueprint == null) ? null : blueprint.getConfigurations();
if (configurations != null) {
for (Map.Entry<String, RecommendationResponse.BlueprintConfigurations> configuration : configurations.entrySet()) {
String configType = configuration.getKey();
Map<String, String> recommendedConfigProperties = configuration.getValue().getProperties();
Map<String, ValueAttributesInfo> recommendedConfigPropertyAttributes = configuration.getValue().getPropertyAttributes();
Map<String, String> existingConfigProperties = (existingConfigurations == null) ? null : existingConfigurations.get(configType);
Map<String, String> kerberosConfigProperties = kerberosConfigurations.get(configType);
Set<String> ignoreProperties = (propertiesToIgnore == null) ? null : propertiesToIgnore.get(configType);
addRecommendedPropertiesForConfigType(kerberosConfigurations, configType, recommendedConfigProperties,
existingConfigProperties, kerberosConfigProperties, ignoreProperties);
if (recommendedConfigPropertyAttributes != null) {
removeRecommendedPropertiesForConfigType(configType, recommendedConfigPropertyAttributes,
existingConfigProperties, kerberosConfigurations, ignoreProperties, propertiesToRemove);
}
}
}
} catch (Exception e) {
throw new AmbariException(e.getMessage(), e);
}
visitedStacks.add(stackId);
}
}
return kerberosConfigurations;
}
/*
* Recommended property will be updated in or added to kerberosConfigurationS.
*/
private void addRecommendedPropertiesForConfigType(Map<String, Map<String, String>> kerberosConfigurations,
String configType, Map<String, String> recommendedConfigProperties,
Map<String, String> existingConfigProperties,
Map<String, String> kerberosConfigProperties,
Set<String> ignoreProperties) {
for (Map.Entry<String, String> property : recommendedConfigProperties.entrySet()) {
String propertyName = property.getKey();
if ((ignoreProperties == null) || !ignoreProperties.contains(propertyName)) {
String recommendedValue = property.getValue();
if ((kerberosConfigProperties == null) || !kerberosConfigProperties.containsKey(propertyName)) {
// There is no explicit update for this property from the Kerberos Descriptor...
// add the config and property if it also does not exist in the existing configurations
if ((existingConfigProperties == null) || !existingConfigProperties.containsKey(propertyName)) {
LOG.debug("Adding Kerberos configuration based on StackAdvisor recommendation:" +
"\n\tConfigType: {}\n\tProperty: {}\n\tValue: {}",
configType, propertyName, recommendedValue);
if (kerberosConfigProperties == null) {
kerberosConfigProperties = new HashMap<>();
kerberosConfigurations.put(configType, kerberosConfigProperties);
}
kerberosConfigProperties.put(propertyName, recommendedValue);
}
} else {
String value = kerberosConfigProperties.get(propertyName);
if ((value == null) ? (recommendedValue != null) : !value.equals(recommendedValue)) {
// If the recommended value is a change, automatically change it.
LOG.debug("Updating Kerberos configuration based on StackAdvisor recommendation:" +
"\n\tConfigType: {}\n\tProperty: {}\n\tOld Value: {}\n\tNew Value: {}",
configType, propertyName, (value == null) ? "" : value, (recommendedValue == null) ? "" : recommendedValue);
kerberosConfigProperties.put(propertyName, recommendedValue);
}
}
}
}
}
/**
* If property is marked with delete flag in recommendedConfigPropertyAttributes map and is not found in
* ignoreProperties, nor in kerberosConfigProperties but exits in existingConfigProperties add to
* propertiesToRemove map.
*/
private void removeRecommendedPropertiesForConfigType(String configType,
Map<String, ValueAttributesInfo> recommendedConfigPropertyAttributes,
Map<String, String> existingConfigProperties,
Map<String, Map<String, String>> kerberosConfigurations,
Set<String> ignoreProperties,
Map<String, Set<String>> propertiesToRemove) {
for (Map.Entry<String, ValueAttributesInfo> property : recommendedConfigPropertyAttributes.entrySet()) {
String propertyName = property.getKey();
if ("true".equalsIgnoreCase(property.getValue().getDelete())) {
// if property is not in ignoreProperties, nor in kerberosConfigProperties but is found in existingConfigProperties
// add to propertiesToBeRemoved map
Map<String, String> kerberosConfigProperties = kerberosConfigurations.get(configType);
if (((ignoreProperties == null) || !ignoreProperties.contains(propertyName)) &&
((kerberosConfigProperties == null) || kerberosConfigProperties.get(propertyName) == null) &&
(existingConfigProperties != null && existingConfigProperties.containsKey(propertyName))) {
LOG.debug("Property to remove from configuration based on StackAdvisor recommendation:" +
"\n\tConfigType: {}\n\tProperty: {}",
configType, propertyName);
// kerberosEnabled add property to propertiesToRemove, otherwise to kerberosConfigurations map
if (propertiesToRemove != null) {
Set<String> properties = propertiesToRemove.get(configType);
if (properties == null) {
properties = new HashSet<>();
propertiesToRemove.put(configType, properties);
}
properties.add(propertyName);
} else {
if (kerberosConfigProperties == null) {
kerberosConfigProperties = new HashMap<>();
kerberosConfigurations.put(configType, kerberosConfigProperties);
}
kerberosConfigProperties.put(propertyName, "");
}
}
}
}
}
  /**
   * Ensures the headless (service-level and component-level USER) Kerberos identities exist
   * for the given services, creating their principals and keytab data via the configured
   * KDC operation handler. No-op when Ambari does not manage Kerberos identities.
   *
   * @param cluster                the relevant cluster
   * @param existingConfigurations the cluster's current configurations (config-type to properties)
   * @param services               the names of the services whose identities should be ensured
   * @return true (always; failures are reported via exceptions)
   * @throws KerberosInvalidConfigurationException if the Kerberos configuration is invalid
   * @throws AmbariException                       if the KDC operation handler cannot be opened
   *                                               or identity creation fails
   */
  @Override
  public boolean ensureHeadlessIdentities(Cluster cluster, Map<String, Map<String, String>> existingConfigurations, Set<String> services)
    throws KerberosInvalidConfigurationException, AmbariException {
    KerberosDetails kerberosDetails = getKerberosDetails(cluster, null);
    // Only perform this task if Ambari manages Kerberos identities
    if (kerberosDetails.manageIdentities()) {
      KerberosDescriptor kerberosDescriptor = getKerberosDescriptor(cluster, false);
      Map<String, String> kerberosDescriptorProperties = kerberosDescriptor.getProperties();
      // Work on a deep copy so the caller's configurations are not mutated
      Map<String, Map<String, String>> configurations = addAdditionalConfigurations(cluster,
        deepCopy(existingConfigurations), null, kerberosDescriptorProperties);
      Map<String, String> kerberosConfiguration = kerberosDetails.getKerberosEnvProperties();
      KerberosOperationHandler kerberosOperationHandler = kerberosOperationHandlerFactory.getKerberosOperationHandler(kerberosDetails.getKdcType());
      PrincipalKeyCredential administratorCredential = getKDCAdministratorCredentials(cluster.getClusterName());
      try {
        kerberosOperationHandler.open(administratorCredential, kerberosDetails.getDefaultRealm(), kerberosConfiguration);
      } catch (KerberosOperationException e) {
        String message = String.format("Failed to process the identities, could not properly open the KDC operation handler: %s",
          e.getMessage());
        LOG.error(message);
        throw new AmbariException(message, e);
      }
      // Create the context to use for filtering Kerberos Identities based on the state of the cluster
      Map<String, Object> filterContext = new HashMap<>();
      filterContext.put("configurations", configurations);
      filterContext.put("services", services);
      for (String serviceName : services) {
        // Set properties...
        KerberosServiceDescriptor serviceDescriptor = kerberosDescriptor.getService(serviceName);
        if (serviceDescriptor != null) {
          Map<String, KerberosComponentDescriptor> componentDescriptors = serviceDescriptor.getComponents();
          if (null != componentDescriptors) {
            for (KerberosComponentDescriptor componentDescriptor : componentDescriptors.values()) {
              if (componentDescriptor != null) {
                List<KerberosIdentityDescriptor> identityDescriptors;
                // Handle the service-level Kerberos identities
                // NOTE(review): these are (re)processed once per component in this loop;
                // this looks redundant but presumably idempotent via createIdentity — confirm
                // before restructuring.
                identityDescriptors = serviceDescriptor.getIdentities(true, filterContext);
                if (identityDescriptors != null) {
                  for (KerberosIdentityDescriptor identityDescriptor : identityDescriptors) {
                    createIdentity(identityDescriptor, KerberosPrincipalType.USER, kerberosConfiguration, kerberosOperationHandler, configurations, null);
                  }
                }
                // Handle the component-level Kerberos identities
                identityDescriptors = componentDescriptor.getIdentities(true, filterContext);
                if (identityDescriptors != null) {
                  for (KerberosIdentityDescriptor identityDescriptor : identityDescriptors) {
                    createIdentity(identityDescriptor, KerberosPrincipalType.USER, kerberosConfiguration, kerberosOperationHandler, configurations, null);
                  }
                }
              }
            }
          }
        }
      }
      // create Ambari principal & keytab, configure JAAS only if 'kerberos-env.create_ambari_principal = true'
      if (kerberosDetails.createAmbariPrincipal()) {
        installAmbariIdentities(kerberosDescriptor, kerberosOperationHandler, kerberosConfiguration, configurations, kerberosDetails);
      }
      // The KerberosOperationHandler needs to be closed, if it fails to close ignore the
      // exception since there is little we can or care to do about it now.
      try {
        kerberosOperationHandler.close();
      } catch (KerberosOperationException e) {
        // Ignore this...
      }
    }
    return true;
  }
/**
* Install identities needed by the Ambari server, itself.
* <p>
* The Ambari server needs its own identity for authentication; and, if Kerberos authentication is
* enabled, it needs a SPNEGO principal for ticket validation routines.
* <p>
* Any identities needed by the Ambari server need to be installed separately since an agent may not
* exist on the host and therefore distributing the keytab file(s) to the Ambari server host may
* not be possible using the same workflow used for other hosts in the cluster.
*
* @param kerberosDescriptor the Kerberos descriptor
* @param kerberosOperationHandler the relevant KerberosOperationHandler
* @param kerberosEnvProperties the kerberos-env properties
* @param configurations a map of config-types to property name/value pairs representing
* the existing configurations for the cluster
* @param kerberosDetails a KerberosDetails containing information about relevant Kerberos
* configuration
* @throws AmbariException
*/
private void installAmbariIdentities(KerberosDescriptor kerberosDescriptor,
KerberosOperationHandler kerberosOperationHandler,
Map<String, String> kerberosEnvProperties,
Map<String, Map<String, String>> configurations,
KerberosDetails kerberosDetails) throws AmbariException {
// Install Ambari's identities.....
List<KerberosIdentityDescriptor> ambariIdentities = getAmbariServerIdentities(kerberosDescriptor);
if (!ambariIdentities.isEmpty()) {
String ambariServerHostname = StageUtils.getHostName();
for (KerberosIdentityDescriptor identity : ambariIdentities) {
if (identity != null) {
KerberosPrincipalDescriptor principal = identity.getPrincipalDescriptor();
if (principal != null) {
boolean updateJAASFile = AMBARI_SERVER_KERBEROS_IDENTITY_NAME.equals(identity.getName());
Keytab keytab = createIdentity(identity, principal.getType(), kerberosEnvProperties, kerberosOperationHandler, configurations, ambariServerHostname);
installAmbariIdentity(identity, keytab, configurations, ambariServerHostname, kerberosDetails, updateJAASFile);
if (updateJAASFile) {
try {
KerberosChecker.checkJaasConfiguration();
} catch (AmbariException e) {
LOG.error("Error in Ambari JAAS configuration: " + e.getLocalizedMessage(), e);
}
}
}
}
}
}
}
  /**
   * Performs tasks needed to install the Kerberos identities created for the Ambari server.
   *
   * @param ambariServerIdentity the ambari server's {@link KerberosIdentityDescriptor}
   * @param keytab               the Keyab data for the relevant identity
   * @param configurations       a map of compiled configurations used for variable replacement
   * @param hostname             the hostname to use to replace _HOST in principal names, if necessary
   * @param kerberosDetails      a KerberosDetails containing information about relevant Kerberos configuration
   * @param updateJAASFile       true to update Ambari's JAAS file; false otherwise
   * @throws AmbariException
   * @see ConfigureAmbariIdentitiesServerAction#configureJAAS(String, String, org.apache.ambari.server.serveraction.ActionLog)
   */
  private void installAmbariIdentity(KerberosIdentityDescriptor ambariServerIdentity,
                                     Keytab keytab, Map<String, Map<String, String>> configurations,
                                     String hostname,
                                     KerberosDetails kerberosDetails,
                                     boolean updateJAASFile) throws AmbariException {
    KerberosPrincipalDescriptor principalDescriptor = ambariServerIdentity.getPrincipalDescriptor();
    if (principalDescriptor != null) {
      // Resolve ${...} variables in the principal name
      String principal = variableReplacementHelper.replaceVariables(principalDescriptor.getValue(), configurations);
      // Replace _HOST with the supplied hostname is either exist
      if (!StringUtils.isEmpty(hostname)) {
        principal = principal.replace("_HOST", hostname);
      }
      KerberosKeytabDescriptor keytabDescriptor = ambariServerIdentity.getKeytabDescriptor();
      if (keytabDescriptor != null) {
        String destKeytabFilePath = variableReplacementHelper.replaceVariables(keytabDescriptor.getFile(), configurations);
        File destKeytabFile = new File(destKeytabFilePath);
        ConfigureAmbariIdentitiesServerAction configureAmbariIdentitiesServerAction = injector.getInstance(ConfigureAmbariIdentitiesServerAction.class);
        if (keytab != null) {
          try {
            KerberosOperationHandler operationHandler = kerberosOperationHandlerFactory.getKerberosOperationHandler(kerberosDetails.getKdcType());
            // Write the keytab to a temporary file first; it is deleted in the finally block
            // regardless of the outcome.
            File tmpKeytabFile = createTemporaryFile();
            try {
              if ((operationHandler != null) && operationHandler.createKeytabFile(keytab, tmpKeytabFile)) {
                String ownerName = variableReplacementHelper.replaceVariables(keytabDescriptor.getOwnerName(), configurations);
                String ownerAccess = keytabDescriptor.getOwnerAccess();
                String groupName = variableReplacementHelper.replaceVariables(keytabDescriptor.getGroupName(), configurations);
                String groupAccess = keytabDescriptor.getGroupAccess();
                // TODO check if this reliable
                String componentName = principal.contains(KerberosHelper.AMBARI_SERVER_KERBEROS_IDENTITY_NAME)
                  ? "AMBARI_SERVER_SELF"
                  : RootComponent.AMBARI_SERVER.name();
                ResolvedKerberosPrincipal resolvedKerberosPrincipal = new ResolvedKerberosPrincipal(
                  null,
                  hostname,
                  principal,
                  false,
                  null,
                  RootService.AMBARI.name(),
                  componentName,
                  destKeytabFilePath
                );
                configureAmbariIdentitiesServerAction.installAmbariServerIdentity(resolvedKerberosPrincipal, tmpKeytabFile.getAbsolutePath(), destKeytabFilePath,
                  ownerName, ownerAccess, groupName, groupAccess, null);
                LOG.debug("Successfully created keytab file for {} at {}", principal, destKeytabFile.getAbsolutePath());
              } else {
                LOG.error("Failed to create keytab file for {} at {}", principal, destKeytabFile.getAbsolutePath());
              }
            } finally {
              tmpKeytabFile.delete();
            }
          } catch (KerberosOperationException e) {
            throw new AmbariException(String.format("Failed to create keytab file for %s at %s: %s:",
              principal, destKeytabFile.getAbsolutePath(), e.getLocalizedMessage()), e);
          }
        } else {
          LOG.error("No keytab data is available to create the keytab file for {} at {}", principal, destKeytabFile.getAbsolutePath());
        }
        if (updateJAASFile) {
          configureAmbariIdentitiesServerAction.configureJAAS(principal, destKeytabFile.getAbsolutePath(), null);
        }
      }
    }
  }
@Override
public RequestStageContainer createTestIdentity(Cluster cluster, Map<String, String> commandParamsStage,
RequestStageContainer requestStageContainer)
throws KerberosOperationException, AmbariException {
return handleTestIdentity(cluster, getKerberosDetails(cluster, null), commandParamsStage, requestStageContainer,
new CreatePrincipalsAndKeytabsHandler(KerberosServerAction.OperationType.DEFAULT, false, false, false));
}
@Override
public RequestStageContainer deleteTestIdentity(Cluster cluster, Map<String, String> commandParamsStage,
RequestStageContainer requestStageContainer)
throws KerberosOperationException, AmbariException {
requestStageContainer = handleTestIdentity(cluster, getKerberosDetails(cluster, null), commandParamsStage, requestStageContainer, new DeletePrincipalsAndKeytabsHandler());
return requestStageContainer;
}
  /**
   * Validates the stored KDC administrator credentials for the given cluster.
   * Delegates to the internal overload with no pre-computed KerberosDetails, forcing
   * the details to be resolved from the cluster's configuration.
   */
  @Override
  public void validateKDCCredentials(Cluster cluster) throws KerberosMissingAdminCredentialsException,
    KerberosAdminAuthenticationException,
    KerberosInvalidConfigurationException,
    AmbariException {
    validateKDCCredentials(null, cluster);
  }
@Override
public void setAuthToLocalRules(Cluster cluster,
KerberosDescriptor kerberosDescriptor, String realm,
Map<String, Set<String>> installedServices,
Map<String, Map<String, String>> existingConfigurations,
Map<String, Map<String, String>> kerberosConfigurations,
boolean includePreconfigureData)
throws AmbariException {
boolean processAuthToLocalRules = true;
Map<String, String> kerberosEnvProperties = existingConfigurations.get(KERBEROS_ENV);
if (kerberosEnvProperties.containsKey(MANAGE_AUTH_TO_LOCAL_RULES)) {
processAuthToLocalRules = Boolean.valueOf(kerberosEnvProperties.get(MANAGE_AUTH_TO_LOCAL_RULES));
}
if (kerberosDescriptor != null && processAuthToLocalRules) {
Set<String> authToLocalProperties;
Set<String> authToLocalPropertiesToSet = new HashSet<>();
// a flag to be used by the AuthToLocalBuilder marking whether the default realm rule should contain the //L option, indicating username case insensitive behaviour
// the 'kerberos-env' structure is expected to be available here as it was previously validated
boolean caseInsensitiveUser = Boolean.valueOf(existingConfigurations.get(KERBEROS_ENV).get(CASE_INSENSITIVE_USERNAME_RULES));
// Additional realms that need to be handled according to the Kerberos Descriptor
String additionalRealms = kerberosDescriptor.getProperty("additional_realms");
// Create the context to use for filtering Kerberos Identities based on the state of the cluster
Map<String, Object> filterContext = new HashMap<>();
filterContext.put("configurations", existingConfigurations);
filterContext.put("services", installedServices.keySet());
AuthToLocalBuilder authToLocalBuilder = new AuthToLocalBuilder(realm, additionalRealms, caseInsensitiveUser);
// Add in the default configurations for the services that need to be preconfigured. These
// configurations may be needed while calculating the auth-to-local rules.
Map<String, Map<String, String>> replacements = (includePreconfigureData)
? addConfigurationsForPreProcessedServices(deepCopy(existingConfigurations), cluster, kerberosDescriptor, false)
: existingConfigurations;
// Process top-level identities
addIdentities(authToLocalBuilder, kerberosDescriptor.getIdentities(true, filterContext), null, replacements);
// Determine which properties need to be set
authToLocalProperties = kerberosDescriptor.getAuthToLocalProperties();
if (authToLocalProperties != null) {
authToLocalPropertiesToSet.addAll(authToLocalProperties);
}
// Iterate through the services in the Kerberos descriptor. If a found service is installed
// or marked to be preconfigured, add the relevant data to the auth-to-local rules.
Map<String, KerberosServiceDescriptor> serviceDescriptors = kerberosDescriptor.getServices();
if (serviceDescriptors != null) {
for (KerberosServiceDescriptor serviceDescriptor : serviceDescriptors.values()) {
String serviceName = serviceDescriptor.getName();
boolean preconfigure = includePreconfigureData && serviceDescriptor.shouldPreconfigure();
boolean explicitlyAdded = installedServices.containsKey(serviceName);
// Add this service's identities if we are implicitly preconfigurring the service or if the
// service has been explicitly added to the cluster
if (preconfigure || explicitlyAdded) {
LOG.info("Adding identities for service {} to auth to local mapping [{}]",
serviceName,
(explicitlyAdded) ? "explicit" : "preconfigured");
// Process the service-level Kerberos descriptor
addIdentities(authToLocalBuilder, serviceDescriptor.getIdentities(true, filterContext), null, replacements);
authToLocalProperties = serviceDescriptor.getAuthToLocalProperties();
if (authToLocalProperties != null) {
authToLocalPropertiesToSet.addAll(authToLocalProperties);
}
// Process the relevant component-level Kerberos descriptors
Map<String, KerberosComponentDescriptor> componentDescriptors = serviceDescriptor.getComponents();
if (componentDescriptors != null) {
Set<String> installedServiceComponents = installedServices.get(serviceName);
// Ensure installedComponents is not null....
if (installedServiceComponents == null) {
installedServiceComponents = Collections.emptySet();
}
for (KerberosComponentDescriptor componentDescriptor : componentDescriptors.values()) {
String componentName = componentDescriptor.getName();
// Add this component's identities if we are implicitly preconfiguring the parent
// service or if the component has been explicitly added to the cluster
if (preconfigure || (installedServiceComponents.contains(componentName))) {
LOG.info("Adding identities for component {} to auth to local mapping", componentName);
addIdentities(authToLocalBuilder, componentDescriptor.getIdentities(true, filterContext), null, replacements);
authToLocalProperties = componentDescriptor.getAuthToLocalProperties();
if (authToLocalProperties != null) {
authToLocalPropertiesToSet.addAll(authToLocalProperties);
}
}
}
}
}
}
}
if (!authToLocalPropertiesToSet.isEmpty()) {
for (String authToLocalProperty : authToLocalPropertiesToSet) {
Matcher m = KerberosDescriptor.AUTH_TO_LOCAL_PROPERTY_SPECIFICATION_PATTERN.matcher(authToLocalProperty);
if (m.matches()) {
AuthToLocalBuilder builder;
try {
builder = (AuthToLocalBuilder) authToLocalBuilder.clone();
} catch (CloneNotSupportedException e) {
LOG.error("Failed to clone the AuthToLocalBuilder: " + e.getLocalizedMessage(), e);
throw new AmbariException("Failed to clone the AuthToLocalBuilder: " + e.getLocalizedMessage(), e);
}
String configType = m.group(1);
String propertyName = m.group(2);
if (configType == null) {
configType = "";
}
// Add existing auth_to_local configuration, if set
Map<String, String> existingConfiguration = existingConfigurations.get(configType);
if (existingConfiguration != null) {
builder.addRules(existingConfiguration.get(propertyName));
}
// Add/update descriptor auth_to_local configuration, if set
Map<String, String> kerberosConfiguration = kerberosConfigurations.get(configType);
if (kerberosConfiguration != null) {
builder.addRules(kerberosConfiguration.get(propertyName));
} else {
kerberosConfiguration = new HashMap<>();
kerberosConfigurations.put(configType, kerberosConfiguration);
}
kerberosConfiguration.put(propertyName,
builder.generate(AuthToLocalBuilder.ConcatenationType.translate(m.group(3))));
}
}
}
}
}
@Override
public List<ServiceComponentHost> getServiceComponentHostsToProcess(final Cluster cluster,
                                                                    final KerberosDescriptor kerberosDescriptor,
                                                                    final Map<String, ? extends Collection<String>> serviceComponentFilter,
                                                                    final Collection<String> hostFilter)
    throws AmbariException {
  // Delegate to the generic walker, supplying a predicate that applies the host, service,
  // and component filters in turn. For every filter, null or a "*" entry admits everything.
  return getServiceComponentHosts(cluster, new Command<Boolean, ServiceComponentHost>() {
    @Override
    public Boolean invoke(ServiceComponentHost sch) throws AmbariException {
      if (sch == null) {
        return false;
      }
      // Host filter: reject hosts that are filtered out.
      if ((hostFilter != null) && !hostFilter.contains("*") && !hostFilter.contains(sch.getHostName())) {
        return false;
      }
      String serviceName = sch.getServiceName();
      // Service filter: reject services that are filtered out.
      if ((serviceComponentFilter != null) && !serviceComponentFilter.containsKey("*") && !serviceComponentFilter.containsKey(serviceName)) {
        return false;
      }
      // Only services declared in the Kerberos descriptor are relevant.
      KerberosServiceDescriptor serviceDescriptor = kerberosDescriptor.getService(serviceName);
      if (serviceDescriptor == null) {
        return false;
      }
      // Component filter: a null filter (or a "*" service entry) admits every component.
      Collection<String> componentFilter = ((serviceComponentFilter == null) || serviceComponentFilter.containsKey("*"))
          ? null
          : serviceComponentFilter.get(serviceName);
      return (componentFilter == null) || componentFilter.contains("*") || componentFilter.contains(sch.getServiceComponentName());
    }
  });
}
/**
 * Find the {@link ServiceComponentHost}s for the cluster, filtering using the
 * supplied "should include" command (<code>shouldIncludeCommand</code>).
 * <p>
 * If <code>shouldIncludeCommand</code> is <code>null</code>, no filtering will be performed causing
 * all found {@link ServiceComponentHost}s to be returned.
 *
 * @param cluster              the cluster
 * @param shouldIncludeCommand the filtering logic
 * @return a list of (filtered) {@link ServiceComponentHost}s
 * @throws AmbariException if an error occurs
 */
private List<ServiceComponentHost> getServiceComponentHosts(Cluster cluster,
                                                            Command<Boolean, ServiceComponentHost> shouldIncludeCommand)
    throws AmbariException {
  List<ServiceComponentHost> serviceComponentHostsToProcess = new ArrayList<>();
  // Get the hosts in the cluster
  Collection<Host> hosts = cluster.getHosts();
  if ((hosts != null) && !hosts.isEmpty()) {
    // Iterate over the hosts in the cluster to find the components installed in each.
    for (Host host : hosts) {
      String hostname = host.getHostName();
      // Get a list of components on the current host
      List<ServiceComponentHost> serviceComponentHosts = cluster.getServiceComponentHosts(hostname);
      if ((serviceComponentHosts != null) && !serviceComponentHosts.isEmpty()) {
        // Iterate over the components installed on the current host and execute the shouldIncludeCommand
        // Command (if supplied) to get the desired ServiceComponentHost instances.
        for (ServiceComponentHost sch : serviceComponentHosts) {
          if ((shouldIncludeCommand == null) || shouldIncludeCommand.invoke(sch)) {
            serviceComponentHostsToProcess.add(sch);
          }
        }
      }
    }
  }
  return serviceComponentHostsToProcess;
}
@Override
public Set<String> getHostsWithValidKerberosClient(Cluster cluster)
    throws AmbariException {
  // Collect the names of all hosts whose KERBEROS_CLIENT component is in the INSTALLED state.
  Set<String> result = new HashSet<>();
  List<ServiceComponentHost> kerberosClients =
      cluster.getServiceComponentHosts(Service.Type.KERBEROS.name(), Role.KERBEROS_CLIENT.name());
  if (kerberosClients != null) {
    for (ServiceComponentHost kerberosClient : kerberosClients) {
      if (State.INSTALLED == kerberosClient.getState()) {
        result.add(kerberosClient.getHostName());
      }
    }
  }
  return result;
}
/**
 * Gets the composite (stack plus user-supplied updates) Kerberos descriptor for the cluster.
 * <p>
 * Identity <i>when</i> clauses are not evaluated by this variant; see the overload that
 * accepts <code>evaluateWhenClauses</code>.
 *
 * @param cluster                 the cluster whose descriptor should be built
 * @param includePreconfigureData true to include preconfigure data in the descriptor
 * @return the composite Kerberos descriptor
 * @throws AmbariException if an error occurs while building the descriptor
 */
@Override
public KerberosDescriptor getKerberosDescriptor(Cluster cluster, boolean includePreconfigureData) throws AmbariException {
  return getKerberosDescriptor(KerberosDescriptorType.COMPOSITE, cluster, false, null, includePreconfigureData);
}
/**
 * Gets the Kerberos descriptor of the requested type for the cluster, optionally pruning
 * identities whose <i>when</i> clauses evaluate to false.
 *
 * @param kerberosDescriptorType  which descriptor to build (stack, user, or composite)
 * @param cluster                 the relevant cluster
 * @param evaluateWhenClauses     true to evaluate identity <i>when</i> clauses and remove
 *                                identities whose clause evaluates to false
 * @param additionalServices      extra service names to treat as present while evaluating
 *                                <i>when</i> clauses (nullable)
 * @param includePreconfigureData true to include preconfigure data in the descriptor
 * @return the requested Kerberos descriptor
 * @throws AmbariException if services span multiple stacks or another error occurs
 */
@Override
public KerberosDescriptor getKerberosDescriptor(KerberosDescriptorType kerberosDescriptorType, Cluster cluster,
                                                boolean evaluateWhenClauses, Collection<String> additionalServices,
                                                boolean includePreconfigureData)
    throws AmbariException {
  // !!! FIXME in a per-service view, what does this become?
  Set<StackId> stackIds = new HashSet<>();
  for (Service service : cluster.getServices().values()) {
    stackIds.add(service.getDesiredStackId());
  }
  // A single, unique stack is required to resolve the stack-level descriptor.
  if (1 != stackIds.size()) {
    throw new AmbariException("Services are deployed from multiple stacks and cannot determine a unique one.");
  }
  StackId stackId = stackIds.iterator().next();
  KerberosDescriptor kerberosDescriptor = getKerberosDescriptor(kerberosDescriptorType, cluster, stackId, includePreconfigureData);
  if (evaluateWhenClauses) {
    Set<String> services = new HashSet<>(cluster.getServices().keySet());
    if (additionalServices != null) {
      services.addAll(additionalServices);
    }
    // Build the context needed to filter out Kerberos identities...
    // This includes the current set of configurations for the cluster and the set of installed services
    Map<String, Object> context = new HashMap<>();
    context.put("configurations", calculateConfigurations(cluster, null, kerberosDescriptor, false, false));
    context.put("services", services);
    // Get the Kerberos identities that need to be pruned
    Map<String, Set<String>> identitiesToRemove = processWhenClauses("", kerberosDescriptor, context, new HashMap<>());
    // Prune off the Kerberos identities that need to be removed due to the evaluation of its _when_ clause
    for (Map.Entry<String, Set<String>> identity : identitiesToRemove.entrySet()) {
      // The key is a "/"-delimited path to the container holding the identities to remove.
      String[] path = identity.getKey().split("/");
      AbstractKerberosDescriptorContainer container = null;
      // Follow the path to the container that contains the identities to remove
      for (String name : path) {
        if (container == null) {
          // The first path element anchors traversal at the root descriptor.
          container = kerberosDescriptor;
        } else {
          container = container.getChildContainer(name);
          if (container == null) {
            // The path no longer resolves; nothing to remove for this entry.
            break;
          }
        }
      }
      // Remove the relevant identities from the found container
      if (container != null) {
        for (String identityName : identity.getValue()) {
          container.removeIdentity(identityName);
        }
      }
    }
  }
  return kerberosDescriptor;
}
@Override
public KerberosDescriptor getKerberosDescriptor(KerberosDescriptorType kerberosDescriptorType, Cluster cluster,
                                                StackId stackId, boolean includePreconfigureData) throws AmbariException {
  // Resolve the stack-level and user-level pieces independently; either may be absent
  // depending on the requested descriptor type.
  KerberosDescriptor stackDescriptor = null;
  if ((kerberosDescriptorType == KerberosDescriptorType.STACK) || (kerberosDescriptorType == KerberosDescriptorType.COMPOSITE)) {
    stackDescriptor = getKerberosDescriptorFromStack(stackId, includePreconfigureData);
  }
  KerberosDescriptor userDescriptor = null;
  if ((kerberosDescriptorType == KerberosDescriptorType.USER) || (kerberosDescriptorType == KerberosDescriptorType.COMPOSITE)) {
    userDescriptor = getKerberosDescriptorUpdates(cluster);
  }
  // For COMPOSITE requests the user-supplied updates are overlaid onto the stack descriptor.
  return combineKerberosDescriptors(stackDescriptor, userDescriptor);
}
@Override
public Map<String, Map<String, String>> mergeConfigurations(Map<String, Map<String, String>> configurations,
                                                            Map<String, KerberosConfigurationDescriptor> updates,
                                                            Map<String, Map<String, String>> replacements,
                                                            Set<String> configurationTypeFilter)
    throws AmbariException {
  // Nothing to merge when no updates were supplied; hand back the input untouched.
  if ((updates == null) || updates.isEmpty()) {
    return configurations;
  }
  if (configurations == null) {
    configurations = new HashMap<>();
  }
  for (Map.Entry<String, KerberosConfigurationDescriptor> update : updates.entrySet()) {
    String configType = update.getKey();
    // Honor the optional type filter: a null filter admits every configuration type.
    if ((configurationTypeFilter != null) && !configurationTypeFilter.contains(configType)) {
      continue;
    }
    KerberosConfigurationDescriptor descriptor = update.getValue();
    if (descriptor != null) {
      // Merge the descriptor's properties (with variable replacements) into the target type.
      mergeConfigurations(configurations, configType, descriptor.getProperties(), replacements);
    }
  }
  return configurations;
}
/**
 * Merges in the default configurations of Kerberos-descriptor services (and their components)
 * that are not yet installed in the cluster but are flagged to be preconfigured. Only
 * configuration types already present in <code>replacements</code> are merged.
 *
 * @param configurations     configurations to update; created if null and updates are needed
 * @param replacements       existing configuration values used for variable replacement
 * @param cluster            the relevant cluster
 * @param kerberosDescriptor the Kerberos descriptor; looked up (with preconfigure data) if null
 * @return the updated configurations map
 * @throws AmbariException if an error occurs
 */
@Override
public Map<String, Map<String, String>> processPreconfiguredServiceConfigurations(Map<String, Map<String, String>> configurations,
                                                                                  Map<String, Map<String, String>> replacements,
                                                                                  Cluster cluster,
                                                                                  KerberosDescriptor kerberosDescriptor)
    throws AmbariException {
  // Ensure the Kerberos descriptor exists....
  if (kerberosDescriptor == null) {
    kerberosDescriptor = getKerberosDescriptor(cluster, true);
  }
  Map<String, KerberosServiceDescriptor> serviceDescriptors = kerberosDescriptor.getServices();
  if (serviceDescriptors != null) {
    if (configurations == null) {
      configurations = new HashMap<>();
    }
    // Add in the default configurations for the services that need to be preconfigured. These
    // configurations may be needed while calculating the auth-to-local rules.
    // NOTE(review): assumes replacements is non-null (replacements.keySet() below) — TODO confirm callers.
    Map<String, Map<String, String>> replacementsWithDefaults = addConfigurationsForPreProcessedServices(deepCopy(replacements), cluster, kerberosDescriptor, true);
    Map<String, Service> existingServices = cluster.getServices();
    for (KerberosServiceDescriptor serviceDescriptor : serviceDescriptors.values()) {
      String serviceName = serviceDescriptor.getName();
      boolean shouldPreconfigure = serviceDescriptor.shouldPreconfigure();
      // Only services that are absent from the cluster AND flagged for preconfiguration contribute.
      if (!existingServices.containsKey(serviceName) && shouldPreconfigure) {
        configurations = mergeConfigurations(configurations, serviceDescriptor.getConfigurations(), replacementsWithDefaults, replacements.keySet());
        // Merge component-level configurations of the preconfigured service as well.
        Map<String, KerberosComponentDescriptor> componentDescriptors = serviceDescriptor.getComponents();
        if (componentDescriptors != null) {
          for (KerberosComponentDescriptor componentDescriptor : componentDescriptors.values()) {
            configurations = mergeConfigurations(configurations, componentDescriptor.getConfigurations(), replacementsWithDefaults, replacements.keySet());
          }
        }
      }
    }
  }
  return configurations;
}
/**
 * Processes the given Kerberos identities for a single host/service/component: resolves each
 * principal and keytab (variable replacement against <code>configurations</code>), records the
 * resolved keytab data in <code>resolvedKeytabs</code> (merging principals when several
 * identities share one keytab file), optionally appends a record to the identity data file,
 * and collects principal-/keytab-related configuration values into
 * <code>kerberosConfigurations</code>.
 *
 * @param kerberosIdentityDataFileWriter writer for the identity data file, or null to skip writing
 * @param identities                     the identities to process (may be null)
 * @param identityFilter                 optional filter of identity paths; null admits all
 * @param hostname                       the host the identities are being resolved for
 * @param hostId                         the id of that host (nullable)
 * @param serviceName                    the relevant service name
 * @param componentName                  the relevant component name
 * @param kerberosConfigurations         map to collect principal/keytab configuration values into
 * @param configurations                 existing configurations used for variable replacement
 * @param resolvedKeytabs                map of keytab file path to resolved keytab data; updated in place
 * @param realm                          the default realm, substituted for _REALM in principals
 * @return the number of identities added
 * @throws IOException if an error occurs while writing a record to the identity data file
 */
@Override
public int addIdentities(KerberosIdentityDataFileWriter kerberosIdentityDataFileWriter,
                         Collection<KerberosIdentityDescriptor> identities,
                         Collection<String> identityFilter, String hostname, Long hostId, String serviceName,
                         String componentName, Map<String, Map<String, String>> kerberosConfigurations,
                         Map<String, Map<String, String>> configurations,
                         Map<String, ResolvedKerberosKeytab> resolvedKeytabs, String realm)
    throws IOException {
  int identitiesAdded = 0;
  if (identities != null) {
    for (KerberosIdentityDescriptor identity : identities) {
      // If there is no filter or the filter contains the current identity's path...
      if ((identityFilter == null) || identityFilter.contains(identity.getPath())) {
        KerberosPrincipalDescriptor principalDescriptor = identity.getPrincipalDescriptor();
        String principal = null;
        String principalType = null;
        String principalConfiguration = null;
        if (principalDescriptor != null) {
          principal = variableReplacementHelper.replaceVariables(principalDescriptor.getValue(), configurations);
          principalType = KerberosPrincipalType.translate(principalDescriptor.getType());
          principalConfiguration = variableReplacementHelper.replaceVariables(principalDescriptor.getConfiguration(), configurations);
        }
        // Identities without a resolvable principal are skipped entirely.
        if (principal != null) {
          KerberosKeytabDescriptor keytabDescriptor = identity.getKeytabDescriptor();
          String keytabFilePath = null;
          String keytabFileOwnerName = null;
          String keytabFileOwnerAccess = null;
          String keytabFileGroupName = null;
          String keytabFileGroupAccess = null;
          String keytabFileConfiguration = null;
          if (keytabDescriptor != null) {
            keytabFilePath = variableReplacementHelper.replaceVariables(keytabDescriptor.getFile(), configurations);
            keytabFileOwnerName = variableReplacementHelper.replaceVariables(keytabDescriptor.getOwnerName(), configurations);
            keytabFileOwnerAccess = variableReplacementHelper.replaceVariables(keytabDescriptor.getOwnerAccess(), configurations);
            keytabFileGroupName = variableReplacementHelper.replaceVariables(keytabDescriptor.getGroupName(), configurations);
            keytabFileGroupAccess = variableReplacementHelper.replaceVariables(keytabDescriptor.getGroupAccess(), configurations);
            keytabFileConfiguration = variableReplacementHelper.replaceVariables(keytabDescriptor.getConfiguration(), configurations);
          }
          if (keytabFileOwnerName == null || keytabFileGroupName == null) {
            // FIX: keytabDescriptor may be null here (that is one way the owner/group can be null),
            // so guard the getName() call to avoid a NullPointerException while logging.
            LOG.warn("Missing owner ({}) or group name ({}) of kerberos descriptor {}", keytabFileOwnerName, keytabFileGroupName,
                (keytabDescriptor == null) ? null : keytabDescriptor.getName());
          }
          // Evaluate the principal "pattern" found in the record to generate the "evaluated principal"
          // by replacing the _HOST and _REALM variables.
          String evaluatedPrincipal = principal.replace("_HOST", hostname).replace("_REALM", realm);
          ResolvedKerberosKeytab resolvedKeytab = new ResolvedKerberosKeytab(
              keytabFilePath,
              keytabFileOwnerName,
              keytabFileOwnerAccess,
              keytabFileGroupName,
              keytabFileGroupAccess,
              Sets.newHashSet(new ResolvedKerberosPrincipal(
                      hostId,
                      hostname,
                      evaluatedPrincipal,
                      "service".equalsIgnoreCase(principalType),
                      null,
                      serviceName,
                      componentName,
                      keytabFilePath
                  )
              ),
              serviceName.equalsIgnoreCase(RootService.AMBARI.name()),
              componentName.equalsIgnoreCase("AMBARI_SERVER_SELF")
          );
          if (resolvedKeytabs.containsKey(keytabFilePath)) {
            // Another identity already claimed this keytab file; validate that the ownership and
            // access attributes agree, then merge this identity's principal into the existing entry.
            ResolvedKerberosKeytab sameKeytab = resolvedKeytabs.get(keytabFilePath);
            // validating owner and group
            boolean differentOwners = false;
            String warnTemplate = "Keytab '{}' on host '{}' has different {}, originally set to '{}' and '{}:{}' has '{}', using '{}'";
            if (!resolvedKeytab.getOwnerName().equals(sameKeytab.getOwnerName())) {
              LOG.warn(warnTemplate,
                  keytabFilePath, hostname, "owners", sameKeytab.getOwnerName(),
                  serviceName, componentName, resolvedKeytab.getOwnerName(),
                  sameKeytab.getOwnerName());
              differentOwners = true;
            }
            if (!resolvedKeytab.getOwnerAccess().equals(sameKeytab.getOwnerAccess())) {
              LOG.warn(warnTemplate,
                  keytabFilePath, hostname, "owner access", sameKeytab.getOwnerAccess(),
                  serviceName, componentName, resolvedKeytab.getOwnerAccess(),
                  sameKeytab.getOwnerAccess());
            }
            // TODO probably fail on group difference. Some services can inject its principals to same keytab, but
            // TODO with different owners, so make sure that keytabs are accessible through group acls
            // TODO this includes same group name and group 'r' mode
            if (!StringUtils.equals(resolvedKeytab.getGroupName(), sameKeytab.getGroupName())) {
              if (differentOwners) {
                LOG.error(warnTemplate,
                    keytabFilePath, hostname, "groups", sameKeytab.getGroupName(),
                    serviceName, componentName, resolvedKeytab.getGroupName(),
                    sameKeytab.getGroupName());
              } else {
                LOG.warn(warnTemplate,
                    keytabFilePath, hostname, "groups", sameKeytab.getGroupName(),
                    serviceName, componentName, resolvedKeytab.getGroupName(),
                    sameKeytab.getGroupName());
              }
            }
            if (!StringUtils.equals(resolvedKeytab.getGroupAccess(), sameKeytab.getGroupAccess())) {
              if (differentOwners) {
                if (!sameKeytab.getGroupAccess().contains("r")) {
                  LOG.error("Keytab '{}' on host '{}' referenced by multiple identities which have different owners," +
                          "but 'r' attribute missing for group. Make sure all users (that need this keytab) are in '{}' +" +
                          "group and keytab can be read by this group",
                      keytabFilePath,
                      hostname,
                      sameKeytab.getGroupName()
                  );
                }
                LOG.error(warnTemplate,
                    keytabFilePath, hostname, "group access", sameKeytab.getGroupAccess(),
                    serviceName, componentName, resolvedKeytab.getGroupAccess(),
                    sameKeytab.getGroupAccess());
              } else {
                LOG.warn(warnTemplate,
                    keytabFilePath, hostname, "group access", sameKeytab.getGroupAccess(),
                    serviceName, componentName, resolvedKeytab.getGroupAccess(),
                    sameKeytab.getGroupAccess());
              }
            }
            // end validating
            // merge principal to keytab
            sameKeytab.mergePrincipals(resolvedKeytab);
            // ensure that keytab file on ambari-server host creating jass file
            if (sameKeytab.isMustWriteAmbariJaasFile() || resolvedKeytab.isMustWriteAmbariJaasFile()) {
              sameKeytab.setMustWriteAmbariJaasFile(true);
            }
            // ensure that this keytab is ambari-keytab, server will distribute it manually
            if (sameKeytab.isAmbariServerKeytab() || resolvedKeytab.isAmbariServerKeytab()) {
              sameKeytab.setAmbariServerKeytab(true);
            }
          } else {
            resolvedKeytabs.put(keytabFilePath, resolvedKeytab);
            LOG.info("Keytab {} owner:'{}:{}', group:'{}:{}' is defined", keytabFilePath,
                keytabFileOwnerName, keytabFileOwnerAccess, keytabFileGroupName, keytabFileGroupAccess);
          }
          // Append an entry to the action data file builder...
          // TODO obsolete, move to ResolvedKerberosKeytab
          if (kerberosIdentityDataFileWriter != null) {
            kerberosIdentityDataFileWriter.writeRecord(
                hostname,
                serviceName,
                componentName,
                evaluatedPrincipal,
                principalType,
                keytabFilePath,
                keytabFileOwnerName,
                keytabFileOwnerAccess,
                keytabFileGroupName,
                keytabFileGroupAccess,
                "true");
          }
          // Add the principal-related configuration to the map of configurations
          mergeConfiguration(kerberosConfigurations, principalConfiguration, principal, null);
          // Add the keytab-related configuration to the map of configurations
          mergeConfiguration(kerberosConfigurations, keytabFileConfiguration, keytabFilePath, null);
          identitiesAdded++;
        }
      }
    }
  }
  return identitiesAdded;
}
@Override
public Map<String, Map<String, String>> calculateConfigurations(Cluster cluster, String hostname,
                                                                KerberosDescriptor kerberosDescriptor,
                                                                boolean includePreconfigureData,
                                                                boolean calculateClusterHostInfo)
    throws AmbariException {
  // Start from the cluster's existing configurations, augmented with additional derived values
  // and the Kerberos descriptor's top-level properties (when a descriptor was supplied).
  Map<String, String> descriptorProperties = (kerberosDescriptor == null) ? null : kerberosDescriptor.getProperties();
  Map<String, Map<String, String>> configurations = addAdditionalConfigurations(
      cluster,
      calculateExistingConfigurations(cluster, hostname),
      hostname,
      descriptorProperties);
  // Optionally overlay default configurations for services marked to be preconfigured.
  if (includePreconfigureData) {
    configurations = addConfigurationsForPreProcessedServices(configurations, cluster, kerberosDescriptor, calculateClusterHostInfo);
  }
  return configurations;
}
private Map<String, String> principalNames(Cluster cluster, Map<String, Map<String, String>> configuration) throws AmbariException {
  // Resolve every principal declared by the cluster's Kerberos descriptor by replacing
  // configuration variables in each declared value.
  Map<String, String> resolved = new HashMap<>();
  Map<String, String> declared = getKerberosDescriptor(cluster, false).principals();
  for (Map.Entry<String, String> principal : declared.entrySet()) {
    resolved.put(principal.getKey(), variableReplacementHelper.replaceVariables(principal.getValue(), configuration));
  }
  return resolved;
}
/**
 * Gets the active Kerberos identities for the named cluster, keyed by hostname. For each host,
 * the identities' principal and keytab templates are resolved against that host's calculated
 * configurations; duplicate principal/keytab pairs are collapsed. When the Ambari server host
 * is included and kerberos-env allows it, the Ambari server's own identities are added too.
 *
 * @param clusterName      the name of the cluster (required)
 * @param hostName         a specific host to process, or null for all cluster hosts plus the
 *                         Ambari server host
 * @param serviceName      optional service filter passed to the identity lookup
 * @param componentName    optional component filter passed to the identity lookup
 * @param replaceHostNames true to substitute the concrete hostname for _HOST in principals
 * @return a map of hostname to that host's active identity descriptors
 * @throws AmbariException       if the cluster cannot be resolved or identity calculation fails
 * @throws IllegalArgumentException if clusterName is null or empty
 */
@Override
public Map<String, Collection<KerberosIdentityDescriptor>> getActiveIdentities(String clusterName,
                                                                               String hostName,
                                                                               String serviceName,
                                                                               String componentName,
                                                                               boolean replaceHostNames)
    throws AmbariException {
  if ((clusterName == null) || clusterName.isEmpty()) {
    throw new IllegalArgumentException("Invalid argument, cluster name is required");
  }
  Cluster cluster = clusters.getCluster(clusterName);
  if (cluster == null) {
    throw new AmbariException(String.format("The cluster object for the cluster name %s is not available", clusterName));
  }
  Map<String, Collection<KerberosIdentityDescriptor>> activeIdentities = new HashMap<>();
  // Only calculate the active identities if the kerberos-env configuration is available. Else
  // important information like the realm will be missing (kerberos-env/realm)
  Config kerberosEnvConfig = cluster.getDesiredConfigByType(KERBEROS_ENV);
  if (kerberosEnvConfig == null) {
    // NOTE(review): this message has one placeholder but three arguments are supplied; the
    // extra arguments are ignored by SLF4J — consider trimming.
    LOG.debug("Calculating the active identities for {} is being skipped since the kerberos-env configuration is not available",
        clusterName, cluster.getSecurityType().name(), SecurityType.KERBEROS.name());
  } else {
    // Determine the set of hosts to process: either the single requested host, or all cluster
    // hosts with the Ambari server host appended when it is not already a cluster member.
    Collection<String> hosts;
    String ambariServerHostname = StageUtils.getHostName();
    if (hostName == null) {
      Map<String, Host> hostMap = clusters.getHostsForCluster(clusterName);
      if (hostMap == null) {
        hosts = Collections.emptySet();
      } else {
        hosts = hostMap.keySet();
      }
      if (!hosts.contains(ambariServerHostname)) {
        Collection<String> extendedHosts = new ArrayList<>(hosts.size() + 1);
        extendedHosts.addAll(hosts);
        extendedHosts.add(ambariServerHostname);
        hosts = extendedHosts;
      }
    } else {
      hosts = Collections.singleton(hostName);
    }
    if (!hosts.isEmpty()) {
      KerberosDescriptor kerberosDescriptor = getKerberosDescriptor(cluster, false);
      if (kerberosDescriptor != null) {
        Set<String> existingServices = cluster.getServices().keySet();
        for (String hostname : hosts) {
          // Calculate the current host-specific configurations. These will be used to replace
          // variables within the Kerberos descriptor data
          Map<String, Map<String, String>> configurations = calculateConfigurations(cluster,
              hostname,
              kerberosDescriptor,
              false,
              false);
          // Create the context to use for filtering Kerberos Identities based on the state of the cluster
          Map<String, Object> filterContext = new HashMap<>();
          filterContext.put("configurations", configurations);
          filterContext.put("services", existingServices);
          // Keyed by "principal|keytab" so duplicate resolved identities collapse to one entry.
          Map<String, KerberosIdentityDescriptor> hostActiveIdentities = new HashMap<>();
          List<KerberosIdentityDescriptor> identities = getActiveIdentities(cluster, hostname,
              serviceName, componentName, kerberosDescriptor, filterContext);
          if (hostname.equals(ambariServerHostname)) {
            // Determine if we should _calculate_ the Ambari service identities.
            // If kerberos-env/create_ambari_principal is not set to false the identity should be calculated.
            if (createAmbariIdentities(kerberosEnvConfig.getProperties())) {
              List<KerberosIdentityDescriptor> ambariIdentities = getAmbariServerIdentities(kerberosDescriptor);
              if (ambariIdentities != null) {
                identities.addAll(ambariIdentities);
              }
            }
          }
          if (!identities.isEmpty()) {
            for (KerberosIdentityDescriptor identity : identities) {
              KerberosPrincipalDescriptor principalDescriptor = identity.getPrincipalDescriptor();
              String principal = null;
              if (principalDescriptor != null) {
                principal = variableReplacementHelper.replaceVariables(principalDescriptor.getValue(), configurations);
              }
              // Identities without a resolvable principal are skipped.
              if (principal != null) {
                KerberosKeytabDescriptor keytabDescriptor = identity.getKeytabDescriptor();
                String keytabFile = null;
                if (keytabDescriptor != null) {
                  keytabFile = variableReplacementHelper.replaceVariables(keytabDescriptor.getFile(), configurations);
                }
                if (replaceHostNames) {
                  principal = principal.replace("_HOST", hostname);
                }
                String uniqueKey = String.format("%s|%s", principal, (keytabFile == null) ? "" : keytabFile);
                if (!hostActiveIdentities.containsKey(uniqueKey)) {
                  KerberosPrincipalType principalType = principalDescriptor.getType();
                  // Assume the principal is a service principal if not specified
                  if (principalType == null) {
                    principalType = KerberosPrincipalType.SERVICE;
                  }
                  // Build fully-resolved principal/keytab descriptors so callers see concrete values.
                  KerberosPrincipalDescriptor resolvedPrincipalDescriptor =
                      new KerberosPrincipalDescriptor(principal,
                          principalType,
                          variableReplacementHelper.replaceVariables(principalDescriptor.getConfiguration(), configurations),
                          variableReplacementHelper.replaceVariables(principalDescriptor.getLocalUsername(), configurations));
                  KerberosKeytabDescriptor resolvedKeytabDescriptor;
                  if (keytabFile == null) {
                    resolvedKeytabDescriptor = null;
                  } else {
                    resolvedKeytabDescriptor =
                        new KerberosKeytabDescriptor(
                            keytabFile,
                            variableReplacementHelper.replaceVariables(keytabDescriptor.getOwnerName(), configurations),
                            variableReplacementHelper.replaceVariables(keytabDescriptor.getOwnerAccess(), configurations),
                            variableReplacementHelper.replaceVariables(keytabDescriptor.getGroupName(), configurations),
                            variableReplacementHelper.replaceVariables(keytabDescriptor.getGroupAccess(), configurations),
                            variableReplacementHelper.replaceVariables(keytabDescriptor.getConfiguration(), configurations),
                            keytabDescriptor.isCachable());
                  }
                  hostActiveIdentities.put(uniqueKey, new KerberosIdentityDescriptor(
                      identity.getName(),
                      identity.getReference(),
                      resolvedPrincipalDescriptor,
                      resolvedKeytabDescriptor,
                      identity.getWhen()));
                }
              }
            }
          }
          activeIdentities.put(hostname, hostActiveIdentities.values());
        }
      }
    }
  }
  return activeIdentities;
}
@Override
public List<KerberosIdentityDescriptor> getAmbariServerIdentities(KerberosDescriptor kerberosDescriptor) throws AmbariException {
  // Gather the identities declared for the AMBARI pseudo-service and, within it, the
  // AMBARI_SERVER component. Returns an empty list when neither declares identities.
  List<KerberosIdentityDescriptor> identities = new ArrayList<>();
  KerberosServiceDescriptor ambariService = kerberosDescriptor.getService(RootService.AMBARI.name());
  if (ambariService != null) {
    List<KerberosIdentityDescriptor> serviceIdentities = ambariService.getIdentities(true, null);
    if (serviceIdentities != null) {
      identities.addAll(serviceIdentities);
    }
    KerberosComponentDescriptor ambariServerComponent = ambariService.getComponent(RootComponent.AMBARI_SERVER.name());
    if (ambariServerComponent != null) {
      List<KerberosIdentityDescriptor> componentIdentities = ambariServerComponent.getIdentities(true, null);
      if (componentIdentities != null) {
        identities.addAll(componentIdentities);
      }
    }
  }
  return identities;
}
@Override
public boolean createAmbariIdentities(Map<String, String> kerberosEnvProperties) {
  // Ambari's own identities are created unless kerberos-env explicitly sets the
  // create-Ambari-principal property to "false" (case-insensitive).
  if (kerberosEnvProperties == null) {
    return true;
  }
  return !"false".equalsIgnoreCase(kerberosEnvProperties.get(CREATE_AMBARI_PRINCIPAL));
}
/**
 * Gets the previously stored KDC administrator credential.
 * <p/>
 * This implementation accesses the secure CredentialStoreService instance to get the data.
 *
 * @param clusterName the name of the relevant cluster
 * @return a PrincipalKeyCredential or null, if the KDC administrator credential is not available
 * @throws AmbariException if an error occurs while retrieving the credentials
 */
@Override
public PrincipalKeyCredential getKDCAdministratorCredentials(String clusterName) throws AmbariException {
  Credential credential = credentialStoreService.getCredential(clusterName, KDC_ADMINISTRATOR_CREDENTIAL_ALIAS);
  // Only principal/key-style credentials are usable here; anything else (including no
  // stored credential at all) yields null.
  return (credential instanceof PrincipalKeyCredential)
      ? (PrincipalKeyCredential) credential
      : null;
}
/**
 * Creates and saves underlying {@link org.apache.ambari.server.orm.entities.KerberosPrincipalEntity},
 * {@link org.apache.ambari.server.orm.entities.KerberosKeytabEntity} entities in JPA storage.
 *
 * @param resolvedKerberosKeytab kerberos keytab to be persisted
 */
@Override
public void createResolvedKeytab(ResolvedKerberosKeytab resolvedKerberosKeytab) {
  // Persist the keytab entity only when one does not already exist for this file path.
  if (kerberosKeytabDAO.find(resolvedKerberosKeytab.getFile()) == null) {
    KerberosKeytabEntity kke = new KerberosKeytabEntity(resolvedKerberosKeytab.getFile());
    kke.setAmbariServerKeytab(resolvedKerberosKeytab.isAmbariServerKeytab());
    kke.setWriteAmbariJaasFile(resolvedKerberosKeytab.isMustWriteAmbariJaasFile());
    kke.setOwnerName(resolvedKerberosKeytab.getOwnerName());
    kke.setOwnerAccess(resolvedKerberosKeytab.getOwnerAccess());
    kke.setGroupName(resolvedKerberosKeytab.getGroupName());
    kke.setGroupAccess(resolvedKerberosKeytab.getGroupAccess());
    kerberosKeytabDAO.create(kke);
  }
  // Persist each principal (if new) and link it to the keytab with its service/component mapping.
  for (ResolvedKerberosPrincipal principal : resolvedKerberosKeytab.getPrincipals()) {
    if (!kerberosPrincipalDAO.exists(principal.getPrincipal())) {
      kerberosPrincipalDAO.create(principal.getPrincipal(), principal.isService());
    }
    for (Map.Entry<String, String> mappingEntry : principal.getServiceMapping().entries()) {
      String serviceName = mappingEntry.getKey();
      // Host association is optional; principals without a host id get a null host entity.
      HostEntity hostEntity = principal.getHostId() != null ? hostDAO.findById(principal.getHostId()) : null;
      // NOTE(review): the keytab entity is re-fetched on every mapping iteration; presumably
      // this refreshes JPA state — confirm whether it could be hoisted out of the loop.
      KerberosKeytabEntity kke = kerberosKeytabDAO.find(resolvedKerberosKeytab.getFile());
      KerberosKeytabPrincipalEntity kkp = kerberosKeytabPrincipalDAO.findOrCreate(kke, hostEntity, kerberosPrincipalDAO.find(principal.getPrincipal()));
      // Only merge the keytab/principal link when the mapping actually changed.
      if(kkp.putServiceMapping(serviceName, mappingEntry.getValue())) {
        kerberosKeytabPrincipalDAO.merge(kkp);
      }
      kerberosKeytabDAO.merge(kke);
    }
  }
}
/**
 * No-op in this implementation: stale keytabs are not removed.
 *
 * @param expectedKeytabs the set of keytabs expected to remain (unused)
 */
@Override
public void removeStaleKeytabs(Collection<ResolvedKerberosKeytab> expectedKeytabs) {
  // Intentionally empty — stale keytab removal is not implemented here.
}
@Override
public Map<String, Set<String>> translateConfigurationSpecifications(Collection<String> configurationSpecifications) {
  // A null input yields a null result (not an empty map), mirroring the original contract.
  if (configurationSpecifications == null) {
    return null;
  }
  Map<String, Set<String>> translation = new HashMap<>();
  for (String specification : configurationSpecifications) {
    Matcher matcher = KerberosDescriptor.AUTH_TO_LOCAL_PROPERTY_SPECIFICATION_PATTERN.matcher(specification);
    if (!matcher.matches()) {
      // Specifications that do not match the expected config-type/property form are skipped.
      continue;
    }
    String configType = matcher.group(1);
    String propertyName = matcher.group(2);
    // Specifications without an explicit config type are grouped under the empty-string key.
    if (configType == null) {
      configType = "";
    }
    Set<String> propertyNames = translation.get(configType);
    if (propertyNames == null) {
      propertyNames = new HashSet<>();
      translation.put(configType, propertyNames);
    }
    propertyNames.add(propertyName);
  }
  return translation;
}
/**
 * Creates the principal and cached keytab file for the specified identity, if it is determined to
 * be of the expected type - user (headless) or service.
 * <p/>
 * If the identity is not of the expected type, it will be skipped.
 *
 * @param identityDescriptor the Kerberos identity to process
 * @param expectedType the expected principal type
 * @param kerberosEnvProperties the kerberos-env properties
 * @param kerberosOperationHandler the relevant KerberosOperationHandler
 * @param configurations the existing configurations for the cluster
 * @param hostname the hostname of the host to create the identity for (nullable)
 * @return the relevant keytab data, if successful; otherwise null
 * @throws AmbariException if creating the principal account or its keytab fails
 */
private Keytab createIdentity(KerberosIdentityDescriptor identityDescriptor,
                              KerberosPrincipalType expectedType, Map<String, String> kerberosEnvProperties,
                              KerberosOperationHandler kerberosOperationHandler,
                              Map<String, Map<String, String>> configurations, String hostname)
    throws AmbariException {
  Keytab keytab = null;
  if (identityDescriptor != null) {
    KerberosPrincipalDescriptor principalDescriptor = identityDescriptor.getPrincipalDescriptor();
    if (principalDescriptor != null) {
      // If this principal type is expected, continue, else skip it.
      if (expectedType == principalDescriptor.getType()) {
        // Resolve the principal template against the cluster configurations.
        String principal = variableReplacementHelper.replaceVariables(principalDescriptor.getValue(), configurations);
        // Replace _HOST with the supplied hostname, when a hostname was provided.
        if (!StringUtils.isEmpty(hostname)) {
          principal = principal.replace("_HOST", hostname);
        }
        // If this principal is already in the Ambari database, then don't try to recreate it or its
        // keytab file.
        if (!kerberosPrincipalDAO.exists(principal)) {
          // Delegate account creation to the CreatePrincipalsServerAction.
          CreatePrincipalsServerAction.CreatePrincipalResult result;
          result = injector.getInstance(CreatePrincipalsServerAction.class).createPrincipal(
              principal,
              KerberosPrincipalType.SERVICE.equals(expectedType),
              kerberosEnvProperties,
              kerberosOperationHandler,
              false,
              null);
          if (result == null) {
            throw new AmbariException("Failed to create the account for " + principal);
          } else {
            // Only identities that declare a keytab descriptor get a keytab file created.
            KerberosKeytabDescriptor keytabDescriptor = identityDescriptor.getKeytabDescriptor();
            if (keytabDescriptor != null) {
              keytab = injector.getInstance(CreateKeytabFilesServerAction.class).createKeytab(
                  principal,
                  result.getPassword(),
                  result.getKeyNumber(),
                  kerberosOperationHandler,
                  true,
                  true,
                  null);
              if (keytab == null) {
                throw new AmbariException("Failed to create the keytab for " + principal);
              }
            }
          }
        }
      }
    }
  }
  // Null when the identity was skipped (wrong type, already exists, or no keytab descriptor).
  return keytab;
}
/**
 * Validate the KDC admin credentials.
 * <p/>
 * Opens a connection to the KDC using the stored administrator credentials and verifies they
 * are usable. Each low-level Kerberos failure mode is translated into a specific exception
 * carrying remediation guidance for the operator. This is a no-op when Ambari is not managing
 * Kerberos identities.
 *
 * @param kerberosDetails the KerberosDetails containing information about the Kerberos configuration
 *                        for the cluster, if null, a new KerberosDetails will be created based on
 *                        information found in the associated cluster
 * @param cluster         associated cluster
 * @throws KerberosMissingAdminCredentialsException if no credentials are stored or they test as missing
 * @throws KerberosAdminAuthenticationException     if the stored credentials fail authentication
 * @throws KerberosInvalidConfigurationException    if the KDC cannot be reached or is misconfigured
 * @throws AmbariException                          if any other error occurs while trying to validate the credentials
 */
private void validateKDCCredentials(KerberosDetails kerberosDetails, Cluster cluster) throws KerberosMissingAdminCredentialsException,
    KerberosAdminAuthenticationException,
    KerberosInvalidConfigurationException,
    AmbariException {

  if (kerberosDetails == null) {
    kerberosDetails = getKerberosDetails(cluster, null);
  }

  if (kerberosDetails.manageIdentities()) {
    PrincipalKeyCredential credentials = getKDCAdministratorCredentials(cluster.getClusterName());
    if (credentials == null) {
      throw new KerberosMissingAdminCredentialsException();
    } else {
      KerberosOperationHandler operationHandler = kerberosOperationHandlerFactory.getKerberosOperationHandler(kerberosDetails.getKdcType());

      if (operationHandler == null) {
        throw new AmbariException("Failed to get an appropriate Kerberos operation handler.");
      } else {
        boolean missingCredentials = false;
        try {
          operationHandler.open(credentials, kerberosDetails.getDefaultRealm(), kerberosDetails.getKerberosEnvProperties());
          // todo: this is really odd that open doesn't throw an exception if the credentials are missing
          missingCredentials = !operationHandler.testAdministratorCredentials();
        } catch (KerberosAdminAuthenticationException e) {
          // Authentication failed: re-throw with instructions for setting the credential resource.
          throw new KerberosAdminAuthenticationException(
              "Invalid KDC administrator credentials.\n" +
                  "The KDC administrator credentials must be set as a persisted or temporary credential resource." +
                  "This may be done by issuing a POST (or PUT for updating) to the /api/v1/clusters/:clusterName/credentials/kdc.admin.credential API entry point with the following payload:\n" +
                  "{\n" +
                  "  \"Credential\" : {\n" +
                  "    \"principal\" : \"(PRINCIPAL)\", \"key\" : \"(PASSWORD)\", \"type\" : \"(persisted|temporary)\"}\n" +
                  "  }\n" +
                  "}", e);
        } catch (KerberosKDCConnectionException e) {
          throw new KerberosInvalidConfigurationException(
              "Failed to connect to KDC - " + e.getMessage() + "\n" +
                  "Update the KDC settings in krb5-conf and kerberos-env configurations to correct this issue.",
              e);
        } catch (KerberosKDCSSLConnectionException e) {
          throw new KerberosInvalidConfigurationException(
              "Failed to connect to KDC - " + e.getMessage() + "\n" +
                  "Make sure the server's SSL certificate or CA certificates have been imported into Ambari's truststore.",
              e);
        } catch (KerberosRealmException e) {
          throw new KerberosInvalidConfigurationException(
              "Failed to find a KDC for the specified realm - " + e.getMessage() + "\n" +
                  "Update the KDC settings in krb5-conf and kerberos-env configurations to correct this issue.",
              e);
        } catch (KerberosLDAPContainerException e) {
          throw new KerberosInvalidConfigurationException(
              "The principal container was not specified\n" +
                  "Set the 'container_dn' value in the kerberos-env configuration to correct this issue.",
              e);
        } catch (KerberosOperationException e) {
          // Any other Kerberos operation failure is surfaced as a generic AmbariException.
          throw new AmbariException(e.getMessage(), e);
        } finally {
          // Always close the handler; a failure to close is deliberately ignored.
          try {
            operationHandler.close();
          } catch (KerberosOperationException e) {
            // Ignore this...
          }
        }

        // need to throw this outside of the try/catch so it isn't caught
        if (missingCredentials) {
          throw new KerberosMissingAdminCredentialsException();
        }
      }
    }
  }
}
/**
 * Performs operations needed to process Kerberos related tasks on the relevant cluster.
 * <p/>
 * Iterates through the components installed on the relevant cluster to determine if work
 * need to be done. Calls into the Handler implementation to provide guidance and set up stages
 * to perform the work needed to complete the relative action.
 *
 * @param cluster                        the relevant Cluster
 * @param kerberosDetails                a KerberosDetails containing information about relevant Kerberos configuration
 * @param serviceComponentFilter         a Map of service names to component names indicating the relevant
 *                                       set of services and components - if null, no filter is relevant;
 *                                       if empty, the filter indicates no relevant services or components
 * @param hostFilter                     a set of hostname indicating the set of hosts to process -
 *                                       if null, no filter is relevant; if empty, the filter indicates no
 *                                       relevant hosts
 * @param identityFilter                 a Collection of identity names indicating the relevant identities -
 *                                       if null, no filter is relevant; if empty, the filter indicates no
 *                                       relevant identities
 * @param hostsToForceKerberosOperations a set of host names on which it is expected that the
 *                                       Kerberos client is or will be in the INSTALLED state by
 *                                       the time the operations targeted for them are to be
 *                                       executed - if empty or null, this no hosts will be
 *                                       "forced"
 * @param requestStageContainer          a RequestStageContainer to place generated stages, if needed -
 *                                       if null a new RequestStageContainer will be created.
 * @param handler                        a Handler to use to provide guidance and set up stages
 *                                       to perform the work needed to complete the relative action
 * @return the updated or a new RequestStageContainer containing the stages that need to be
 * executed to complete this task; or null if no stages need to be executed.
 * @throws AmbariException
 * @throws KerberosInvalidConfigurationException if an issue occurs trying to get the
 *                                               Kerberos-specific configuration details
 */
@Transactional
RequestStageContainer handle(Cluster cluster,
                             KerberosDetails kerberosDetails,
                             Map<String, ? extends Collection<String>> serviceComponentFilter,
                             Set<String> hostFilter, Collection<String> identityFilter,
                             Set<String> hostsToForceKerberosOperations,
                             RequestStageContainer requestStageContainer,
                             final Handler handler)
    throws AmbariException, KerberosOperationException {

  final KerberosDescriptor kerberosDescriptor = getKerberosDescriptor(cluster, false);

  // Determine which ServiceComponentHosts are in scope, honoring the service/component and
  // host filters.
  List<ServiceComponentHost> schToProcess = getServiceComponentHostsToProcess(
      cluster,
      kerberosDescriptor,
      serviceComponentFilter,
      hostFilter);

  // While iterating over all the ServiceComponentHosts find hosts that have KERBEROS_CLIENT
  // components in the INSTALLED state and add them to the hostsWithValidKerberosClient Set.
  // This is needed to help determine which hosts to perform actions for and create tasks for.
  Set<String> hostsWithValidKerberosClient = null;

  // Create a temporary directory to store metadata needed to complete this task. Information
  // such as which principals and keytabs files to create as well as what configurations need
  // to be update are stored in data files in this directory. Any keytab files are stored in
  // this directory until they are distributed to their appropriate hosts.
  File dataDirectory = null;

  // If there are ServiceComponentHosts to process...
  if (!schToProcess.isEmpty()) {
    // Fail fast if the KDC administrator credentials are not usable.
    validateKDCCredentials(kerberosDetails, cluster);

    // Create a temporary directory to store metadata needed to complete this task. Information
    // such as which principals and keytabs files to create as well as what configurations need
    // to be update are stored in data files in this directory. Any keytab files are stored in
    // this directory until they are distributed to their appropriate hosts.
    dataDirectory = createTemporaryDirectory();

    hostsWithValidKerberosClient = getHostsWithValidKerberosClient(cluster);

    // Ensure that that hosts that should be assumed to be in the correct state when needed are
    // in the hostsWithValidKerberosClient collection.
    if (hostsToForceKerberosOperations != null) {
      hostsWithValidKerberosClient.addAll(hostsToForceKerberosOperations);
    }
  }

  // Always set up the necessary stages to perform the tasks needed to complete the operation.
  // Some stages may be no-ops, this is expected.
  // Gather data needed to create stages and tasks...
  Map<String, Set<String>> clusterHostInfo = StageUtils.getClusterHostInfo(cluster);
  String clusterHostInfoJson = StageUtils.getGson().toJson(clusterHostInfo);

  @Experimental(feature = ExperimentalFeature.MULTI_SERVICE, comment = "The cluster stack id is deprecated")
  Map<String, String> hostParams = customCommandExecutionHelper.createDefaultHostParams(cluster, cluster.getDesiredStackVersion());
  String hostParamsJson = StageUtils.getGson().toJson(hostParams);
  String ambariServerHostname = StageUtils.getHostName();
  ServiceComponentHostServerActionEvent event = new ServiceComponentHostServerActionEvent(
      RootComponent.AMBARI_SERVER.name(),
      ambariServerHostname, // TODO: Choose a random hostname from the cluster. All tasks for the AMBARI_SERVER service will be executed on this Ambari server
      System.currentTimeMillis());
  RoleCommandOrder roleCommandOrder = ambariManagementController.getRoleCommandOrder(cluster);

  // If a RequestStageContainer does not already exist, create a new one...
  if (requestStageContainer == null) {
    requestStageContainer = new RequestStageContainer(
        actionManager.getNextRequestId(),
        null,
        requestFactory,
        actionManager);
  }

  // Use the handler implementation to setup the relevant stages.
  handler.createStages(cluster, clusterHostInfoJson,
      hostParamsJson, event, roleCommandOrder, kerberosDetails, dataDirectory,
      requestStageContainer, schToProcess, serviceComponentFilter, hostFilter, identityFilter,
      hostsWithValidKerberosClient);

  // Add the finalize stage...
  handler.addFinalizeOperationStage(cluster, clusterHostInfoJson, hostParamsJson, event,
      dataDirectory, roleCommandOrder, requestStageContainer, kerberosDetails);

  return requestStageContainer;
}
/**
 * Performs operations needed to process Kerberos related tasks to manage a (unique) test identity
 * on the relevant cluster.
 * <p/>
 * If Ambari is not managing Kerberos identities, than this method does nothing.
 *
 * @param cluster               the relevant Cluster
 * @param kerberosDetails       a KerberosDetails containing information about relevant Kerberos
 *                              configuration
 * @param commandParameters     the command parameters map used to read and/or write attributes
 *                              related to this operation; must not be null - the resolved
 *                              principal name and keytab file path are written into it
 * @param requestStageContainer a RequestStageContainer to place generated stages, if needed -
 *                              if null a new RequestStageContainer will be created.
 * @param handler               a Handler to use to provide guidance and set up stages
 *                              to perform the work needed to complete the relative action
 * @return the updated or a new RequestStageContainer containing the stages that need to be
 * executed to complete this task; or null if no stages need to be executed.
 * @throws AmbariException
 * @throws KerberosOperationException
 */
private RequestStageContainer handleTestIdentity(Cluster cluster,
                                                 KerberosDetails kerberosDetails,
                                                 Map<String, String> commandParameters, RequestStageContainer requestStageContainer,
                                                 Handler handler) throws AmbariException, KerberosOperationException {

  if (kerberosDetails.manageIdentities()) {
    if (commandParameters == null) {
      throw new AmbariException("The properties map must not be null. It is needed to store data related to the service check identity");
    }

    List<ServiceComponentHost> serviceComponentHostsToProcess = new ArrayList<>();
    KerberosDescriptor kerberosDescriptor = getKerberosDescriptor(cluster, false);

    // This is needed to help determine which hosts to perform actions for and create tasks for.
    Set<String> hostsWithValidKerberosClient = getHostsWithValidKerberosClient(cluster);

    // Create a temporary directory to store metadata needed to complete this task. Information
    // such as which principals and keytabs files to create as well as what configurations need
    // to be update are stored in data files in this directory. Any keytab files are stored in
    // this directory until they are distributed to their appropriate hosts.
    File dataDirectory = createTemporaryDirectory();

    // Calculate the current non-host-specific configurations. These will be used to replace
    // variables within the Kerberos descriptor data
    Map<String, Map<String, String>> configurations = calculateConfigurations(cluster, null, kerberosDescriptor, false, false);

    // Resolve the service-check principal name and keytab attributes from configuration templates.
    String principal = variableReplacementHelper.replaceVariables("${kerberos-env/service_check_principal_name}@${realm}", configurations);
    String keytabFilePath = variableReplacementHelper.replaceVariables("${keytab_dir}/kerberos.service_check.${short_date}.keytab", configurations);
    String keytabFileOwnerName = variableReplacementHelper.replaceVariables("${cluster-env/smokeuser}", configurations);
    String keytabFileOwnerAccess = "rw";
    String keytabFileGroupName = variableReplacementHelper.replaceVariables("${cluster-env/user_group}", configurations);
    String keytabFileGroupAccess = "r";

    // Add the relevant principal name and keytab file data to the command params state
    commandParameters.put("principal_name", principal);
    commandParameters.put("keytab_file", keytabFilePath);

    try {
      // Get a list KERBEROS/KERBEROS_CLIENT ServiceComponentHost objects
      List<ServiceComponentHost> serviceComponentHosts = cluster.getServiceComponentHosts(Service.Type.KERBEROS.name(), Role.KERBEROS_CLIENT.name());

      if ((serviceComponentHosts != null) && !serviceComponentHosts.isEmpty()) {
        // Iterate over the KERBEROS_CLIENT service component hosts to get the service and
        // component-level Kerberos descriptors in order to determine which principals,
        // keytab files needed to be created or updated.
        for (ServiceComponentHost sch : serviceComponentHosts) {
          if (sch.getState() == State.INSTALLED) {
            String hostname = sch.getHostName();

            // Persist the keytab record if it does not already exist.
            KerberosKeytabEntity kke = kerberosKeytabDAO.find(keytabFilePath);
            if (kke == null) {
              kke = new KerberosKeytabEntity();
              kke.setKeytabPath(keytabFilePath);
              kke.setOwnerName(keytabFileOwnerName);
              kke.setOwnerAccess(keytabFileOwnerAccess);
              kke.setGroupName(keytabFileGroupName);
              kke.setGroupAccess(keytabFileGroupAccess);
              kerberosKeytabDAO.create(kke);
            }

            // create principals
            if (!kerberosPrincipalDAO.exists(principal)) {
              kerberosPrincipalDAO.create(principal, false);
            }

            // Link keytab, host and principal; persist the service mapping if it changed.
            KerberosKeytabPrincipalEntity kkp = kerberosKeytabPrincipalDAO.findOrCreate(kke, hostDAO.findById(sch.getHost().getHostId()), kerberosPrincipalDAO.find(principal));
            if(kkp.putServiceMapping(sch.getServiceName(), sch.getServiceComponentName())) {
              kerberosKeytabPrincipalDAO.merge(kkp);
            }
            kerberosKeytabDAO.merge(kke);
            hostsWithValidKerberosClient.add(hostname);
            serviceComponentHostsToProcess.add(sch);
          }
        }
      }
    } catch (Exception e) {
      // make sure to log what is going wrong
      LOG.error("Failed " + e);
      throw e;
    }

    // If there are ServiceComponentHosts to process, make sure the administrator credential
    // are available
    if (!serviceComponentHostsToProcess.isEmpty()) {
      try {
        validateKDCCredentials(kerberosDetails, cluster);
      } catch (Exception e) {
        LOG.error("Cannot validate credentials: " + e);
        // Best-effort cleanup of the data directory before propagating the failure.
        try {
          FileUtils.deleteDirectory(dataDirectory);
        } catch (Throwable t) {
          LOG.warn(String.format("The data directory (%s) was not deleted due to an error condition - {%s}",
              dataDirectory.getAbsolutePath(), t.getMessage()), t);
        }

        throw e;
      }
    }

    // Always set up the necessary stages to perform the tasks needed to complete the operation.
    // Some stages may be no-ops, this is expected.
    // Gather data needed to create stages and tasks...
    Map<String, Set<String>> clusterHostInfo = StageUtils.getClusterHostInfo(cluster);
    String clusterHostInfoJson = StageUtils.getGson().toJson(clusterHostInfo);

    @Experimental(feature = ExperimentalFeature.MULTI_SERVICE, comment = "The cluster stack id is deprecated")
    Map<String, String> hostParams = customCommandExecutionHelper.createDefaultHostParams(cluster, cluster.getDesiredStackVersion());
    String hostParamsJson = StageUtils.getGson().toJson(hostParams);
    String ambariServerHostname = StageUtils.getHostName();
    ServiceComponentHostServerActionEvent event = new ServiceComponentHostServerActionEvent(
        RootComponent.AMBARI_SERVER.name(),
        ambariServerHostname, // TODO: Choose a random hostname from the cluster. All tasks for the AMBARI_SERVER service will be executed on this Ambari server
        System.currentTimeMillis());
    RoleCommandOrder roleCommandOrder = ambariManagementController.getRoleCommandOrder(cluster);

    // If a RequestStageContainer does not already exist, create a new one...
    if (requestStageContainer == null) {
      requestStageContainer = new RequestStageContainer(
          actionManager.getNextRequestId(),
          null,
          requestFactory,
          actionManager);
    }

    // Use the handler implementation to setup the relevant stages.
    // Set the service/component filter to an empty map since the service/component processing
    // was done above.
    handler.createStages(cluster,
        clusterHostInfoJson, hostParamsJson, event, roleCommandOrder, kerberosDetails,
        dataDirectory, requestStageContainer, serviceComponentHostsToProcess,
        Collections.singletonMap("KERBEROS", Lists.newArrayList("KERBEROS_CLIENT")),
        null, Sets.newHashSet(principal), hostsWithValidKerberosClient);

    handler.addFinalizeOperationStage(cluster, clusterHostInfoJson, hostParamsJson, event,
        dataDirectory, roleCommandOrder, requestStageContainer, kerberosDetails);
  }

  return requestStageContainer;
}
/**
 * Gathers the Kerberos-related data from configurations and stores it in a new KerberosDetails
 * instance.
 *
 * @param cluster          the relevant Cluster
 * @param manageIdentities a Boolean value indicating how to override the configured behavior
 *                         of managing Kerberos identities; if null the configured behavior
 *                         will not be overridden
 * @return a new KerberosDetails with the collected configuration data
 * @throws KerberosInvalidConfigurationException if kdc_type is unset while identities are managed
 * @throws AmbariException if required configuration data is unavailable or invalid
 */
private KerberosDetails getKerberosDetails(Cluster cluster, Boolean manageIdentities)
    throws KerberosInvalidConfigurationException, AmbariException {

  // Guard clauses: every required configuration source must be present.
  if (cluster == null) {
    String message = "The cluster object is not available";
    LOG.error(message);
    throw new AmbariException(message);
  }

  Config configKrb5Conf = cluster.getDesiredConfigByType("krb5-conf");
  if (configKrb5Conf == null) {
    String message = "The 'krb5-conf' configuration is not available";
    LOG.error(message);
    throw new AmbariException(message);
  }

  Map<String, String> krb5ConfProperties = configKrb5Conf.getProperties();
  if (krb5ConfProperties == null) {
    String message = "The 'krb5-conf' configuration properties are not available";
    LOG.error(message);
    throw new AmbariException(message);
  }

  Config configKerberosEnv = cluster.getDesiredConfigByType(KERBEROS_ENV);
  if (configKerberosEnv == null) {
    String message = "The 'kerberos-env' configuration is not available";
    LOG.error(message);
    throw new AmbariException(message);
  }

  Map<String, String> kerberosEnvProperties = configKerberosEnv.getProperties();
  if (kerberosEnvProperties == null) {
    String message = "The 'kerberos-env' configuration properties are not available";
    LOG.error(message);
    throw new AmbariException(message);
  }

  // All sources validated; populate the result.
  KerberosDetails details = new KerberosDetails();
  details.setSecurityType(cluster.getSecurityType());
  details.setDefaultRealm(kerberosEnvProperties.get(DEFAULT_REALM));
  details.setKerberosEnvProperties(kerberosEnvProperties);

  // If set, override the manage identities behavior
  details.setManageIdentities(manageIdentities);

  // kdc_type must be set whenever Ambari is managing identities.
  String kdcTypeProperty = kerberosEnvProperties.get(KDC_TYPE);
  if ((kdcTypeProperty == null) && details.manageIdentities()) {
    String message = "The 'kerberos-env/kdc_type' value must be set to a valid KDC type";
    LOG.error(message);
    throw new KerberosInvalidConfigurationException(message);
  }

  KDCType kdcType;
  try {
    kdcType = KDCType.translate(kdcTypeProperty);
  } catch (IllegalArgumentException e) {
    String message = String.format("Invalid 'kdc_type' value: %s", kdcTypeProperty);
    LOG.error(message);
    throw new AmbariException(message);
  }

  // Set the KDCType to MIT_KDC as a fallback.
  details.setKdcType((kdcType == null) ? KDCType.MIT_KDC : kdcType);

  return details;
}
/**
 * Creates a temporary directory within the system temporary directory
 * <p/>
 * The directory name is built from a fixed prefix, the current timestamp and an attempt
 * counter; up to 100 candidate names are tried. The resulting directory is to be removed
 * by the caller when desired.
 *
 * @return a File pointing to the new temporary directory, or null if one was not created
 * @throws AmbariException if a new temporary directory cannot be created
 */
@Override
public File createTemporaryDirectory() throws AmbariException {
  try {
    File temporaryDirectory = getConfiguredTemporaryDirectory();
    long timestamp = System.currentTimeMillis();
    File directory = null;

    // Try up to 100 candidate names; stop as soon as one is created successfully.
    for (int attempt = 0; (directory == null) && (attempt < 100); attempt++) {
      File candidate = new File(temporaryDirectory, String.format("%s%d-%d.d",
          KerberosServerAction.DATA_DIRECTORY_PREFIX, timestamp, attempt));

      // Reject candidates that already exist or cannot be created.
      if (!candidate.exists() && candidate.mkdirs()) {
        directory = candidate;
        LOG.debug("Created temporary directory: {}", directory.getAbsolutePath());
      }
    }

    if (directory == null) {
      throw new IOException(String.format("Failed to create a temporary directory in %s", temporaryDirectory));
    }

    return directory;
  } catch (IOException e) {
    String message = "Failed to create the temporary data directory.";
    LOG.error(message, e);
    throw new AmbariException(message, e);
  }
}
/**
 * Merges the specified configuration property in a map of configuration types.
 * The supplied property is processed to replace variables using the replacement Map.
 * <p/>
 * See {@link VariableReplacementHelper#replaceVariables(String, java.util.Map)}
 * for information on variable replacement.
 * <p/>
 * Specifications that are null or not of the form {@code config-type/property_name}
 * are silently ignored.
 *
 * @param configurations             the Map of configuration types to update
 * @param configurationSpecification the config-type/property_name value specifying the property to set
 * @param value                      the value of the property to set
 * @param replacements               a Map of (grouped) replacement values
 * @throws AmbariException
 */
private void mergeConfiguration(Map<String, Map<String, String>> configurations,
                                String configurationSpecification,
                                String value,
                                Map<String, Map<String, String>> replacements) throws AmbariException {
  if (configurationSpecification == null) {
    return;
  }

  String[] parts = configurationSpecification.split("/");

  // Only well-formed "type/property" specifications are merged.
  if (parts.length == 2) {
    mergeConfigurations(configurations, parts[0], Collections.singletonMap(parts[1], value), replacements);
  }
}
/**
 * Merges configuration from a Map of configuration updates into a main configurations Map. Each
 * property in the updates Map is processed to replace variables using the replacement Map.
 * <p/>
 * See {@link VariableReplacementHelper#replaceVariables(String, java.util.Map)}
 * for information on variable replacement.
 *
 * @param configurations a Map of configurations
 * @param type           the configuration type
 * @param updates        a Map of property updates; a null map is ignored
 * @param replacements   a Map of (grouped) replacement values
 * @throws AmbariException if variable replacement fails
 */
private void mergeConfigurations(Map<String, Map<String, String>> configurations, String type,
                                 Map<String, String> updates,
                                 Map<String, Map<String, String>> replacements) throws AmbariException {
  if (updates != null) {
    // computeIfAbsent replaces the manual get/null-check/put sequence; an entry for the
    // type is created even when updates is empty, matching the previous behavior.
    Map<String, String> existingProperties = configurations.computeIfAbsent(type, t -> new HashMap<>());

    for (Map.Entry<String, String> property : updates.entrySet()) {
      // Both the property name and value may contain ${...} variables.
      existingProperties.put(
          variableReplacementHelper.replaceVariables(property.getKey(), replacements),
          variableReplacementHelper.replaceVariables(property.getValue(), replacements)
      );
    }
  }
}
/**
 * Adds identities to the AuthToLocalBuilder.
 *
 * @param authToLocalBuilder the AuthToLocalBuilder to use to build the auth_to_local mapping
 * @param identities         a List of KerberosIdentityDescriptors to process
 * @param identityFilter     a Collection of identity names indicating the relevant identities -
 *                           if null, no filter is relevant; if empty, the filter indicates no
 *                           relevant identities
 * @param configurations     a Map of configurations to use a replacements for variables
 *                           in identity fields
 * @throws org.apache.ambari.server.AmbariException
 */
private void addIdentities(AuthToLocalBuilder authToLocalBuilder,
                           List<KerberosIdentityDescriptor> identities, Collection<String> identityFilter,
                           Map<String, Map<String, String>> configurations) throws AmbariException {
  if (identities == null) {
    return;
  }

  for (KerberosIdentityDescriptor identity : identities) {
    // Skip identities excluded by the filter (a null filter admits everything).
    if ((identityFilter != null) && !identityFilter.contains(identity.getName())) {
      continue;
    }

    KerberosPrincipalDescriptor principalDescriptor = identity.getPrincipalDescriptor();

    // Only identities with a principal descriptor contribute an auth_to_local rule.
    if (principalDescriptor != null) {
      authToLocalBuilder.addRule(
          variableReplacementHelper.replaceVariables(principalDescriptor.getValue(), configurations),
          variableReplacementHelper.replaceVariables(principalDescriptor.getLocalUsername(), configurations));
    }
  }
}
/**
 * Creates a temporary file within the system temporary directory
 * <p/>
 * The resulting file is to be removed by the caller when desired.
 *
 * @return a File pointing to the new temporary file, or null if one was not created
 * @throws AmbariException if a new temporary directory cannot be created
 */
protected File createTemporaryFile() throws AmbariException {
  try {
    File parent = getConfiguredTemporaryDirectory();
    // Delegate name generation to the JDK; "tmp" prefix, ".tmp" suffix.
    return File.createTempFile("tmp", ".tmp", parent);
  } catch (IOException e) {
    String message = "Failed to create a temporary file.";
    LOG.error(message, e);
    throw new AmbariException(message, e);
  }
}
/**
 * Gets the configured temporary directory.
 * <p/>
 * Prefers the Ambari server's configured temp directory, falling back to the JVM's
 * {@code java.io.tmpdir} system property.
 *
 * @return a File pointing to the configured temporary directory
 * @throws IOException if neither source yields a directory path
 */
protected File getConfiguredTemporaryDirectory() throws IOException {
  String path = configuration.getServerTempDir();

  if (StringUtils.isEmpty(path)) {
    // Fall back to the JVM-wide temporary directory.
    path = System.getProperty("java.io.tmpdir");

    if (path == null) {
      throw new IOException("The System property 'java.io.tmpdir' does not specify a temporary directory");
    }
  }

  return new File(path);
}
/**
 * Creates a new stage
 *
 * @param id             the new stage's id
 * @param cluster        the relevant Cluster
 * @param requestId      the relevant request Id
 * @param requestContext a String describing the stage
 * @param commandParams  JSON-encoded command parameters
 * @param hostParams     JSON-encoded host parameters
 * @return a newly created Stage
 */
private Stage createNewStage(long id, Cluster cluster, long requestId,
                             String requestContext, String commandParams, String hostParams) {
  // NOTE(review): File.pathSeparator is ':' (or ';' on Windows), not the directory
  // separator — the log dir becomes e.g. "BASE_LOG_DIR:42". This pattern appears
  // elsewhere in the project; confirm whether File.separator was intended before changing.
  Stage stage = stageFactory.createNew(requestId,
      BASE_LOG_DIR + File.pathSeparator + requestId,
      cluster.getClusterName(),
      cluster.getClusterId(),
      requestContext,
      commandParams,
      hostParams);

  stage.setStageId(id);
  return stage;
}
/**
 * Given a Collection of ServiceComponentHosts generates a unique list of hosts.
 *
 * @param serviceComponentHosts a Collection of ServiceComponentHosts from which to to retrieve host names
 * @param allowedStates         a Set of HostStates to use to filter the list of hosts, if null, no filter is applied
 * @return a List of (unique) host names
 * @throws org.apache.ambari.server.AmbariException
 */
private List<String> createUniqueHostList(Collection<ServiceComponentHost> serviceComponentHosts, Set<HostState> allowedStates)
    throws AmbariException {
  Set<String> hostNames = new HashSet<>();
  Set<String> visitedHostNames = new HashSet<>();

  if (serviceComponentHosts != null) {
    for (ServiceComponentHost sch : serviceComponentHosts) {
      String hostname = sch.getHostName();

      // Set.add returns false for an already-seen host, replacing the contains()+add() pair
      // and ensuring each host's state is looked up at most once.
      if (visitedHostNames.add(hostname)) {
        // If allowedStates is null, assume the caller doesn't care about the state of the host
        // so skip the call to get the relevant Host data and just add the host to the list
        if (allowedStates == null) {
          hostNames.add(hostname);
        } else {
          Host host = clusters.getHost(hostname);

          if (allowedStates.contains(host.getState())) {
            hostNames.add(hostname);
          }
        }
      }
    }
  }

  return new ArrayList<>(hostNames);
}
/**
 * {@inheritDoc}
 * <p/>
 * A cluster is considered Kerberos-enabled when its security type is {@code KERBEROS}.
 */
@Override
public boolean isClusterKerberosEnabled(Cluster cluster) {
  return SecurityType.KERBEROS == cluster.getSecurityType();
}
/**
 * {@inheritDoc}
 * <p/>
 * Returns true only when the requested security type is KERBEROS or NONE and the request
 * properties contain at least one supported custom-operation directive (matched by the
 * lower-cased operation name).
 */
@Override
public boolean shouldExecuteCustomOperations(SecurityType requestSecurityType, Map<String, String> requestProperties) {
  boolean relevantSecurityType =
      (requestSecurityType == SecurityType.KERBEROS) || (requestSecurityType == SecurityType.NONE);

  if (relevantSecurityType && (requestProperties != null) && !requestProperties.isEmpty()) {
    for (SupportedCustomOperation type : SupportedCustomOperation.values()) {
      if (requestProperties.containsKey(type.name().toLowerCase())) {
        return true;
      }
    }
  }

  return false;
}
/**
 * {@inheritDoc}
 * <p/>
 * Returns null when the directive is absent; otherwise Boolean.FALSE only for the literal
 * (case-insensitive) value "false" and Boolean.TRUE for any other value.
 */
@Override
public Boolean getManageIdentitiesDirective(Map<String, String> requestProperties) {
  if (requestProperties == null) {
    return null;
  }

  String value = requestProperties.get(DIRECTIVE_MANAGE_KERBEROS_IDENTITIES);
  if (value == null) {
    return null;
  }

  return !"false".equalsIgnoreCase(value);
}
/**
 * {@inheritDoc}
 * <p/>
 * True only when the force-toggle directive is present with the (case-insensitive)
 * value "true"; absent properties or any other value yield false.
 */
@Override
public boolean getForceToggleKerberosDirective(Map<String, String> requestProperties) {
  if (requestProperties == null) {
    return false;
  }

  return "true".equalsIgnoreCase(requestProperties.get(DIRECTIVE_FORCE_TOGGLE_KERBEROS));
}
/**
 * {@inheritDoc}
 * <p/>
 * For each identity descriptor, records the principal value under its declared configuration
 * specification and the keytab file path under its declared configuration specification,
 * building a config-type to property-name/value map.
 */
@Override
public Map<String, Map<String, String>> getIdentityConfigurations(List<KerberosIdentityDescriptor> identityDescriptors) {
  Map<String, Map<String, String>> map = new HashMap<>();

  if (identityDescriptors == null) {
    return map;
  }

  for (KerberosIdentityDescriptor identityDescriptor : identityDescriptors) {
    KerberosPrincipalDescriptor principalDescriptor = identityDescriptor.getPrincipalDescriptor();
    if (principalDescriptor != null) {
      putConfiguration(map, principalDescriptor.getConfiguration(), principalDescriptor.getValue());
    }

    KerberosKeytabDescriptor keytabDescriptor = identityDescriptor.getKeytabDescriptor();
    if (keytabDescriptor != null) {
      putConfiguration(map, keytabDescriptor.getConfiguration(), keytabDescriptor.getFile());
    }
  }

  return map;
}
/**
 * Inserts a configuration property and value into a map of configuration types to property
 * name/value pair maps.
 * <p/>
 * Specifications that are null or not of the form {@code config-type/property_name} are
 * silently ignored.
 *
 * @param map           the Map to insert into
 * @param configuration a configuration property in the form of config-type/property_name
 * @param value         the value of the configuration property
 */
private void putConfiguration(Map<String, Map<String, String>> map, String configuration, String value) {
  if (configuration != null) {
    String[] principalTokens = configuration.split("/");

    if (principalTokens.length == 2) {
      // computeIfAbsent replaces the manual get/null-check/put sequence.
      map.computeIfAbsent(principalTokens[0], t -> new HashMap<>()).put(principalTokens[1], value);
    }
  }
}
/**
 * Returns the active identities for the named service component in the cluster.
 *
 * @param cluster            the relevant cluster (mandatory)
 * @param hostname           the name of a host for which to find results, null indicates all hosts
 * @param serviceName        the name of a service for which to find results, null indicates all
 *                           services
 * @param componentName      the name of a component for which to find results, null indicates all
 *                           components
 * @param kerberosDescriptor the relevant Kerberos Descriptor
 * @param filterContext      the context to use for filtering identities based on the state of the cluster
 * @return a list of KerberosIdentityDescriptors representing the active identities for the
 *         requested service component
 * @throws AmbariException if an error occurs processing the cluster's active identities
 */
private List<KerberosIdentityDescriptor> getActiveIdentities(Cluster cluster,
                                                             String hostname,
                                                             String serviceName,
                                                             String componentName,
                                                             KerberosDescriptor kerberosDescriptor,
                                                             Map<String, Object> filterContext)
    throws AmbariException {

  List<KerberosIdentityDescriptor> identities = new ArrayList<>();

  List<ServiceComponentHost> serviceComponentHosts = cluster.getServiceComponentHosts(hostname);

  if (serviceComponentHosts != null) {
    for (ServiceComponentHost serviceComponentHost : serviceComponentHosts) {
      String schServiceName = serviceComponentHost.getServiceName();
      String schComponentName = serviceComponentHost.getServiceComponentName();

      // Apply the service and component filters; a null filter matches everything.
      if (((serviceName == null) || serviceName.equals(schServiceName)) &&
          ((componentName == null) || componentName.equals(schComponentName))) {
        KerberosServiceDescriptor serviceDescriptor = kerberosDescriptor.getService(schServiceName);

        if (serviceDescriptor != null) {
          // Collect the service-level identities (resolved and filtered via filterContext)...
          List<KerberosIdentityDescriptor> serviceIdentities = serviceDescriptor.getIdentities(true, filterContext);
          if (serviceIdentities != null) {
            identities.addAll(serviceIdentities);
          }

          // ...and the component-level identities, when the component is declared.
          KerberosComponentDescriptor componentDescriptor = serviceDescriptor.getComponent(schComponentName);
          if (componentDescriptor != null) {
            List<KerberosIdentityDescriptor> componentIdentities = componentDescriptor.getIdentities(true, filterContext);
            if (componentIdentities != null) {
              identities.addAll(componentIdentities);
            }
          }
        }
      }
    }
  }

  return identities;
}
/**
 * Determines the existing configurations for the cluster, related to a given hostname (if provided)
 *
 * @param cluster  the cluster
 * @param hostname a hostname used to resolve host-level configuration overrides; may be null
 * @return a map of configuration type to the effective property name/value pairs
 * @throws AmbariException if the configuration tags or effective properties cannot be determined
 */
private Map<String, Map<String, String>> calculateExistingConfigurations(Cluster cluster, String hostname) throws AmbariException {
  // For a configuration type, both tag and an actual configuration can be stored
  // Configurations from the tag is always expanded and then over-written by the actual
  // global:version1:{a1:A1,b1:B1,d1:D1} + global:{a1:A2,c1:C1,DELETED_d1:x} ==>
  // global:{a1:A2,b1:B1,c1:C1}
  Map<String, Map<String, String>> configurations = new HashMap<>();
  // Resolve the effective (tag-expanded, override-applied) properties for this cluster/host.
  Map<String, Map<String, String>> configurationTags = ambariManagementController.findConfigurationTagsWithOverrides(cluster, hostname);
  Map<String, Map<String, String>> configProperties = configHelper.getEffectiveConfigProperties(cluster, configurationTags);
  // Apply the configurations saved with the Execution Cmd on top of
  // derived configs - This will take care of all the hacks
  for (Map.Entry<String, Map<String, String>> entry : configProperties.entrySet()) {
    String type = entry.getKey();
    Map<String, String> allLevelMergedConfig = entry.getValue();
    Map<String, String> configuration = configurations.get(type);
    if (configuration == null) {
      // Expected path: take a mutable copy of the effective properties for this type.
      configuration = new HashMap<>(allLevelMergedConfig);
    } else {
      // NOTE(review): this branch appears unreachable here since `configurations` starts empty
      // and each config type occurs once per iteration - confirm before relying on it.
      Map<String, String> mergedConfig = configHelper.getMergedConfig(allLevelMergedConfig, configuration);
      configuration.clear();
      configuration.putAll(mergedConfig);
    }
    configurations.put(type, configuration);
  }
  return configurations;
}
/**
 * Add configurations related to Kerberos, to a previously created map of configurations.
 * <p/>
 * The supplied map of configurations is expected to be mutable and will be altered.
 *
 * @param cluster                      the cluster
 * @param configurations               a map of configurations
 * @param hostname                     a hostname
 * @param kerberosDescriptorProperties the Kerberos descriptor properties
 * @return the supplied map of configurations with updates applied
 * @throws AmbariException if the cluster host info cannot be determined
 */
private Map<String, Map<String, String>> addAdditionalConfigurations(Cluster cluster, Map<String, Map<String, String>> configurations,
                                                                     String hostname, Map<String, String> kerberosDescriptorProperties)
    throws AmbariException {
  // A map to hold un-categorized properties (stored under the empty-string config type). This
  // may come from the KerberosDescriptor and will also contain a value for the current host.
  Map<String, String> generalProperties = configurations.computeIfAbsent("", type -> new HashMap<>());
  // If any properties are set in the calculated KerberosDescriptor, add them into the
  // Map of configurations as an un-categorized type (using an empty string)
  if (kerberosDescriptorProperties != null) {
    generalProperties.putAll(kerberosDescriptorProperties);
  }
  if (!StringUtils.isEmpty(hostname)) {
    // Add the current hostname under "host" and "hostname"
    generalProperties.put("host", hostname);
    generalProperties.put("hostname", hostname);
  }
  // Add the current cluster's name
  generalProperties.put("cluster_name", cluster.getClusterName());
  // Add the current date in short format (MMddyy)
  generalProperties.put("short_date", new SimpleDateFormat("MMddyy").format(new Date()));
  // Add the clusterHostInfo structure (component key -> comma-separated host list) unless the
  // caller already supplied one.
  if (configurations.get("clusterHostInfo") == null) {
    Map<String, Set<String>> clusterHostInfo = StageUtils.getClusterHostInfo(cluster);
    if (clusterHostInfo != null) {
      Map<String, String> componentHosts = new HashMap<>();
      clusterHostInfo = StageUtils.substituteHostIndexes(clusterHostInfo);
      for (Map.Entry<String, Set<String>> entry : clusterHostInfo.entrySet()) {
        componentHosts.put(entry.getKey(), StringUtils.join(entry.getValue(), ","));
      }
      configurations.put("clusterHostInfo", componentHosts);
    }
  }
  configurations.put("principals", principalNames(cluster, configurations));
  return configurations;
}
/**
 * Creates a deep copy of a map of maps, typically used to copy configuration sets.
 *
 * @param map the map to copy
 * @return a deep copy of the supplied map, or {@code null} if the supplied map is {@code null}
 */
private Map<String, Map<String, String>> deepCopy(Map<String, Map<String, String>> map) {
  if (map == null) {
    return null;
  }
  Map<String, Map<String, String>> copy = new HashMap<>();
  // Copy each inner map individually; null inner maps are preserved as-is.
  map.forEach((type, properties) ->
      copy.put(type, (properties == null) ? null : new HashMap<>(properties)));
  return copy;
}
/**
 * Get the user-supplied Kerberos descriptor from the set of cluster artifacts.
 *
 * @param cluster the cluster
 * @return a Kerberos descriptor, or {@code null} if no "kerberos_descriptor" artifact exists
 *         for the cluster
 */
private KerberosDescriptor getKerberosDescriptorUpdates(Cluster cluster) throws AmbariException {
  // Locate the cluster-scoped "kerberos_descriptor" artifact via a name + foreign-key lookup.
  TreeMap<String, String> foreignKeys = new TreeMap<>();
  foreignKeys.put("cluster", String.valueOf(cluster.getClusterId()));
  ArtifactEntity artifact = artifactDAO.findByNameAndForeignKeys("kerberos_descriptor", foreignKeys);
  if (artifact == null) {
    return null;
  }
  return kerberosDescriptorFactory.createInstance(artifact.getArtifactData());
}
/**
 * Get the default Kerberos descriptor from the specified stack.
 *
 * @param stackId                 the relevant stack ID
 * @param includePreconfigureData <code>true</code> to include the preconfigure data; otherwise false
 * @return a Kerberos Descriptor
 * @throws AmbariException if an error occurs while retrieving the Kerberos descriptor
 */
private KerberosDescriptor getKerberosDescriptorFromStack(StackId stackId, boolean includePreconfigureData) throws AmbariException {
  // Delegates entirely to the stack metadata service.
  return ambariMetaInfo.getKerberosDescriptor(stackId.getStackName(), stackId.getStackVersion(), includePreconfigureData);
}
/**
 * Recursively walk the Kerberos descriptor tree to find all Kerberos identity definitions and
 * determine which should be filtered out.
 * <p>
 * No actual filtering is performed while processing since any referenced Kerberos identities need
 * to be accessible throughout the process. So a map of container path to a list of identities is
 * created and returned.
 *
 * @param currentPath        the descriptor path of the container currently being inspected
 * @param container          the descriptor container whose identities and children are evaluated
 * @param context            the context used to evaluate each identity's "when" clause
 *                           (via {@code KerberosIdentityDescriptor#shouldInclude})
 * @param identitiesToRemove the accumulator mapping container paths to names of identities to remove
 * @return the (possibly updated) map of container paths to identity names to be removed
 * @throws AmbariException if an error occurs while retrieving the identities
 */
private Map<String, Set<String>> processWhenClauses(String currentPath, AbstractKerberosDescriptorContainer container, Map<String, Object> context, Map<String, Set<String>> identitiesToRemove) throws AmbariException {
  // Get the list of this container's identities.
  // Do not filter these identities using KerberosIdentityDescriptor#shouldInclude since we will do
  // that later.
  List<KerberosIdentityDescriptor> identities = container.getIdentities(true, null);
  if ((identities != null) && !identities.isEmpty()) {
    Set<String> set = null;
    for (KerberosIdentityDescriptor identity : identities) {
      if (!identity.shouldInclude(context)) {
        // Lazily create the per-path set when the first excluded identity is found.
        if (set == null) {
          set = new HashSet<>();
          identitiesToRemove.put(currentPath, set);
        }
        set.add(identity.getName());
      }
    }
  }
  // Recurse into child containers, extending the path with each child's name.
  Collection<? extends AbstractKerberosDescriptorContainer> children = container.getChildContainers();
  if (children != null) {
    for (AbstractKerberosDescriptorContainer child : children) {
      identitiesToRemove = processWhenClauses(currentPath + "/" + child.getName(), child, context, identitiesToRemove);
    }
  }
  return identitiesToRemove;
}
/**
 * Processes the configuration values related to a particular Kerberos descriptor identity definition
 * by:
 * <ol>
 * <li>
 * merging the declared properties and their values from <code>identityConfigurations</code> with the set of
 * Kerberos-related configuration updates in <code>kerberosConfigurations</code>, using the existing cluster
 * configurations in <code>configurations</code>
 * </li>
 * <li>
 * ensuring that these properties are not overwritten by recommendations by the stack advisor later
 * in the workflow by adding them to the <code>propertiesToIgnore</code> map
 * </li>
 * </ol>
 *
 * @param identityConfigurations a map of config-types to property name/value pairs to process
 * @param kerberosConfigurations a map of config-types to property name/value pairs to be applied
 *                               as configuration updates
 * @param configurations         a map of config-types to property name/value pairs representing
 *                               the existing configurations for the cluster
 * @param propertiesToIgnore     a map of config-types to property names to be ignored while
 *                               processing stack advisor recommendations
 * @throws AmbariException if merging the configurations fails
 */
private void processIdentityConfigurations(Map<String, Map<String, String>> identityConfigurations,
                                           Map<String, Map<String, String>> kerberosConfigurations,
                                           Map<String, Map<String, String>> configurations,
                                           Map<String, Set<String>> propertiesToIgnore)
    throws AmbariException {
  if (identityConfigurations != null) {
    for (Map.Entry<String, Map<String, String>> identityEntry : identityConfigurations.entrySet()) {
      String configType = identityEntry.getKey();
      Map<String, String> properties = identityEntry.getValue();
      // Merge the identity-declared properties into the pending Kerberos configuration updates.
      mergeConfigurations(kerberosConfigurations, configType, properties, configurations);
      if ((properties != null) && !properties.isEmpty()) {
        // Shield these property names from later stack advisor recommendations.
        propertiesToIgnore.computeIfAbsent(configType, type -> new HashSet<>())
            .addAll(properties.keySet());
      }
    }
  }
}
/**
 * Gathers the Kerberos-related configurations for services not yet installed, but flagged to be
 * preconfigured.
 * <p>
 * Only existing configuration types will be updated, new types will not be added since they are
 * expected only when the relevant service has been installed. This is to help reduce the number
 * of service restarts when new services are added to clusters where Kerberos has been enabled.
 * <p>
 * If desired, the Stack Advisor will be invoked to request recommended hosts for the component.
 * This is needed to fill out the clusterHostInfo structure in the configuration map. For example,
 * <code>clusterHostInfo/knox_gateway_hosts</code>
 *
 * @param configurations           the existing configurations (updated in-place)
 * @param cluster                  the cluster
 * @param kerberosDescriptor       the kerberos descriptor
 * @param calculateClusterHostInfo true, to query the Stack Advisor for recommended hosts for the
 *                                 preconfigured services and components; false, otherwise
 * @return the updated configuration map
 * @throws AmbariException if an error occurs
 */
private Map<String, Map<String, String>> addConfigurationsForPreProcessedServices(Map<String, Map<String, String>> configurations,
                                                                                  Cluster cluster,
                                                                                  KerberosDescriptor kerberosDescriptor,
                                                                                  boolean calculateClusterHostInfo)
    throws AmbariException {
  Map<String, KerberosServiceDescriptor> serviceDescriptorMap = kerberosDescriptor.getServices();
  if (serviceDescriptorMap != null) {
    Map<String, Service> existingServices = cluster.getServices();
    // Tracks installed services plus any services slated for preconfiguration; used later to
    // decide whether the stack advisor must be consulted.
    Set<String> allServices = new HashSet<>(existingServices.keySet());
    Set<String> componentFilter = new HashSet<>();
    StackId stackVersion = cluster.getCurrentStackVersion();
    for (KerberosServiceDescriptor serviceDescriptor : serviceDescriptorMap.values()) {
      String serviceName = serviceDescriptor.getName();
      boolean shouldPreconfigure = serviceDescriptor.shouldPreconfigure();
      // Only services flagged for preconfiguration that are not yet installed are relevant.
      if (shouldPreconfigure && !existingServices.containsKey(serviceName)) {
        if (ambariMetaInfo.isValidService(stackVersion.getStackName(), stackVersion.getStackVersion(), serviceName)) {
          ServiceInfo serviceInfo = ambariMetaInfo.getService(stackVersion.getStackName(), stackVersion.getStackVersion(), serviceName);
          Collection<PropertyInfo> servicePropertiesInfos = serviceInfo.getProperties();
          if (servicePropertiesInfos != null) {
            // Group the service's stack-default properties by config type...
            Map<String, Map<String, String>> propertiesToAdd = new HashMap<>();
            for (PropertyInfo propertyInfo : servicePropertiesInfos) {
              String type = ConfigHelper.fileNameToConfigType(propertyInfo.getFilename());
              Map<String, String> map = propertiesToAdd.get(type);
              if (map == null) {
                map = new HashMap<>();
                propertiesToAdd.put(type, map);
              }
              map.put(propertyInfo.getName(), propertyInfo.getValue());
            }
            // ...and add only whole config types that do not already exist; existing types are
            // left untouched, per the contract described in the javadoc.
            for (Map.Entry<String, Map<String, String>> entry : propertiesToAdd.entrySet()) {
              if (!configurations.containsKey(entry.getKey())) {
                configurations.put(entry.getKey(), entry.getValue());
              }
            }
          }
          // This is only needed if the Stack Advisor is being called to get recommended host
          // for components
          if (calculateClusterHostInfo) {
            // Add the service to preconfigure to the all services set for use later
            allServices.add(serviceName);
            // Add the components for the service to preconfigure to the component filter
            List<ComponentInfo> componentInfos = serviceInfo.getComponents();
            if (componentInfos != null) {
              for (ComponentInfo componentInfo : componentInfos) {
                componentFilter.add(componentInfo.getName());
              }
            }
          }
        }
      }
    }
    // A size increase means at least one service was added for preconfiguration, so ask the
    // stack advisor for host recommendations to fill in clusterHostInfo entries.
    if (calculateClusterHostInfo && (allServices.size() > existingServices.size())) {
      applyStackAdvisorHostRecommendations(cluster, allServices, componentFilter, configurations);
    }
  }
  return configurations;
}
/**
 * Combines a stack-level Kerberos descriptor with a user-supplied Kerberos descriptor to create a
 * composite {@link KerberosDescriptor} using the following logic:
 * <p>
 * <ul>
 * <li>
 * If both the stack-level and the user-supplied Kerberos descriptors are <code>null</code>,
 * return an empty {@link KerberosDescriptor}.
 * </li>
 * <li>
 * If the stack-level Kerberos descriptor is <code>null</code> and the user-supplied Kerberos
 * descriptor is <code>non-null</code>, return the user-supplied Kerberos descriptor.
 * </li>
 * <li>
 * If the stack-level Kerberos descriptor is <code>non-null</code> and the user-supplied
 * Kerberos descriptor is <code>null</code>, return the stack-level Kerberos descriptor.
 * </li>
 * <li>
 * If neither the stack-level nor the user-supplied Kerberos descriptors are <code>null</code>,
 * return the stack-level Kerberos descriptor that has been updated using data from the
 * user-supplied Kerberos descriptor.
 * </li>
 * </ul>
 *
 * @param stackDescriptor the stack-level Kerberos descriptor
 * @param userDescriptor  the user-supplied Kerberos descriptor
 * @return a KerberosDescriptor
 */
private KerberosDescriptor combineKerberosDescriptors(KerberosDescriptor stackDescriptor, KerberosDescriptor userDescriptor) {
  if (stackDescriptor == null) {
    // No stack data: fall back to the user-supplied descriptor, or an empty one if absent.
    return (userDescriptor == null) ? new KerberosDescriptor() : userDescriptor;
  }
  if (userDescriptor != null) {
    // Overlay the user-supplied data on top of the stack defaults.
    stackDescriptor.update(userDescriptor);
  }
  return stackDescriptor;
}
/* ********************************************************************************************
 * Helper classes and enums
 * ******************************************************************************************** */
/**
 * An enumeration of the supported custom operations
 */
public enum SupportedCustomOperation {
  REGENERATE_KEYTABS
}
/**
 * Handler is an abstract base class that must be extended by concrete handler classes to do the
 * "right" thing for the task at hand.
 */
private abstract class Handler {
/**
 * If {@code true}, allows stages and tasks created with the handler to be
 * retried instead of outright failing a task.
 *
 * @see KerberosHelper#ALLOW_RETRY
 */
protected boolean retryAllowed = false;
/**
 * Sets whether tasks created as part of this handler can be retried if they fail. If a task
 * cannot be retried it will fail the entire request.
 *
 * @param retryAllowed {@code true} if failed tasks may be retried; {@code false} if a task
 *                     failure should fail the entire request
 */
void setRetryAllowed(boolean retryAllowed) {
  this.retryAllowed = retryAllowed;
}
/**
 * Creates the necessary stages to complete the relevant task and stores them in the supplied
 * or a newly created RequestStageContainer.
 * <p/>
 * If the supplied RequestStageContainer is null, a new one must be created and filled.
 * {@link org.apache.ambari.server.controller.internal.RequestStageContainer#persist()} should
 * not be called since it is not known if the set of states for this container is complete.
 *
 * @param cluster                the relevant Cluster
 * @param clusterHostInfo        JSON-encoded clusterHostInfo structure
 * @param hostParams             JSON-encoded host parameters
 * @param event                  a ServiceComponentHostServerActionEvent to pass to any created tasks
 * @param roleCommandOrder       the RoleCommandOrder to use to generate the RoleGraph for any newly created Stages
 * @param kerberosDetails        a KerberosDetails containing the information about the relevant Kerberos configuration
 * @param dataDirectory          a File pointing to the (temporary) data directory
 * @param requestStageContainer  a RequestStageContainer to store the new stages in, if null a
 *                               new RequestStageContainer will be created
 * @param serviceComponentHosts  a List of ServiceComponentHosts that needs to be updated as part of this operation
 * @param serviceComponentFilter a Map of service names to component names indicating the relevant
 *                               set of services and components - if null, no filter is relevant;
 *                               if empty, the filter indicates no relevant services or components
 * @param hostFilter             a set of hostnames indicating the set of hosts to process -
 *                               if null, no filter is relevant; if empty, the filter indicates no
 *                               relevant hosts
 * @param identityFilter         a Collection of identity names indicating the relevant identities -
 *                               if null, no filter is relevant; if empty, the filter indicates no
 *                               relevant identities
 * @param hostsWithValidKerberosClient a Set of hostnames with a valid Kerberos client installed
 *                               (assumption from caller usage - confirm with implementations)
 * @return the last stage id generated, or -1 if no stages were created
 * @throws AmbariException if an error occurs while creating the relevant stages
 */
abstract long createStages(Cluster cluster,
                           String clusterHostInfo, String hostParams,
                           ServiceComponentHostServerActionEvent event,
                           RoleCommandOrder roleCommandOrder,
                           KerberosDetails kerberosDetails, File dataDirectory,
                           RequestStageContainer requestStageContainer,
                           List<ServiceComponentHost> serviceComponentHosts,
                           Map<String, ? extends Collection<String>> serviceComponentFilter,
                           Set<String> hostFilter, Collection<String> identityFilter,
                           Set<String> hostsWithValidKerberosClient)
    throws AmbariException;
/**
 * Adds a "Preparing Operations" server-action stage that runs
 * {@link PrepareEnableKerberosServerAction}.
 * <p/>
 * Note: the clusterHostInfoJson parameter is currently unused by this method.
 *
 * @param cluster               the relevant Cluster
 * @param clusterHostInfoJson   JSON-encoded clusterHostInfo structure (unused)
 * @param hostParamsJson        JSON-encoded host parameters
 * @param event                 a ServiceComponentHostServerActionEvent to pass to the created task
 * @param commandParameters     command parameters for the server action
 * @param roleCommandOrder      the RoleCommandOrder used to build the RoleGraph for the new stage
 * @param requestStageContainer the container to which the generated stages are appended
 * @throws AmbariException if an error occurs while creating the stage
 */
public void addPrepareEnableKerberosOperationsStage(Cluster cluster, String clusterHostInfoJson,
                                                    String hostParamsJson, ServiceComponentHostServerActionEvent event,
                                                    Map<String, String> commandParameters,
                                                    RoleCommandOrder roleCommandOrder, RequestStageContainer requestStageContainer)
    throws AmbariException {
  // Build a server-side (no host commands) stage...
  Stage stage = createServerActionStage(requestStageContainer.getLastStageId(),
      cluster,
      requestStageContainer.getId(),
      "Preparing Operations",
      "{}",
      hostParamsJson,
      PrepareEnableKerberosServerAction.class,
      event,
      commandParameters,
      "Preparing Operations",
      configuration.getDefaultServerTaskTimeout());
  // ...then order it via the role graph and append it to the request.
  RoleGraph roleGraph = roleGraphFactory.createNew(roleCommandOrder);
  roleGraph.build(stage);
  requestStageContainer.addStages(roleGraph.getStages());
}
/**
 * Adds a "Preparing Operations" server-action stage that runs
 * {@link PrepareKerberosIdentitiesServerAction}.
 * <p/>
 * Note: the clusterHostInfoJson parameter is currently unused by this method.
 *
 * @param cluster               the relevant Cluster
 * @param clusterHostInfoJson   JSON-encoded clusterHostInfo structure (unused)
 * @param hostParamsJson        JSON-encoded host parameters
 * @param event                 a ServiceComponentHostServerActionEvent to pass to the created task
 * @param commandParameters     command parameters for the server action
 * @param roleCommandOrder      the RoleCommandOrder used to build the RoleGraph for the new stage
 * @param requestStageContainer the container to which the generated stages are appended
 * @throws AmbariException if an error occurs while creating the stage
 */
public void addPrepareKerberosIdentitiesStage(Cluster cluster, String clusterHostInfoJson,
                                              String hostParamsJson, ServiceComponentHostServerActionEvent event,
                                              Map<String, String> commandParameters,
                                              RoleCommandOrder roleCommandOrder, RequestStageContainer requestStageContainer)
    throws AmbariException {
  Stage stage = createServerActionStage(requestStageContainer.getLastStageId(),
      cluster,
      requestStageContainer.getId(),
      "Preparing Operations",
      "{}",
      hostParamsJson,
      PrepareKerberosIdentitiesServerAction.class,
      event,
      commandParameters,
      "Preparing Operations",
      configuration.getDefaultServerTaskTimeout());
  RoleGraph roleGraph = roleGraphFactory.createNew(roleCommandOrder);
  roleGraph.build(stage);
  requestStageContainer.addStages(roleGraph.getStages());
}
/**
 * Adds a "Preparing Operations" server-action stage that runs
 * {@link PrepareDisableKerberosServerAction}.
 * <p/>
 * Note: the clusterHostInfoJson parameter is currently unused by this method.
 *
 * @param cluster               the relevant Cluster
 * @param clusterHostInfoJson   JSON-encoded clusterHostInfo structure (unused)
 * @param hostParamsJson        JSON-encoded host parameters
 * @param event                 a ServiceComponentHostServerActionEvent to pass to the created task
 * @param commandParameters     command parameters for the server action
 * @param roleCommandOrder      the RoleCommandOrder used to build the RoleGraph for the new stage
 * @param requestStageContainer the container to which the generated stages are appended
 * @throws AmbariException if an error occurs while creating the stage
 */
public void addPrepareDisableKerberosOperationsStage(Cluster cluster, String clusterHostInfoJson,
                                                     String hostParamsJson, ServiceComponentHostServerActionEvent event,
                                                     Map<String, String> commandParameters,
                                                     RoleCommandOrder roleCommandOrder, RequestStageContainer requestStageContainer)
    throws AmbariException {
  Stage stage = createServerActionStage(requestStageContainer.getLastStageId(),
      cluster,
      requestStageContainer.getId(),
      "Preparing Operations",
      "{}",
      hostParamsJson,
      PrepareDisableKerberosServerAction.class,
      event,
      commandParameters,
      "Preparing Operations",
      configuration.getDefaultServerTaskTimeout());
  RoleGraph roleGraph = roleGraphFactory.createNew(roleCommandOrder);
  roleGraph.build(stage);
  requestStageContainer.addStages(roleGraph.getStages());
}
/**
 * Adds a "Create Principals" server-action stage that runs {@link CreatePrincipalsServerAction}.
 * <p/>
 * Note: the clusterHostInfoJson parameter is currently unused by this method.
 *
 * @param cluster               the relevant Cluster
 * @param clusterHostInfoJson   JSON-encoded clusterHostInfo structure (unused)
 * @param hostParamsJson        JSON-encoded host parameters
 * @param event                 a ServiceComponentHostServerActionEvent to pass to the created task
 * @param commandParameters     command parameters for the server action
 * @param roleCommandOrder      the RoleCommandOrder used to build the RoleGraph for the new stage
 * @param requestStageContainer the container to which the generated stages are appended
 * @throws AmbariException if an error occurs while creating the stage
 */
public void addCreatePrincipalsStage(Cluster cluster, String clusterHostInfoJson,
                                     String hostParamsJson, ServiceComponentHostServerActionEvent event,
                                     Map<String, String> commandParameters,
                                     RoleCommandOrder roleCommandOrder, RequestStageContainer requestStageContainer)
    throws AmbariException {
  Stage stage = createServerActionStage(requestStageContainer.getLastStageId(),
      cluster,
      requestStageContainer.getId(),
      "Create Principals",
      "{}",
      hostParamsJson,
      CreatePrincipalsServerAction.class,
      event,
      commandParameters,
      "Create Principals",
      // Principal creation is potentially long-running; never use a timeout shorter than the
      // long-running-task default.
      Math.max(ServerAction.DEFAULT_LONG_RUNNING_TASK_TIMEOUT_SECONDS, configuration.getDefaultServerTaskTimeout()));
  RoleGraph roleGraph = roleGraphFactory.createNew(roleCommandOrder);
  roleGraph.build(stage);
  requestStageContainer.addStages(roleGraph.getStages());
}
/**
 * Adds a "Destroy Principals" server-action stage that runs {@link DestroyPrincipalsServerAction}.
 * <p/>
 * Note: the clusterHostInfoJson parameter is currently unused by this method.
 *
 * @param cluster               the relevant Cluster
 * @param clusterHostInfoJson   JSON-encoded clusterHostInfo structure (unused)
 * @param hostParamsJson        JSON-encoded host parameters
 * @param event                 a ServiceComponentHostServerActionEvent to pass to the created task
 * @param commandParameters     command parameters for the server action
 * @param roleCommandOrder      the RoleCommandOrder used to build the RoleGraph for the new stage
 * @param requestStageContainer the container to which the generated stages are appended
 * @throws AmbariException if an error occurs while creating the stage
 */
public void addDestroyPrincipalsStage(Cluster cluster, String clusterHostInfoJson,
                                      String hostParamsJson, ServiceComponentHostServerActionEvent event,
                                      Map<String, String> commandParameters,
                                      RoleCommandOrder roleCommandOrder, RequestStageContainer requestStageContainer)
    throws AmbariException {
  Stage stage = createServerActionStage(requestStageContainer.getLastStageId(),
      cluster,
      requestStageContainer.getId(),
      "Destroy Principals",
      "{}",
      hostParamsJson,
      DestroyPrincipalsServerAction.class,
      event,
      commandParameters,
      "Destroy Principals",
      // Potentially long-running; never use a timeout shorter than the long-running-task default.
      Math.max(ServerAction.DEFAULT_LONG_RUNNING_TASK_TIMEOUT_SECONDS, configuration.getDefaultServerTaskTimeout()));
  RoleGraph roleGraph = roleGraphFactory.createNew(roleCommandOrder);
  roleGraph.build(stage);
  requestStageContainer.addStages(roleGraph.getStages());
}
/**
 * Adds a "Configure Ambari Identity" server-action stage that runs
 * {@link ConfigureAmbariIdentitiesServerAction}.
 * <p/>
 * Note: the clusterHostInfoJson parameter is currently unused by this method.
 *
 * @param cluster               the relevant Cluster
 * @param clusterHostInfoJson   JSON-encoded clusterHostInfo structure (unused)
 * @param hostParamsJson        JSON-encoded host parameters
 * @param event                 a ServiceComponentHostServerActionEvent to pass to the created task
 * @param commandParameters     command parameters for the server action
 * @param roleCommandOrder      the RoleCommandOrder used to build the RoleGraph for the new stage
 * @param requestStageContainer the container to which the generated stages are appended
 * @throws AmbariException if an error occurs while creating the stage
 */
public void addConfigureAmbariIdentityStage(Cluster cluster, String clusterHostInfoJson,
                                            String hostParamsJson, ServiceComponentHostServerActionEvent event,
                                            Map<String, String> commandParameters,
                                            RoleCommandOrder roleCommandOrder, RequestStageContainer requestStageContainer)
    throws AmbariException {
  Stage stage = createServerActionStage(requestStageContainer.getLastStageId(),
      cluster,
      requestStageContainer.getId(),
      "Configure Ambari Identity",
      "{}",
      hostParamsJson,
      ConfigureAmbariIdentitiesServerAction.class,
      event,
      commandParameters,
      "Configure Ambari Identity",
      configuration.getDefaultServerTaskTimeout());
  RoleGraph roleGraph = roleGraphFactory.createNew(roleCommandOrder);
  roleGraph.build(stage);
  requestStageContainer.addStages(roleGraph.getStages());
}
/**
 * Adds a "Create Keytabs" server-action stage that runs {@link CreateKeytabFilesServerAction}.
 * <p/>
 * Note: the clusterHostInfoJson parameter is currently unused by this method.
 *
 * @param cluster               the relevant Cluster
 * @param clusterHostInfoJson   JSON-encoded clusterHostInfo structure (unused)
 * @param hostParamsJson        JSON-encoded host parameters
 * @param event                 a ServiceComponentHostServerActionEvent to pass to the created task
 * @param commandParameters     command parameters for the server action
 * @param roleCommandOrder      the RoleCommandOrder used to build the RoleGraph for the new stage
 * @param requestStageContainer the container to which the generated stages are appended
 * @throws AmbariException if an error occurs while creating the stage
 */
public void addCreateKeytabFilesStage(Cluster cluster, String clusterHostInfoJson,
                                      String hostParamsJson, ServiceComponentHostServerActionEvent event,
                                      Map<String, String> commandParameters,
                                      RoleCommandOrder roleCommandOrder, RequestStageContainer requestStageContainer)
    throws AmbariException {
  Stage stage = createServerActionStage(requestStageContainer.getLastStageId(),
      cluster,
      requestStageContainer.getId(),
      "Create Keytabs",
      "{}",
      hostParamsJson,
      CreateKeytabFilesServerAction.class,
      event,
      commandParameters,
      "Create Keytabs",
      // Keytab generation is potentially long-running; never use a timeout shorter than the
      // long-running-task default.
      Math.max(ServerAction.DEFAULT_LONG_RUNNING_TASK_TIMEOUT_SECONDS, configuration.getDefaultServerTaskTimeout()));
  RoleGraph roleGraph = roleGraphFactory.createNew(roleCommandOrder);
  roleGraph.build(stage);
  requestStageContainer.addStages(roleGraph.getStages());
}
/**
 * Adds a "Distribute Keytabs" stage that issues the SET_KEYTAB custom command to the given hosts.
 * <p/>
 * Note: the clusterHostInfoJson parameter is currently unused by this method.
 *
 * @param cluster               the relevant Cluster
 * @param clusterHostInfoJson   JSON-encoded clusterHostInfo structure (unused)
 * @param hostParamsJson        JSON-encoded host parameters
 * @param commandParameters     command parameters serialized into the stage
 * @param roleCommandOrder      the RoleCommandOrder used to build the RoleGraph for the new stage
 * @param requestStageContainer the container to which the generated stages are appended
 * @param hosts                 the hosts to receive keytab files
 * @throws AmbariException if an error occurs while creating the stage
 */
void addDistributeKeytabFilesStage(Cluster cluster, String clusterHostInfoJson,
                                   String hostParamsJson, Map<String, String> commandParameters,
                                   RoleCommandOrder roleCommandOrder,
                                   RequestStageContainer requestStageContainer,
                                   List<String> hosts)
    throws AmbariException {
  Stage stage = createNewStage(requestStageContainer.getLastStageId(),
      cluster,
      requestStageContainer.getId(),
      "Distribute Keytabs",
      StageUtils.getGson().toJson(commandParameters),
      hostParamsJson);
  // Only add host commands when there is at least one target host; the (possibly empty) stage
  // is appended either way.
  if (!hosts.isEmpty()) {
    Map<String, String> requestParams = new HashMap<>();
    ActionExecutionContext actionExecContext = createActionExecutionContext(
        cluster.getClusterName(),
        SET_KEYTAB,
        createRequestResourceFilters(hosts),
        requestParams,
        retryAllowed);
    customCommandExecutionHelper.addExecutionCommandsToStage(actionExecContext, stage,
        requestParams, null);
  }
  RoleGraph roleGraph = roleGraphFactory.createNew(roleCommandOrder);
  roleGraph.build(stage);
  requestStageContainer.addStages(roleGraph.getStages());
}
/**
 * Send a custom command to the KERBEROS_CLIENT to check if there are missing keytabs on each hosts.
 * <p/>
 * Note: the clusterHostInfoJson parameter is currently unused by this method.
 *
 * @param cluster               the relevant Cluster
 * @param clusterHostInfoJson   JSON-encoded clusterHostInfo structure (unused)
 * @param hostParamsJson        JSON-encoded host parameters
 * @param commandParameters     command parameters serialized into the stage
 * @param roleCommandOrder      the RoleCommandOrder used to build the RoleGraph for the new stage
 * @param requestStageContainer the container to which the generated stages are appended
 * @param hostsToInclude        the hosts on which to check for missing keytabs
 * @throws AmbariException if an error occurs while creating the stage
 */
void addCheckMissingKeytabsStage(Cluster cluster, String clusterHostInfoJson,
                                 String hostParamsJson, Map<String, String> commandParameters,
                                 RoleCommandOrder roleCommandOrder,
                                 RequestStageContainer requestStageContainer,
                                 List<String> hostsToInclude)
    throws AmbariException {
  Stage stage = createNewStage(requestStageContainer.getLastStageId(),
      cluster,
      requestStageContainer.getId(),
      "Checking keytabs",
      StageUtils.getGson().toJson(commandParameters),
      hostParamsJson);
  // Only add host commands when there is at least one target host; the (possibly empty) stage
  // is appended either way.
  if (!hostsToInclude.isEmpty()) {
    Map<String, String> requestParams = new HashMap<>();
    ActionExecutionContext actionExecContext = createActionExecutionContext(
        cluster.getClusterName(),
        CHECK_KEYTABS,
        createRequestResourceFilters(hostsToInclude),
        requestParams,
        retryAllowed);
    customCommandExecutionHelper.addExecutionCommandsToStage(actionExecContext, stage, requestParams, null);
  }
  RoleGraph roleGraph = roleGraphFactory.createNew(roleCommandOrder);
  roleGraph.build(stage);
  requestStageContainer.addStages(roleGraph.getStages());
}
/**
 * Adds a "Disable security" stage that sends the DISABLE_SECURITY custom command to every
 * service in the cluster (one host per component).
 * <p/>
 * Note: the clusterHostInfoJson parameter is currently unused by this method.
 *
 * @param cluster               the relevant Cluster
 * @param clusterHostInfoJson   JSON-encoded clusterHostInfo structure (unused)
 * @param hostParamsJson        JSON-encoded host parameters
 * @param commandParameters     command parameters serialized into the stage
 * @param roleCommandOrder      the RoleCommandOrder used to build the RoleGraph for the new stage
 * @param requestStageContainer the container to which the generated stages are appended
 * @throws AmbariException if an error occurs while creating the stage
 */
void addDisableSecurityHookStage(Cluster cluster,
                                 String clusterHostInfoJson,
                                 String hostParamsJson,
                                 Map<String, String> commandParameters,
                                 RoleCommandOrder roleCommandOrder,
                                 RequestStageContainer requestStageContainer)
    throws AmbariException {
  Stage stage = createNewStage(requestStageContainer.getLastStageId(),
      cluster,
      requestStageContainer.getId(),
      "Disable security",
      StageUtils.getGson().toJson(commandParameters),
      hostParamsJson);
  addDisableSecurityCommandToAllServices(cluster, stage);
  RoleGraph roleGraph = roleGraphFactory.createNew(roleCommandOrder);
  roleGraph.build(stage);
  requestStageContainer.addStages(roleGraph.getStages());
}
/**
 * Adds a DISABLE_SECURITY custom-command execution to the given stage for every component of
 * every service in the cluster that has at least one host.
 *
 * @param cluster the relevant Cluster
 * @param stage   the stage to which the execution commands are added
 * @throws AmbariException if an error occurs while adding the execution commands
 */
private void addDisableSecurityCommandToAllServices(Cluster cluster, Stage stage) throws AmbariException {
  for (Service service : cluster.getServices().values()) {
    for (ServiceComponent component : service.getServiceComponents().values()) {
      if (!component.getServiceComponentHosts().isEmpty()) {
        String firstHost = component.getServiceComponentHosts().keySet().iterator().next(); // it is only necessary to send it to one host
        ActionExecutionContext exec = new ActionExecutionContext(
            cluster.getClusterName(),
            "DISABLE_SECURITY",
            singletonList(new RequestResourceFilter(service.getName(), component.getName(), singletonList(firstHost))),
            Collections.emptyMap());
        customCommandExecutionHelper.addExecutionCommandsToStage(exec, stage, Collections.emptyMap(), null);
      }
    }
  }
}
/**
 * Adds a "Stopping ZooKeeper" stage that sends the STOP custom command to every ZooKeeper
 * component host. If the cluster has no ZOOKEEPER service, this method is a no-op.
 * <p/>
 * Note: the clusterHostInfoJson parameter is currently unused by this method.
 *
 * @param cluster               the relevant Cluster
 * @param clusterHostInfoJson   JSON-encoded clusterHostInfo structure (unused)
 * @param hostParamsJson        JSON-encoded host parameters
 * @param commandParameters     command parameters serialized into the stage
 * @param roleCommandOrder      the RoleCommandOrder used to build the RoleGraph for the new stage
 * @param requestStageContainer the container to which the generated stages are appended
 * @throws AmbariException if an error occurs while creating the stage
 */
void addStopZookeeperStage(Cluster cluster,
                           String clusterHostInfoJson,
                           String hostParamsJson,
                           Map<String, String> commandParameters,
                           RoleCommandOrder roleCommandOrder,
                           RequestStageContainer requestStageContainer)
    throws AmbariException {
  Service zookeeper;
  try {
    zookeeper = cluster.getService("ZOOKEEPER");
  } catch (ServiceNotFoundException e) {
    // Nothing to stop when ZooKeeper is not installed.
    return;
  }
  Stage stage = createNewStage(requestStageContainer.getLastStageId(),
      cluster,
      requestStageContainer.getId(),
      "Stopping ZooKeeper",
      StageUtils.getGson().toJson(commandParameters),
      hostParamsJson);
  for (ServiceComponent component : zookeeper.getServiceComponents().values()) {
    Set<String> hosts = component.getServiceComponentHosts().keySet();
    ActionExecutionContext exec = new ActionExecutionContext(
        cluster.getClusterName(),
        "STOP",
        singletonList(new RequestResourceFilter(zookeeper.getName(), component.getName(), new ArrayList<>(hosts))),
        Collections.emptyMap());
    customCommandExecutionHelper.addExecutionCommandsToStage(exec, stage, Collections.emptyMap(), null);
  }
  RoleGraph roleGraph = roleGraphFactory.createNew(roleCommandOrder);
  roleGraph.build(stage);
  requestStageContainer.addStages(roleGraph.getStages());
}
/**
 * Adds a "Delete Keytabs" stage that issues the REMOVE_KEYTAB custom command to the
 * KERBEROS_CLIENT on relevant hosts. Hosts are restricted to those with a valid Kerberos
 * client and (via createUniqueHostList) to HEALTHY hosts.
 * <p/>
 * Note: the clusterHostInfoJson parameter is currently unused by this method.
 *
 * @param cluster                      the relevant Cluster
 * @param serviceComponentHosts        the host components whose keytabs are to be removed
 * @param clusterHostInfoJson          JSON-encoded clusterHostInfo structure (unused)
 * @param hostParamsJson               JSON-encoded host parameters
 * @param commandParameters            command parameters serialized into the stage
 * @param roleCommandOrder             the RoleCommandOrder used to build the RoleGraph for the new stage
 * @param requestStageContainer        the container to which the generated stages are appended
 * @param hostsWithValidKerberosClient hostnames with a valid Kerberos client, used to filter the
 *                                     supplied host components
 * @throws AmbariException if an error occurs while creating the stage
 */
public void addDeleteKeytabFilesStage(Cluster cluster, List<ServiceComponentHost> serviceComponentHosts,
                                      String clusterHostInfoJson, String hostParamsJson,
                                      Map<String, String> commandParameters,
                                      RoleCommandOrder roleCommandOrder,
                                      RequestStageContainer requestStageContainer,
                                      Set<String> hostsWithValidKerberosClient)
    throws AmbariException {
  Stage stage = createNewStage(requestStageContainer.getLastStageId(),
      cluster,
      requestStageContainer.getId(),
      "Delete Keytabs",
      StageUtils.getGson().toJson(commandParameters),
      hostParamsJson);
  // Only target host components on hosts known to have a valid Kerberos client.
  Collection<ServiceComponentHost> filteredComponents = filterServiceComponentHostsForHosts(
      new ArrayList<>(serviceComponentHosts), hostsWithValidKerberosClient);
  if (!filteredComponents.isEmpty()) {
    // Further restrict to HEALTHY hosts, de-duplicated.
    List<String> hostsToUpdate = createUniqueHostList(filteredComponents, Collections.singleton(HostState.HEALTHY));
    if (!hostsToUpdate.isEmpty()) {
      Map<String, String> requestParams = new HashMap<>();
      List<RequestResourceFilter> requestResourceFilters = new ArrayList<>();
      RequestResourceFilter reqResFilter = new RequestResourceFilter("KERBEROS", "KERBEROS_CLIENT", hostsToUpdate);
      requestResourceFilters.add(reqResFilter);
      ActionExecutionContext actionExecContext = new ActionExecutionContext(
          cluster.getClusterName(),
          REMOVE_KEYTAB,
          requestResourceFilters,
          requestParams);
      customCommandExecutionHelper.addExecutionCommandsToStage(actionExecContext, stage,
          requestParams, null);
    }
  }
  RoleGraph roleGraph = roleGraphFactory.createNew(roleCommandOrder);
  roleGraph.build(stage);
  requestStageContainer.addStages(roleGraph.getStages());
}
/**
 * Adds an "Update Configurations" server-action stage that runs
 * {@link UpdateKerberosConfigsServerAction}.
 * <p/>
 * Note: the clusterHostInfoJson parameter is currently unused by this method.
 *
 * @param cluster               the relevant Cluster
 * @param clusterHostInfoJson   JSON-encoded clusterHostInfo structure (unused)
 * @param hostParamsJson        JSON-encoded host parameters
 * @param event                 a ServiceComponentHostServerActionEvent to pass to the created task
 * @param commandParameters     command parameters for the server action
 * @param roleCommandOrder      the RoleCommandOrder used to build the RoleGraph for the new stage
 * @param requestStageContainer the container to which the generated stages are appended
 * @throws AmbariException if an error occurs while creating the stage
 */
public void addUpdateConfigurationsStage(Cluster cluster, String clusterHostInfoJson,
                                         String hostParamsJson, ServiceComponentHostServerActionEvent event,
                                         Map<String, String> commandParameters,
                                         RoleCommandOrder roleCommandOrder, RequestStageContainer requestStageContainer)
    throws AmbariException {
  Stage stage = createServerActionStage(requestStageContainer.getLastStageId(),
      cluster,
      requestStageContainer.getId(),
      "Update Configurations",
      "{}",
      hostParamsJson,
      UpdateKerberosConfigsServerAction.class,
      event,
      commandParameters,
      "Update Service Configurations",
      configuration.getDefaultServerTaskTimeout());
  RoleGraph roleGraph = roleGraphFactory.createNew(roleCommandOrder);
  roleGraph.build(stage);
  requestStageContainer.addStages(roleGraph.getStages());
}
/**
 * Appends a server-side stage that runs {@link FinalizeKerberosServerAction} to finalize the
 * Kerberos operation, then sequences it via the role command graph.
 *
 * @param cluster               the relevant cluster
 * @param clusterHostInfoJson   JSON-encoded cluster host info (not used by this stage)
 * @param hostParamsJson        JSON-encoded host parameters for the stage
 * @param event                 the relevant ServiceComponentHostServerActionEvent
 * @param dataDirectory         directory holding temporary Kerberos data, or null if none
 * @param roleCommandOrder      the role command order used to sequence the stage
 * @param requestStageContainer the container collecting the stages of this request
 * @param kerberosDetails       the relevant Kerberos-specific configuration details
 * @throws AmbariException if the stage cannot be created
 */
public void addFinalizeOperationStage(Cluster cluster, String clusterHostInfoJson,
    String hostParamsJson, ServiceComponentHostServerActionEvent event,
    File dataDirectory, RoleCommandOrder roleCommandOrder,
    RequestStageContainer requestStageContainer,
    KerberosDetails kerberosDetails)
    throws AmbariException {
  // Build the parameters handed to the finalize server action.
  Map<String, String> commandParameters = new HashMap<>();
  commandParameters.put(KerberosServerAction.DEFAULT_REALM, kerberosDetails.getDefaultRealm());
  commandParameters.put(KerberosServerAction.KDC_TYPE, kerberosDetails.getKdcType().name());
  commandParameters.put(KerberosServerAction.AUTHENTICATED_USER_NAME, ambariManagementController.getAuthName());
  if (dataDirectory != null) {
    commandParameters.put(KerberosServerAction.DATA_DIRECTORY, dataDirectory.getAbsolutePath());
  }
  // NOTE(review): this stage uses a hardcoded 300-second timeout while sibling stages use
  // configuration.getDefaultServerTaskTimeout() — confirm this is intentional.
  Stage finalizeStage = createServerActionStage(
      requestStageContainer.getLastStageId(),
      cluster,
      requestStageContainer.getId(),
      "Finalize Operations",
      "{}",
      hostParamsJson,
      FinalizeKerberosServerAction.class,
      event,
      commandParameters,
      "Finalize Operations", 300);
  RoleGraph graph = roleGraphFactory.createNew(roleCommandOrder);
  graph.build(finalizeStage);
  requestStageContainer.addStages(graph.getStages());
}
/**
 * Appends a server-side stage that runs {@link CleanupServerAction} to perform Kerberization
 * clean up, then sequences it via the role command graph.
 *
 * @param cluster               the relevant cluster
 * @param clusterHostInfoJson   JSON-encoded cluster host info (not used by this stage)
 * @param hostParamsJson        JSON-encoded host parameters for the stage
 * @param event                 the relevant ServiceComponentHostServerActionEvent
 * @param commandParameters     command parameters to attach to the server action task
 * @param roleCommandOrder      the role command order used to sequence the stage
 * @param requestStageContainer the container collecting the stages of this request
 * @throws AmbariException if the stage cannot be created
 */
public void addCleanupStage(Cluster cluster, String clusterHostInfoJson,
    String hostParamsJson, ServiceComponentHostServerActionEvent event,
    Map<String, String> commandParameters,
    RoleCommandOrder roleCommandOrder, RequestStageContainer requestStageContainer)
    throws AmbariException {
  Stage cleanupStage = createServerActionStage(
      requestStageContainer.getLastStageId(),
      cluster,
      requestStageContainer.getId(),
      "Kerberization Clean Up",
      "{}",
      hostParamsJson,
      CleanupServerAction.class,
      event,
      commandParameters,
      "Kerberization Clean Up",
      configuration.getDefaultServerTaskTimeout());
  // Sequence the stage and hand it to the request container.
  RoleGraph graph = roleGraphFactory.createNew(roleCommandOrder);
  graph.build(cleanupStage);
  requestStageContainer.addStages(graph.getStages());
}
/**
 * Builds the resource filter list targeting the KERBEROS/KERBEROS_CLIENT component on the
 * given hosts.
 *
 * @param hostsToInclude host names the resulting filter should apply to
 * @return a single-element list containing the KERBEROS_CLIENT filter
 */
private List<RequestResourceFilter> createRequestResourceFilters(List<String> hostsToInclude) {
  RequestResourceFilter kerberosClientFilter = new RequestResourceFilter(
      Service.Type.KERBEROS.name(), Role.KERBEROS_CLIENT.name(), hostsToInclude);
  List<RequestResourceFilter> filters = new ArrayList<>();
  filters.add(kerberosClientFilter);
  return filters;
}
/**
 * Creates a new stage with a single task describing the ServerAction class to invoke and the other
 * task-related information.
 *
 * @param id                the new stage's id
 * @param cluster           the relevant Cluster
 * @param requestId         the relevant request Id
 * @param requestContext    a String describing the stage
 * @param commandParams     JSON-encoded command parameters
 * @param hostParams        JSON-encoded host parameters
 * @param actionClass       the ServerAction class that implements the action to invoke
 * @param event             the relevant ServiceComponentHostServerActionEvent
 * @param commandParameters a Map of command parameters to attach to the task added to the new
 *                          stage
 * @param commandDetail     a String declaring a descriptive name to pass to the action - null or an
 *                          empty string indicates no value is to be set
 * @param timeout           the timeout for the task/action
 * @return a newly created Stage
 * @throws AmbariException if an error occurs while creating the stage
 */
private Stage createServerActionStage(long id, Cluster cluster, long requestId,
String requestContext,
String commandParams, String hostParams,
Class<? extends ServerAction> actionClass,
ServiceComponentHostServerActionEvent event,
Map<String, String> commandParameters, String commandDetail,
Integer timeout) throws AmbariException {
// Create the (empty) stage, then attach the single server-side action command to it.
Stage stage = createNewStage(id, cluster, requestId, requestContext, commandParams, hostParams);
stage.addServerActionCommand(actionClass.getName(), null, Role.AMBARI_SERVER_ACTION,
RoleCommand.EXECUTE, cluster.getClusterName(), event, commandParameters, commandDetail,
ambariManagementController.findConfigurationTagsWithOverrides(cluster, null), timeout,
retryAllowed, false);
return stage;
}
/**
 * Creates an {@link ActionExecutionContext} where some of the common values are pre-initialized.
 *
 * @param clusterName     the name of the relevant cluster
 * @param commandName     the name of the command (NOTE: currently not used — see note below)
 * @param resourceFilters the resource filters to apply to the context
 * @param parameters      the command parameters for the context
 * @param retryAllowed    whether failed commands created from this context may be retried
 * @return a new, partially initialized ActionExecutionContext
 */
private ActionExecutionContext createActionExecutionContext(String clusterName,
String commandName, List<RequestResourceFilter> resourceFilters,
Map<String, String> parameters, boolean retryAllowed) {
// NOTE(review): the commandName parameter is ignored — the context is always built with the
// SET_KEYTAB constant. Confirm whether any caller passes a different command name; if so,
// this silently issues the wrong command.
ActionExecutionContext actionExecContext = new ActionExecutionContext(clusterName, SET_KEYTAB,
resourceFilters, parameters);
actionExecContext.setRetryAllowed(retryAllowed);
return actionExecContext;
}
}
/**
 * EnableKerberosHandler is an implementation of the Handler interface used to enable Kerberos
 * on the relevant cluster.
 * <p/>
 * To complete the process, this implementation creates the following stages:
 * <ol>
 * <li>create principals</li>
 * <li>create keytab files</li>
 * <li>distribute keytab files to the appropriate hosts</li>
 * <li>update relevant configurations</li>
 * </ol>
 */
private class EnableKerberosHandler extends Handler {
// Builds the ordered set of stages needed to enable Kerberos and returns the id of the last
// stage added to the request. Stage order matters: prepare -> principals -> keytabs ->
// distribute -> update configurations.
@Override
public long createStages(Cluster cluster,
String clusterHostInfoJson, String hostParamsJson,
ServiceComponentHostServerActionEvent event,
RoleCommandOrder roleCommandOrder, KerberosDetails kerberosDetails,
File dataDirectory, RequestStageContainer requestStageContainer,
List<ServiceComponentHost> serviceComponentHosts,
Map<String, ? extends Collection<String>> serviceComponentFilter,
Set<String> hostFilter, Collection<String> identityFilter, Set<String> hostsWithValidKerberosClient)
throws AmbariException {
// If there are principals, keytabs, and configurations to process, setup the following stages:
// 1) prepare identities
// 2) generate principals
// 3) generate keytab files
// 4) distribute keytab files
// 5) update configurations
// If a RequestStageContainer does not already exist, create a new one...
if (requestStageContainer == null) {
requestStageContainer = new RequestStageContainer(
actionManager.getNextRequestId(),
null,
requestFactory,
actionManager);
}
// Common parameters shared by all server actions in this request.
Map<String, String> commandParameters = new HashMap<>();
commandParameters.put(KerberosServerAction.AUTHENTICATED_USER_NAME, ambariManagementController.getAuthName());
commandParameters.put(KerberosServerAction.UPDATE_CONFIGURATION_NOTE, "Enabling Kerberos");
commandParameters.put(KerberosServerAction.UPDATE_CONFIGURATIONS, "true");
commandParameters.put(KerberosServerAction.DEFAULT_REALM, kerberosDetails.getDefaultRealm());
commandParameters.put(KerberosServerAction.INCLUDE_AMBARI_IDENTITY, (kerberosDetails.createAmbariPrincipal()) ? "true" : "false");
commandParameters.put(KerberosServerAction.PRECONFIGURE_SERVICES, kerberosDetails.getPreconfigureServices());
if (dataDirectory != null) {
commandParameters.put(KerberosServerAction.DATA_DIRECTORY, dataDirectory.getAbsolutePath());
}
// Optional filters are serialized to JSON and passed through the command parameters.
if (serviceComponentFilter != null) {
commandParameters.put(KerberosServerAction.SERVICE_COMPONENT_FILTER, StageUtils.getGson().toJson(serviceComponentFilter));
}
if (hostFilter != null) {
commandParameters.put(KerberosServerAction.HOST_FILTER, StageUtils.getGson().toJson(hostFilter));
}
if (identityFilter != null) {
commandParameters.put(KerberosServerAction.IDENTITY_FILTER, StageUtils.getGson().toJson(identityFilter));
}
// *****************************************************************
// Create stage to prepare operations
addPrepareEnableKerberosOperationsStage(cluster, clusterHostInfoJson, hostParamsJson, event, commandParameters,
roleCommandOrder, requestStageContainer);
// Identity-management stages are only needed when Ambari manages the KDC identities.
if (kerberosDetails.manageIdentities()) {
List<String> hostsToInclude = calculateHosts(cluster, serviceComponentHosts, hostsWithValidKerberosClient, false);
commandParameters.put(KerberosServerAction.KDC_TYPE, kerberosDetails.getKdcType().name());
// *****************************************************************
// Create stage to create principals
addCreatePrincipalsStage(cluster, clusterHostInfoJson, hostParamsJson, event, commandParameters,
roleCommandOrder, requestStageContainer);
// *****************************************************************
// Create stage to generate keytabs
addCreateKeytabFilesStage(cluster, clusterHostInfoJson, hostParamsJson, event, commandParameters,
roleCommandOrder, requestStageContainer);
// *****************************************************************
// Create stage to distribute and configure keytab for Ambari server and configure JAAS
if (kerberosDetails.createAmbariPrincipal()) {
addConfigureAmbariIdentityStage(cluster, clusterHostInfoJson, hostParamsJson, event, commandParameters,
roleCommandOrder, requestStageContainer);
}
// *****************************************************************
// Create stage to distribute keytabs
addDistributeKeytabFilesStage(cluster, clusterHostInfoJson, hostParamsJson, commandParameters,
roleCommandOrder, requestStageContainer, hostsToInclude);
}
// *****************************************************************
// Create stage to update configurations of services
addUpdateConfigurationsStage(cluster, clusterHostInfoJson, hostParamsJson, event, commandParameters,
roleCommandOrder, requestStageContainer);
return requestStageContainer.getLastStageId();
}
}
/**
 * DisableKerberosHandler is an implementation of the Handler interface used to disable Kerberos
 * on the relevant cluster.
 * <p/>
 * To complete the process, this implementation creates the following stages:
 * <ol>
 * <li>update relevant configurations</li>
 * <li>delete keytab files</li>
 * <li>remove principals</li>
 * <li>restart services</li>
 * </ol>
 */
private class DisableKerberosHandler extends Handler {
// Builds the ordered set of stages needed to disable Kerberos and returns the id of the last
// stage added to the request.
@Override
public long createStages(Cluster cluster,
String clusterHostInfoJson, String hostParamsJson,
ServiceComponentHostServerActionEvent event,
RoleCommandOrder roleCommandOrder, KerberosDetails kerberosDetails,
File dataDirectory, RequestStageContainer requestStageContainer,
List<ServiceComponentHost> serviceComponentHosts,
Map<String, ? extends Collection<String>> serviceComponentFilter, Set<String> hostFilter, Collection<String> identityFilter, Set<String> hostsWithValidKerberosClient) throws AmbariException {
// 1) revert configurations
// If a RequestStageContainer does not already exist, create a new one...
if (requestStageContainer == null) {
requestStageContainer = new RequestStageContainer(
actionManager.getNextRequestId(),
null,
requestFactory,
actionManager);
}
// Common parameters shared by all server actions in this request.
Map<String, String> commandParameters = new HashMap<>();
commandParameters.put(KerberosServerAction.AUTHENTICATED_USER_NAME, ambariManagementController.getAuthName());
commandParameters.put(KerberosServerAction.UPDATE_CONFIGURATION_NOTE, "Disabling Kerberos");
commandParameters.put(KerberosServerAction.UPDATE_CONFIGURATIONS, "true");
commandParameters.put(KerberosServerAction.DEFAULT_REALM, kerberosDetails.getDefaultRealm());
if (dataDirectory != null) {
commandParameters.put(KerberosServerAction.DATA_DIRECTORY, dataDirectory.getAbsolutePath());
}
// Optional filters are serialized to JSON and passed through the command parameters.
if (serviceComponentFilter != null) {
commandParameters.put(KerberosServerAction.SERVICE_COMPONENT_FILTER, StageUtils.getGson().toJson(serviceComponentFilter));
}
if (hostFilter != null) {
commandParameters.put(KerberosServerAction.HOST_FILTER, StageUtils.getGson().toJson(hostFilter));
}
if (identityFilter != null) {
commandParameters.put(KerberosServerAction.IDENTITY_FILTER, StageUtils.getGson().toJson(identityFilter));
}
// Run the disable-security hook and stop ZooKeeper before reverting configurations.
addDisableSecurityHookStage(cluster, clusterHostInfoJson, hostParamsJson, commandParameters,
roleCommandOrder, requestStageContainer);
addStopZookeeperStage(cluster, clusterHostInfoJson, hostParamsJson, commandParameters,
roleCommandOrder, requestStageContainer);
// *****************************************************************
// Create stage to prepare operations
addPrepareDisableKerberosOperationsStage(cluster, clusterHostInfoJson, hostParamsJson, event, commandParameters,
roleCommandOrder, requestStageContainer);
// *****************************************************************
// Create stage to update configurations of services
addUpdateConfigurationsStage(cluster, clusterHostInfoJson, hostParamsJson, event, commandParameters,
roleCommandOrder, requestStageContainer);
// Identity-removal stages are only needed when Ambari manages the KDC identities.
if (kerberosDetails.manageIdentities()) {
commandParameters.put(KerberosServerAction.KDC_TYPE, kerberosDetails.getKdcType().name());
// *****************************************************************
// Create stage to remove principals
addDestroyPrincipalsStage(cluster, clusterHostInfoJson, hostParamsJson, event, commandParameters,
roleCommandOrder, requestStageContainer);
// *****************************************************************
// Create stage to delete keytabs
addDeleteKeytabFilesStage(cluster, serviceComponentHosts, clusterHostInfoJson,
hostParamsJson, commandParameters, roleCommandOrder, requestStageContainer, hostsWithValidKerberosClient);
}
// *****************************************************************
// Create stage to perform data cleanups (e.g. kerberos descriptor artifact database leftovers)
addCleanupStage(cluster, clusterHostInfoJson, hostParamsJson, event, commandParameters,
roleCommandOrder, requestStageContainer);
return requestStageContainer.getLastStageId();
}
}
/**
 * Converts a service/component filter map into a {@link JsonObject} where each key is a service
 * name mapped to a JSON array of its component names, or to JSON null when no components were
 * specified for that service.
 *
 * @param serviceComponentFilter the filter to convert; may be null
 * @return the JSON representation of the filter, or null when the filter itself is null
 */
private JsonObject serviceFilterToJsonObject(Map<String, ? extends Collection<String>> serviceComponentFilter) {
  // Fix: the original serialized the map via Gson into an unused local variable ("test")
  // before the null check — dead work removed, null check hoisted to the top.
  if (serviceComponentFilter == null) {
    return null;
  }
  JsonObject serviceFilter = new JsonObject();
  for (Map.Entry<String, ? extends Collection<String>> filterEntry : serviceComponentFilter.entrySet()) {
    Collection<String> componentNames = filterEntry.getValue();
    if (componentNames == null) {
      // A null value means no component restriction was specified for this service.
      serviceFilter.add(filterEntry.getKey(), null);
    } else {
      JsonArray components = new JsonArray();
      for (String component : componentNames) {
        components.add(new JsonPrimitive(component));
      }
      serviceFilter.add(filterEntry.getKey(), components);
    }
  }
  return serviceFilter;
}
/**
 * CreatePrincipalsAndKeytabsHandler is an implementation of the Handler interface used to create
 * principals and keytabs and distribute them throughout the cluster. This is similar to enabling
 * Kerberos however no states or configurations will be updated.
 * <p/>
 * To complete the process, this implementation creates the following stages:
 * <ol>
 * <li>create principals</li>
 * <li>create keytab files</li>
 * <li>distribute keytab files to the appropriate hosts</li>
 * </ol>
 */
private class CreatePrincipalsAndKeytabsHandler extends Handler {
/**
 * The type of Kerberos operation being performed.
 *
 * @see org.apache.ambari.server.serveraction.kerberos.KerberosServerAction.OperationType
 */
private KerberosServerAction.OperationType operationType;
/**
 * A boolean value indicating whether to update service configurations (<code>true</code>)
 * or ignore any potential configuration changes (<code>false</code>).
 */
private boolean updateConfigurations;
/**
 * A boolean value indicating whether to include all hosts (<code>true</code>) when setting up
 * agent-side tasks or to select only the hosts found to be relevant (<code>false</code>).
 * <p>
 * This is useful if we do not know beforehand, which hosts need to be involved in the operation.
 */
private boolean forceAllHosts;
/**
 * A boolean value indicating whether to include Ambari server identity (<code>true</code>)
 * or ignore it (<code>false</code>).
 */
private boolean includeAmbariIdentity;
/**
 * CreatePrincipalsAndKeytabsHandler constructor to set whether this instance should be used to
 * regenerate all keytabs or just the ones that have not been distributed
 *
 * @param operationType The type of Kerberos operation being performed
 * @param updateConfigurations A boolean value indicating whether to update service configurations
 *                             (<code>true</code>) or ignore any potential configuration changes
 * @param forceAllHosts A boolean value indicating whether to include all hosts (<code>true</code>)
 *                      when setting up agent-side tasks or to select only the hosts found to be
 *                      relevant (<code>false</code>)
 * @param includeAmbariIdentity A boolean value indicating whether to include Ambari server
 *                              identity (<code>true</code>) or ignore it (<code>false</code>)
 */
CreatePrincipalsAndKeytabsHandler(KerberosServerAction.OperationType operationType, boolean updateConfigurations,
boolean forceAllHosts, boolean includeAmbariIdentity) {
this.operationType = operationType;
this.updateConfigurations = updateConfigurations;
this.forceAllHosts = forceAllHosts;
this.includeAmbariIdentity = includeAmbariIdentity;
}
// Builds the ordered set of stages needed to (re)create principals and keytabs and returns
// the id of the last stage added to the request.
@Override
public long createStages(Cluster cluster,
String clusterHostInfoJson, String hostParamsJson,
ServiceComponentHostServerActionEvent event,
RoleCommandOrder roleCommandOrder, KerberosDetails kerberosDetails,
File dataDirectory, RequestStageContainer requestStageContainer,
List<ServiceComponentHost> serviceComponentHosts,
Map<String, ? extends Collection<String>> serviceComponentFilter,
Set<String> hostFilter, Collection<String> identityFilter, Set<String> hostsWithValidKerberosClient)
throws AmbariException {
// If there are principals and keytabs to process, setup the following stages:
// 1) prepare identities
// 2) generate principals
// 3) generate keytab files
// 4) distribute keytab files
// 5) update configurations (optional)
// If a RequestStageContainer does not already exist, create a new one...
if (requestStageContainer == null) {
requestStageContainer = new RequestStageContainer(
actionManager.getNextRequestId(),
null,
requestFactory,
actionManager);
}
// Whether the Ambari server's own identity participates; may be narrowed by the filters below.
boolean processAmbariIdentity = includeAmbariIdentity;
Map<String, String> commandParameters = new HashMap<>();
commandParameters.put(KerberosServerAction.AUTHENTICATED_USER_NAME, ambariManagementController.getAuthName());
commandParameters.put(KerberosServerAction.DEFAULT_REALM, kerberosDetails.getDefaultRealm());
if (dataDirectory != null) {
commandParameters.put(KerberosServerAction.DATA_DIRECTORY, dataDirectory.getAbsolutePath());
}
if (serviceComponentFilter != null) {
commandParameters.put(KerberosServerAction.SERVICE_COMPONENT_FILTER, StageUtils.getGson().toJson(serviceComponentFilter));
// NOTE(review): the last clause looks up the literal key "AMBARI" instead of
// RootService.AMBARI.name() — equivalent today, but confirm and prefer the constant.
processAmbariIdentity = serviceComponentFilter.containsKey(RootService.AMBARI.name()) &&
((serviceComponentFilter.get(RootService.AMBARI.name()) == null) || serviceComponentFilter.get(RootService.AMBARI.name()).contains("*") || serviceComponentFilter.get("AMBARI").contains(RootComponent.AMBARI_SERVER.name()));
}
if (hostFilter != null) {
commandParameters.put(KerberosServerAction.HOST_FILTER, StageUtils.getGson().toJson(hostFilter));
// NOTE(review): this overwrites (rather than ANDs with) the value derived from the
// service/component filter above — confirm that host filter precedence is intended.
processAmbariIdentity = hostFilter.contains("*") || hostFilter.contains(StageUtils.getHostName());
}
if (identityFilter != null) {
commandParameters.put(KerberosServerAction.IDENTITY_FILTER, StageUtils.getGson().toJson(identityFilter));
}
commandParameters.put(KerberosServerAction.OPERATION_TYPE, (operationType == null) ? KerberosServerAction.OperationType.DEFAULT.name() : operationType.name());
commandParameters.put(KerberosServerAction.INCLUDE_AMBARI_IDENTITY, (processAmbariIdentity) ? "true" : "false");
if (updateConfigurations) {
commandParameters.put(KerberosServerAction.UPDATE_CONFIGURATION_NOTE, "Updated Kerberos-related configurations");
commandParameters.put(KerberosServerAction.UPDATE_CONFIGURATIONS, "true");
}
List<String> hostsToInclude = calculateHosts(cluster, serviceComponentHosts, hostsWithValidKerberosClient, forceAllHosts);
// *****************************************************************
// Create stage to prepare the identities
addPrepareKerberosIdentitiesStage(cluster, clusterHostInfoJson, hostParamsJson, event,
commandParameters, roleCommandOrder, requestStageContainer);
if (kerberosDetails.manageIdentities()) {
commandParameters.put(KerberosServerAction.KDC_TYPE, kerberosDetails.getKdcType().name());
// When not recreating everything, first check which keytabs are actually missing.
if (operationType != KerberosServerAction.OperationType.RECREATE_ALL) {
addCheckMissingKeytabsStage(cluster, clusterHostInfoJson, hostParamsJson,
commandParameters, roleCommandOrder, requestStageContainer, hostsToInclude);
}
// *****************************************************************
// Create stage to create principals
addCreatePrincipalsStage(cluster, clusterHostInfoJson, hostParamsJson, event,
commandParameters, roleCommandOrder, requestStageContainer);
// *****************************************************************
// Create stage to generate keytabs
addCreateKeytabFilesStage(cluster, clusterHostInfoJson, hostParamsJson, event,
commandParameters, roleCommandOrder, requestStageContainer);
// *****************************************************************
// Create stage to distribute and configure keytab for Ambari server and configure JAAS
if (processAmbariIdentity && kerberosDetails.createAmbariPrincipal()) {
addConfigureAmbariIdentityStage(cluster, clusterHostInfoJson, hostParamsJson, event, commandParameters,
roleCommandOrder, requestStageContainer);
}
// *****************************************************************
// Create stage to distribute keytabs
addDistributeKeytabFilesStage(cluster, clusterHostInfoJson, hostParamsJson, commandParameters,
roleCommandOrder, requestStageContainer, hostsToInclude);
}
if (updateConfigurations) {
// *****************************************************************
// Create stage to update configurations of services
addUpdateConfigurationsStage(cluster, clusterHostInfoJson, hostParamsJson, event, commandParameters,
roleCommandOrder, requestStageContainer);
}
return requestStageContainer.getLastStageId();
}
}
/**
 * Retains only the ServiceComponentHosts that reside on one of the named hosts; entries on any
 * other host are removed from the supplied collection.
 * <p/>
 * The supplied collection must be modifiable — it is filtered in place and returned. If either
 * argument is null, the collection is returned unmodified.
 *
 * @param serviceComponentHosts a modifiable collection of ServiceComponentHost items to filter
 * @param hosts                 the set of host names whose entries should be kept
 * @return the same collection, filtered in place
 */
private Collection<ServiceComponentHost> filterServiceComponentHostsForHosts(Collection<ServiceComponentHost> serviceComponentHosts,
                                                                             Set<String> hosts) {
  if ((serviceComponentHosts == null) || (hosts == null)) {
    return serviceComponentHosts;
  }
  for (Iterator<ServiceComponentHost> it = serviceComponentHosts.iterator(); it.hasNext(); ) {
    // Drop any component whose host is not in the allowed set.
    if (!hosts.contains(it.next().getHostName())) {
      it.remove();
    }
  }
  return serviceComponentHosts;
}
/**
 * Determines the list of host names to target when issuing agent-side commands.
 * <p>
 * If forcing all hosts, selects every healthy host in the cluster; otherwise selects only the
 * healthy hosts derived from the supplied set of relevant {@link ServiceComponentHost}s that
 * also have a working Kerberos client installed.
 *
 * @param cluster                      the cluster
 * @param serviceComponentHosts        a collection of {@link ServiceComponentHost}s relevant to
 *                                     the current operation
 * @param hostsWithValidKerberosClient the collection of hosts known to have the Kerberos client
 *                                     component installed
 * @param forceAllHosts                true to consider all cluster hosts rather than the hosts
 *                                     parsed from the set of {@link ServiceComponentHost}s
 * @return a filtered list of host names
 * @throws AmbariException if the host list cannot be computed
 */
private List<String> calculateHosts(Cluster cluster, List<ServiceComponentHost> serviceComponentHosts, Set<String> hostsWithValidKerberosClient, boolean forceAllHosts) throws AmbariException {
  if (forceAllHosts) {
    // Ignore the supplied components and target every healthy host in the cluster.
    List<String> healthyHosts = new ArrayList<>();
    Collection<Host> clusterHosts = cluster.getHosts();
    if (!CollectionUtils.isEmpty(clusterHosts)) {
      for (Host clusterHost : clusterHosts) {
        if (clusterHost.getState() == HostState.HEALTHY) {
          healthyHosts.add(clusterHost.getHostName());
        }
      }
    }
    return healthyHosts;
  }
  // Restrict to hosts known to have a working Kerberos client, then keep only healthy ones.
  Collection<ServiceComponentHost> relevantComponents = filterServiceComponentHostsForHosts(
      new ArrayList<>(serviceComponentHosts), hostsWithValidKerberosClient);
  return relevantComponents.isEmpty()
      ? Collections.<String>emptyList()
      : createUniqueHostList(relevantComponents, Collections.singleton(HostState.HEALTHY));
}
/**
 * DeletePrincipalsAndKeytabsHandler is an implementation of the Handler interface used to delete
 * principals and keytabs throughout the cluster.
 * <p/>
 * To complete the process, this implementation creates the following stages:
 * <ol>
 * <li>delete principals</li>
 * <li>remove keytab files</li>
 * </ol>
 */
private class DeletePrincipalsAndKeytabsHandler extends Handler {
// Builds the ordered set of stages needed to delete principals and keytabs and returns the id
// of the last stage added to the request. A no-op when Ambari does not manage identities.
@Override
public long createStages(Cluster cluster,
String clusterHostInfoJson, String hostParamsJson,
ServiceComponentHostServerActionEvent event,
RoleCommandOrder roleCommandOrder, KerberosDetails kerberosDetails,
File dataDirectory, RequestStageContainer requestStageContainer,
List<ServiceComponentHost> serviceComponentHosts,
Map<String, ? extends Collection<String>> serviceComponentFilter, Set<String> hostFilter, Collection<String> identityFilter, Set<String> hostsWithValidKerberosClient)
throws AmbariException {
// If a RequestStageContainer does not already exist, create a new one...
if (requestStageContainer == null) {
requestStageContainer = new RequestStageContainer(
actionManager.getNextRequestId(),
null,
requestFactory,
actionManager);
}
if (kerberosDetails.manageIdentities()) {
// If there are principals and keytabs to process, setup the following stages:
// 1) prepare
// 2) delete principals
// 3) delete keytab files
Map<String, String> commandParameters = new HashMap<>();
commandParameters.put(KerberosServerAction.AUTHENTICATED_USER_NAME, ambariManagementController.getAuthName());
commandParameters.put(KerberosServerAction.DEFAULT_REALM, kerberosDetails.getDefaultRealm());
if (dataDirectory != null) {
commandParameters.put(KerberosServerAction.DATA_DIRECTORY, dataDirectory.getAbsolutePath());
}
// Optional filters are serialized to JSON and passed through the command parameters.
if (serviceComponentFilter != null) {
commandParameters.put(KerberosServerAction.SERVICE_COMPONENT_FILTER, StageUtils.getGson().toJson(serviceComponentFilter));
}
if (hostFilter != null) {
commandParameters.put(KerberosServerAction.HOST_FILTER, StageUtils.getGson().toJson(hostFilter));
}
if (identityFilter != null) {
commandParameters.put(KerberosServerAction.IDENTITY_FILTER, StageUtils.getGson().toJson(identityFilter));
}
commandParameters.put(KerberosServerAction.KDC_TYPE, kerberosDetails.getKdcType().name());
// *****************************************************************
// Create stage to prepare the identities
addPrepareKerberosIdentitiesStage(cluster, clusterHostInfoJson, hostParamsJson, event,
commandParameters, roleCommandOrder, requestStageContainer);
// *****************************************************************
// Create stage to delete principals
addDestroyPrincipalsStage(cluster, clusterHostInfoJson, hostParamsJson, event,
commandParameters, roleCommandOrder, requestStageContainer);
// *****************************************************************
// Create stage to delete keytabs
addDeleteKeytabFilesStage(cluster, serviceComponentHosts, clusterHostInfoJson,
hostParamsJson, commandParameters, roleCommandOrder, requestStageContainer, hostsWithValidKerberosClient);
}
return requestStageContainer.getLastStageId();
}
}
/**
 * KerberosDetails is a simple value holder for the Kerberos-specific configuration details
 * needed while orchestrating Kerberos operations, so they may be passed around more easily.
 */
private static class KerberosDetails {
  // The default Kerberos realm (e.g. EXAMPLE.COM).
  private String defaultRealm;
  // The type of the backing KDC.
  private KDCType kdcType;
  // Raw kerberos-env property map; consulted for fallback answers below.
  private Map<String, String> kerberosEnvProperties;
  // The cluster's desired security type.
  private SecurityType securityType;
  // Tri-state: null means "not explicitly set, fall back to kerberos-env".
  private Boolean manageIdentities;

  public void setDefaultRealm(String defaultRealm) {
    this.defaultRealm = defaultRealm;
  }

  public String getDefaultRealm() {
    return defaultRealm;
  }

  public void setKdcType(KDCType kdcType) {
    this.kdcType = kdcType;
  }

  public KDCType getKdcType() {
    return kdcType;
  }

  public void setKerberosEnvProperties(Map<String, String> kerberosEnvProperties) {
    this.kerberosEnvProperties = kerberosEnvProperties;
  }

  public Map<String, String> getKerberosEnvProperties() {
    return kerberosEnvProperties;
  }

  public void setSecurityType(SecurityType securityType) {
    this.securityType = securityType;
  }

  public SecurityType getSecurityType() {
    return securityType;
  }

  /**
   * Indicates whether Ambari should manage Kerberos identities. When not explicitly set, falls
   * back to the kerberos-env property, defaulting to true unless that property is literally
   * "false" (case-insensitive).
   */
  public boolean manageIdentities() {
    if (manageIdentities != null) {
      return manageIdentities;
    }
    return (kerberosEnvProperties == null)
        || !"false".equalsIgnoreCase(kerberosEnvProperties.get(MANAGE_IDENTITIES));
  }

  public void setManageIdentities(Boolean manageIdentities) {
    this.manageIdentities = manageIdentities;
  }

  /**
   * Indicates whether the Ambari server's own principal should be created; defaults to true
   * unless the kerberos-env property is literally "false" (case-insensitive).
   */
  public boolean createAmbariPrincipal() {
    return (kerberosEnvProperties == null)
        || !"false".equalsIgnoreCase(kerberosEnvProperties.get(CREATE_AMBARI_PRINCIPAL));
  }

  /**
   * Returns the configured preconfigure-services value, or an empty string when no kerberos-env
   * properties are available.
   */
  public String getPreconfigureServices() {
    return (kerberosEnvProperties == null) ? "" : kerberosEnvProperties.get(PRECONFIGURE_SERVICES);
  }
}
}
|
AMBARI-24063. ZooKeeper Server Start fail during cluster deployment via api (amagyar) (#1498)
|
ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelperImpl.java
|
AMBARI-24063. ZooKeeper Server Start fail during cluster deployment via api (amagyar) (#1498)
|
|
Java
|
apache-2.0
|
51b0a81a53218cfad4b2684a837dc8e7f0e7e562
| 0
|
inbloom/secure-data-service,inbloom/secure-data-service,inbloom/secure-data-service,inbloom/secure-data-service,inbloom/secure-data-service
|
/*
* Copyright 2012 Shared Learning Collaborative, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.slc.sli.dashboard.client;
import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.commons.lang.StringUtils;
import org.slc.sli.api.client.SLIClient;
import org.slc.sli.dashboard.entity.ConfigMap;
import org.slc.sli.dashboard.entity.GenericEntity;
import org.slc.sli.dashboard.entity.util.GenericEntityEnhancer;
import org.slc.sli.dashboard.util.Constants;
import org.slc.sli.dashboard.util.ExecutionTimeLogger;
import org.slc.sli.dashboard.util.JsonConverter;
import org.slc.sli.dashboard.util.SecurityUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* This client will use the SDK client to communicate with the SLI API.
*
* @author dwalker
* @author rbloh
* @author iivanisevic
*
*/
public class SDKAPIClient implements APIClient {
private static final Logger LOGGER = LoggerFactory.getLogger(SDKAPIClient.class);
private SLIClient sdkClient;
private String gracePeriod;
/**
 * Wrapper for a value stored in the custom store - the value is expected to be a JSON object
 * rather than a primitive.
 */
public static class CustomEntityWrapper {
// Raw JSON payload, kept as a String. Package-private; presumably read elsewhere in this
// file — confirm before tightening visibility or making it final.
String value;
public CustomEntityWrapper(String value) {
this.value = value;
}
}
/*
* *****************************************************
* API Client Interface Methods
* *****************************************************
*/
/**
 * Set the SDK client used to communicate with the SLI API.
 *
 * @param sdkClient the SLIClient instance to use
 */
public void setSdkClient(SLIClient sdkClient) {
this.sdkClient = sdkClient;
}
/**
 * Get the SDK client used to communicate with the SLI API.
 *
 * @return the configured SLIClient instance
 */
@Override
public SLIClient getSdkClient() {
return sdkClient;
}
/**
 * Set the SLI configured grace period for historical access.
 *
 * @param gracePeriod the grace period value, as configured in SLI
 */
public void setGracePeriod(String gracePeriod) {
this.gracePeriod = gracePeriod;
}
/**
 * Get the SLI configured grace period for historical access.
 *
 * @return the configured grace period value
 */
// NOTE(review): @Override is commented out — confirm whether this method is (or should be)
// part of the APIClient interface.
// @Override
public String getGracePeriod() {
return this.gracePeriod;
}
/**
 * Get a resource entity of a specified type which is identified by id and enriched using
 * optional parameters.
 *
 * @param token  the security token
 * @param type   the entity type (path segment)
 * @param id     the entity id
 * @param params optional query-string parameters
 * @return the entity read from the API
 */
@Override
public GenericEntity getEntity(String token, String type, String id, Map<String, String> params) {
  // Build the resource URL: /<type>/<id>?<query>
  String url = "/" + type + "/" + id + "?" + this.buildQueryString(params);
  return this.readEntity(token, url, id);
}
/**
 * Get a list of resource entities of the specified type, identified by a
 * comma-separated id string and enriched with optional query parameters.
 *
 * @param token  authentication token for the API call
 * @param type   resource type segment of the URL
 * @param ids    comma-separated entity ids; empty yields an empty list
 * @param params optional query parameters; may be null
 * @return the matching entities
 */
@Override
public List<GenericEntity> getEntities(String token, String type, String ids, Map<String, String> params) {
return this.readEntityList(token, "/" + type + "/" + ids + "?" + this.buildQueryString(params), ids);
}
/**
 * Get the current user's home entity, which carries the user's links.
 *
 * @param token authentication token for the API call
 * @return the home entity, or null when the read fails
 */
@Override
public GenericEntity getHome(String token) {
return this.readEntity(token, SDKConstants.HOME_ENTITY);
}
/**
 * Resolve the current user's unique identifier by scanning the links on the
 * user's home entity for the "self" relationship.
 *
 * @param token authentication token for the API call
 * @return the id parsed from the self link, or null when unavailable
 */
@Override
public String getId(String token) {
GenericEntity home = this.getHome(token);
if (home == null) {
    return null;
}
String resolvedId = null;
for (Map link : (List<Map>) (home.get(Constants.ATTR_LINKS))) {
    // The last matching "self" link wins, mirroring previous behavior.
    if (link.get(Constants.ATTR_REL).equals(Constants.ATTR_SELF)) {
        resolvedId = parseId(link);
    }
}
return resolvedId;
}
/**
 * Retrieve the dashboard configuration stored as EdOrg custom data.
 *
 * @param token authentication token for the API call
 * @param id    education organization id
 * @return the parsed ConfigMap, or null when no custom data exists
 */
@Override
public ConfigMap getEdOrgCustomData(String token, String id) {
GenericEntity ge = (GenericEntity) readCustomEntity(token, SDKConstants.EDORGS_ENTITY + id + SDKConstants.CUSTOM_DATA, GenericEntity.class);
// readCustomEntity returns null on error or an empty result; previously this
// dereferenced ge unconditionally and threw a NullPointerException.
if (ge == null) {
    return null;
}
return JsonConverter.fromJson((String) ge.get("config"), ConfigMap.class);
}
/**
 * Store the dashboard configuration as EdOrg custom data, serializing the
 * ConfigMap to JSON under the "config" key.
 *
 * @param token     authentication token for the API call
 * @param id        education organization id
 * @param configMap configuration to persist
 */
@Override
public void putEdOrgCustomData(String token, String id, ConfigMap configMap) {
GenericEntity payload = new GenericEntity();
payload.put("config", JsonConverter.toJson(configMap));
this.createEntity(token, SDKConstants.EDORGS_ENTITY + id + SDKConstants.CUSTOM_DATA, payload);
}
/**
 * Get a list of educational organizations using a list of ids.
 *
 * @param token  authentication token for the API call
 * @param ids    edOrg ids to fetch; null/empty yields an empty list
 * @param params optional query parameters; may be null
 * @return the matching edOrg entities
 */
@Override
public List<GenericEntity> getEducationalOrganizations(String token, List<String> ids, Map<String, String> params) {
return this.readEntityList(token,
SDKConstants.EDORGS_ENTITY + buildListString(ids) + "?" + this.buildQueryString(params), ids);
}
/**
 * Get all education organizations assigned to a staff member via the
 * staff-edOrg assignment association.
 *
 * @param token   authentication token for the API call
 * @param staffId the staff member's unique identifier
 * @return list of assigned edOrg entities; empty when staffId is empty
 */
@Override
public List<GenericEntity> getEducationOrganizationsForStaff(String token, String staffId) {
return this.readEntityList(token, SDKConstants.STAFF_ENTITY + staffId
+ SDKConstants.STAFF_EDORG_ASSIGNMENT_ASSOC + SDKConstants.EDORGS, staffId);
}
/**
 * Get an educational organization identified by id.
 *
 * @param token authentication token for the API call
 * @param id    the edOrg's unique identifier
 * @return the edOrg entity, or null when id is empty or the read fails
 */
@Override
public GenericEntity getEducationalOrganization(String token, String id) {
return this.readEntity(token, SDKConstants.EDORGS_ENTITY + id, id);
}
/**
 * Get the education organization for a staff member, selected by organization
 * category; when no category is supplied, the first association is used.
 *
 * NOTE(review): when a category is given, a matching edOrg does not terminate
 * the outer loop — a later edOrg with the same category overwrites an earlier
 * one, so the LAST matching organization is returned. Confirm this is intended.
 *
 * @param token                authentication token for the API call
 * @param staffId              the staff member's unique identifier
 * @param organizationCategory desired organizationCategory value, or null/empty
 * @return the selected edOrg entity, or null when none matches
 */
@Override
public GenericEntity getEducationOrganizationForStaff(String token, String staffId, String organizationCategory) {
GenericEntity staffEdOrg = null;
List<GenericEntity> edOrgs = this.readEntityList(token, SDKConstants.STAFF_ENTITY + staffId
+ SDKConstants.STAFF_EDORG_ASSIGNMENT_ASSOC + SDKConstants.EDORGS, staffId);
if ((organizationCategory != null) && (organizationCategory.length() > 0)) {
for (GenericEntity edOrg : edOrgs) {
List<String> edOrgCategories = (List<String>) edOrg.get(Constants.ATTR_ORG_CATEGORIES);
if (edOrgCategories != null && edOrgCategories.size() > 0) {
for (String edOrgCategory : edOrgCategories) {
if (edOrgCategory.equals(organizationCategory)) {
staffEdOrg = edOrg;
// Breaks only the category loop; outer edOrg loop continues.
break;
}
}
}
}
} else if (edOrgs.size() > 0) {
staffEdOrg = edOrgs.get(0);
}
return staffEdOrg;
}
/**
 * Fetch the parent education organizations for the supplied edOrgs by
 * collecting their parentEducationAgencyReference attributes.
 *
 * @param token                     authentication token for the API call
 * @param educationalOrganizations  edOrgs whose parents are wanted
 * @return the parent edOrg entities; empty when no parents are referenced
 */
@Override
public List<GenericEntity> getParentEducationalOrganizations(String token,
List<GenericEntity> educationalOrganizations) {
List<String> parentIds = this.extractAttributesFromEntities(educationalOrganizations, Constants.ATTR_PARENT_EDORG);
return this.getEducationalOrganizations(token, parentIds, null);
}
/**
 * Fetch the parent education organization for a single edOrg, or null when
 * it has no parent reference.
 *
 * @param token                    authentication token for the API call
 * @param educationalOrganization  the edOrg whose parent is wanted
 * @return the parent edOrg entity, or null
 */
@Override
public GenericEntity getParentEducationalOrganization(String token, GenericEntity educationalOrganization) {
List<GenericEntity> single = new ArrayList<GenericEntity>();
single.add(educationalOrganization);
List<String> parentIds = this.extractAttributesFromEntities(single, Constants.ATTR_PARENT_EDORG);
if (parentIds.isEmpty()) {
    return null;
}
return this.getEducationalOrganization(token, parentIds.get(0));
}
/**
 * Get a list of all schools visible to the user.
 *
 * NOTE(review): the ids parameter is ignored — the query is unfiltered.
 * Confirm callers do not rely on id filtering here.
 *
 * @param token authentication token for the API call
 * @param ids   unused; retained for interface compatibility
 * @return list of school entities
 */
@Override
public List<GenericEntity> getSchools(String token, List<String> ids) {
// get schools
List<GenericEntity> schools = this.readEntityList(token, SDKConstants.SCHOOLS_ENTITY + "?" + this.buildQueryString(null));
return schools;
}
/**
 * Get the schools associated with the current user: educators go through the
 * teacher-school association; other staff get the child edOrgs (by
 * parentEducationAgencyReference) of their assigned education organizations.
 *
 * @param token      authentication token for the API call
 * @param ids        unused; retained for interface compatibility
 * @param isEducator whether the current user is an educator
 * @return list of school entities
 */
public List<GenericEntity> getMySchools(String token, List<String> ids, boolean isEducator) {
List<GenericEntity> schools;
if (isEducator) {
// get schools
schools = this.readEntityList(token,
SDKConstants.TEACHERS_ENTITY + getId(token) + SDKConstants.TEACHER_SCHOOL_ASSOC
+ SDKConstants.SCHOOLS_ENTITY + "?" + this.buildQueryString(null));
} else {
List<GenericEntity> edOrgs = this.readEntityList(token,
SDKConstants.STAFF_ENTITY + getId(token) + SDKConstants.STAFF_EDORG_ASSIGNMENT_ASSOC
+ SDKConstants.EDORGS_ENTITY + "?" + this.buildQueryString(null));
schools = new ArrayList<GenericEntity>();
// Collect each edOrg's direct children, which are the user's schools.
for (GenericEntity edOrg : edOrgs) {
Map<String, String> query = new HashMap<String, String>();
query.put("parentEducationAgencyReference", (String) edOrg.get("id"));
schools.addAll(this.readEntityList(token,
SDKConstants.EDORGS_ENTITY + "?" + this.buildQueryString(query)));
}
}
return schools;
}
/**
 * Get a list of schools using a list of ids.
 *
 * @param token  authentication token for the API call
 * @param ids    school ids to fetch; null/empty yields an empty list
 * @param params optional query parameters; may be null
 * @return the matching school entities
 */
@Override
public List<GenericEntity> getSchools(String token, List<String> ids, Map<String, String> params) {
return this.readEntityList(token,
SDKConstants.SCHOOLS_ENTITY + buildListString(ids) + "?" + this.buildQueryString(params), ids);
}
/**
 * Get a school identified by id.
 *
 * @param token authentication token for the API call
 * @param id    the school's unique identifier
 * @return the school entity, or null when id is empty or the read fails
 */
@Override
public GenericEntity getSchool(String token, String id) {
return this.readEntity(token, SDKConstants.SCHOOLS_ENTITY + id, id);
}
/**
 * Get a list of all sessions, optionally constrained by query parameters.
 *
 * @param token  authentication token for the API call
 * @param params optional query parameters; may be null
 * @return list of session entities
 */
@Override
public List<GenericEntity> getSessions(String token, Map<String, String> params) {
return this.readEntityList(token, SDKConstants.SESSIONS_ENTITY + "?" + this.buildQueryString(params));
}
/**
 * Get a list of sessions using a list of ids.
 *
 * @param token  authentication token for the API call
 * @param ids    session ids to fetch; null/empty yields an empty list
 * @param params optional query parameters; may be null
 * @return the matching session entities
 */
@Override
public List<GenericEntity> getSessions(String token, List<String> ids, Map<String, String> params) {
return this.readEntityList(token,
SDKConstants.SESSIONS_ENTITY + buildListString(ids) + "?" + this.buildQueryString(params), ids);
}
/**
 * Get all sessions belonging to the specified school year.
 *
 * @param token      authentication token for the API call
 * @param schoolYear school year to filter on (e.g. "2011-2012")
 * @return list of session entities for that year
 */
@Override
public List<GenericEntity> getSessionsForYear(String token, String schoolYear) {
Map<String, String> query = new HashMap<String, String>();
query.put("schoolYear", schoolYear);
return this.readEntityList(token, SDKConstants.SESSIONS_ENTITY + "?" + this.buildQueryString(query));
}
/**
 * Get a session identified by id.
 *
 * @param token authentication token for the API call
 * @param id    the session's unique identifier
 * @return the session entity, or null when id is empty or the read fails
 */
@Override
public GenericEntity getSession(String token, String id) {
return this.readEntity(token, SDKConstants.SESSIONS_ENTITY + id, id);
}
/**
 * Get a list of all sections, optionally constrained by query parameters.
 *
 * @param token  authentication token for the API call
 * @param params optional query parameters; may be null
 * @return list of section entities
 */
@Override
public List<GenericEntity> getSections(String token, Map<String, String> params) {
return this.readEntityList(token, SDKConstants.SECTIONS_ENTITY + "?" + this.buildQueryString(params));
}
/**
 * Get a list of sections using a list of ids.
 *
 * @param token  authentication token for the API call
 * @param ids    section ids to fetch; null/empty yields an empty list
 * @param params optional query parameters; may be null
 * @return the matching section entities
 */
@Override
public List<GenericEntity> getSections(String token, List<String> ids, Map<String, String> params) {
return this.readEntityList(token,
SDKConstants.SECTIONS_ENTITY + buildListString(ids) + "?" + this.buildQueryString(params), ids);
}
/**
 * Get all sections for a non-educator, enriched with session details and
 * filtered down to sessions still within the grace period.
 *
 * @param token  authentication token for the API call
 * @param params optional query parameters; may be null
 * @return current (non-historical) section entities
 */
@Override
public List<GenericEntity> getSectionsForNonEducator(String token, Map<String, String> params) {
List<GenericEntity> allSections = this.getSections(token, params);
// Attach session entities so the filter below can inspect session end dates.
enrichSectionsWithSessionDetails(token, allSections);
// filterHistoricalData=true: drop sections whose session ended before the grace period.
return filterCurrentSections(allSections, true);
}
/**
 * Get all sections associated with a teacher via the teacher-section
 * association. No historical filtering is applied; section names are ensured.
 *
 * @param teacherId the teacher's unique identifier
 * @param token     authentication token for the API call
 * @param params    optional query parameters; may be null
 * @return list of section entities for the teacher
 */
@Override
public List<GenericEntity> getSectionsForTeacher(String teacherId, String token, Map<String, String> params) {
List<GenericEntity> teacherSections = this.readEntityList(token,
SDKConstants.TEACHERS_ENTITY + teacherId + SDKConstants.TEACHER_SECTION_ASSOC
+ SDKConstants.SECTIONS_ENTITY + "?" + this.buildQueryString(params), teacherId);
// filterHistoricalData=false: only ensures section names, drops nothing.
return filterCurrentSections(teacherSections, false);
}
/**
 * Get all sections associated with a student via the student-section
 * association. No historical filtering is applied; section names are ensured.
 *
 * @param token     authentication token for the API call
 * @param studentId the student's unique identifier
 * @param params    optional query parameters; may be null
 * @return list of section entities for the student
 */
@Override
public List<GenericEntity> getSectionsForStudent(final String token, final String studentId,
Map<String, String> params) {
List<GenericEntity> studentSections = this.readEntityList(token,
SDKConstants.STUDENTS_ENTITY + studentId + SDKConstants.STUDENT_SECTION_ASSOC
+ SDKConstants.SECTIONS_ENTITY + "?" + this.buildQueryString(params), studentId);
// filterHistoricalData=false: only ensures section names, drops nothing.
return filterCurrentSections(studentSections, false);
}
/**
 * Get a section identified by id, with its display name ensured.
 *
 * @param token authentication token for the API call
 * @param id    the section's unique identifier
 * @return the section entity, or null when id is empty or the read fails
 */
@Override
public GenericEntity getSection(String token, String id) {
GenericEntity result = this.readEntity(token, SDKConstants.SECTIONS_ENTITY + id, id);
// Fill in a fallback name (section code) when the section has none.
ensureSectionName(result);
return result;
}
/**
 * Get the student's home room section. A single section association is taken
 * as the home room; otherwise the first section whose association carries a
 * true homeroomIndicator is returned.
 *
 * @param token     authentication token for the API call
 * @param studentId the student's unique identifier
 * @return the home room section entity, or null when none can be determined
 */
@Override
public GenericEntity getSectionHomeForStudent(String token, String studentId) {
List<GenericEntity> studentSections = this.getSectionsForStudent(token, studentId, null);
// A single association is unambiguous: treat it as the home room.
if (studentSections.size() == 1) {
return studentSections.get(0);
}
// Otherwise scan associations for an explicit homeroomIndicator flag.
for (GenericEntity candidate : studentSections) {
List<Map<String, Object>> assocs = (List<Map<String, Object>>) candidate
.get("studentSectionAssociation");
if (assocs == null) {
continue;
}
for (Map<String, Object> assoc : assocs) {
Object indicator = assoc.get(Constants.ATTR_HOMEROOM_INDICATOR);
if ((indicator != null) && ((Boolean) indicator)) {
return candidate;
}
}
}
return null;
}
/**
 * Get a list of courses using a list of ids.
 *
 * @param token  authentication token for the API call
 * @param ids    course ids to fetch; null/empty yields an empty list
 * @param params optional query parameters; may be null
 * @return the matching course entities
 */
@Override
public List<GenericEntity> getCourses(String token, List<String> ids, Map<String, String> params) {
return this.readEntityList(token,
SDKConstants.COURSES_ENTITY + buildListString(ids) + "?" + this.buildQueryString(params), ids);
}
/**
 * Get a list of courses for the given student id, including transcript data
 * via the "transcript" optional field.
 *
 * @param token     authentication token for the API call
 * @param studentId the student's unique identifier
 * @param params    optional query parameters; may be null
 * @return list of entities for the student
 */
@Override
public List<GenericEntity> getCoursesForStudent(String token, String studentId, Map<String, String> params) {
// Guard: the original dereferenced params unconditionally and threw an NPE
// when callers passed null (other methods here accept null params).
if (params == null) {
params = new HashMap<String, String>();
}
params.put("optionalFields", "transcript");
// NOTE(review): the URL is rooted at the sections resource but keyed by
// studentId, unlike getSectionsForStudent which roots at students — confirm
// this endpoint against the API before relying on it.
return this.readEntityList(token, SDKConstants.SECTIONS_ENTITY + studentId + SDKConstants.STUDENT_SECTION_ASSOC
+ SDKConstants.STUDENTS + "?" + this.buildQueryString(params), studentId);
}
/**
 * Get courses with their sections for a school. Non-educators read the
 * school's sections directly (with session enrichment and historical
 * filtering); educators use their own section associations filtered by
 * school id.
 *
 * @param token    authentication token for the API call
 * @param schoolId the school's unique identifier; may be null for educators
 * @return list of course entities, each carrying its associated sections
 */
@Override
public List<GenericEntity> getCoursesSectionsForSchool(String token, String schoolId) {
// get sections
List<GenericEntity> sections = null;
if (SecurityUtil.isNotEducator()) {
sections = this.readEntityList(token, SDKConstants.SCHOOLS_ENTITY + schoolId + SDKConstants.SECTIONS + "?"
+ Constants.LIMIT + "=" + Constants.MAX_RESULTS);
enrichSectionsWithSessionDetails(token, sections);
sections = filterCurrentSections(sections, true);
} else {
String teacherId = getId(token);
sections = getSectionsForTeacher(teacherId, token, null);
// filter by school id
if (schoolId != null) {
List<GenericEntity> filteredSections = new ArrayList<GenericEntity>();
for (GenericEntity section : sections) {
if (section.getString(Constants.ATTR_SCHOOL_ID) != null
&& section.getString(Constants.ATTR_SCHOOL_ID).equals(schoolId)) {
filteredSections.add(section);
}
}
sections = filteredSections;
}
}
// get courses
List<GenericEntity> courses = new ArrayList<GenericEntity>();
if (sections != null && !sections.isEmpty()) {
courses = getCourseSectionMappings(sections, token);
}
return courses;
}
/**
 * Get a list of transcripts for the given student id via the
 * student-transcript association.
 *
 * @param token     authentication token for the API call
 * @param studentId the student's unique identifier
 * @param params    optional query parameters; may be null
 * @return list of transcript association entities
 */
@Override
public List<GenericEntity> getTranscriptsForStudent(String token, String studentId, Map<String, String> params) {
return this.readEntityList(token, SDKConstants.STUDENTS_ENTITY + studentId
+ SDKConstants.STUDENT_TRANSCRIPT_ASSOC + "?" + this.buildQueryString(params), studentId);
}
/**
 * Get a course identified by id.
 *
 * @param token authentication token for the API call
 * @param id    the course's unique identifier
 * @return the course entity, or null when id is empty or the read fails
 */
@Override
public GenericEntity getCourse(String token, String id) {
return this.readEntity(token, SDKConstants.COURSES_ENTITY + id, id);
}
/**
 * Get a list of staff members using a list of ids.
 *
 * @param token  authentication token for the API call
 * @param ids    staff ids to fetch; null/empty yields an empty list
 * @param params optional query parameters; may be null
 * @return the matching staff entities
 */
@Override
public List<GenericEntity> getStaff(String token, List<String> ids, Map<String, String> params) {
return this.readEntityList(token,
SDKConstants.STAFF_ENTITY + buildListString(ids) + "?" + this.buildQueryString(params), ids);
}
/**
 * Get staff member information identified by id.
 *
 * @param token authentication token for the API call
 * @param id    the staff member's unique identifier
 * @return the staff entity, or null when id is empty or the read fails
 */
@Override
public GenericEntity getStaff(String token, String id) {
return this.readEntity(token, SDKConstants.STAFF_ENTITY + id, id);
}
/**
 * Get a staff member by id, decorated with the education organization of the
 * given category (when one is associated): the edOrg's id and entity are
 * attached to the staff entity under dedicated attributes.
 *
 * @param token                authentication token for the API call
 * @param id                   the staff member's unique identifier
 * @param organizationCategory desired organizationCategory value, or null
 * @return the staff entity, possibly enriched with edOrg attributes
 */
@Override
public GenericEntity getStaffWithEducationOrganization(String token, String id, String organizationCategory) {
GenericEntity staff = this.getStaff(token, id);
GenericEntity edOrg = this.getEducationOrganizationForStaff(token, id, organizationCategory);
if (edOrg != null) {
staff.put(SDKConstants.EDORG_SLI_ID_ATTRIBUTE, edOrg.getId());
staff.put(SDKConstants.EDORG_ATTRIBUTE, edOrg);
}
return staff;
}
/**
 * Get a list of teachers specified by a list of ids.
 *
 * @param token  authentication token for the API call
 * @param ids    teacher ids to fetch; null/empty yields an empty list
 * @param params optional query parameters; may be null
 * @return the matching teacher entities
 */
@Override
public List<GenericEntity> getTeachers(String token, List<String> ids, Map<String, String> params) {
return this.readEntityList(token,
SDKConstants.TEACHERS_ENTITY + buildListString(ids) + "?" + this.buildQueryString(params), ids);
}
/**
 * Get a teacher identified by id.
 *
 * @param token authentication token for the API call
 * @param id    the teacher's unique identifier
 * @return the teacher entity, or null when id is empty or the read fails
 */
@Override
public GenericEntity getTeacher(String token, String id) {
return this.readEntity(token, SDKConstants.TEACHERS_ENTITY + id, id);
}
/**
 * Get the teacher of record for a specified section by scanning its
 * teacher-section associations.
 *
 * @param token     authentication token for the API call
 * @param sectionId the section's unique identifier
 * @return the teacher-of-record entity, or null when none is found
 */
@Override
public GenericEntity getTeacherForSection(String token, String sectionId) {
List<GenericEntity> teacherSectionAssociations = this.readEntityList(token, SDKConstants.SECTIONS_ENTITY
+ sectionId + SDKConstants.TEACHER_SECTION_ASSOC + "?" + this.buildQueryString(null), sectionId);
if (teacherSectionAssociations != null) {
for (GenericEntity teacherSectionAssociation : teacherSectionAssociations) {
// Constant-first equals: classroomPosition may be absent on an
// association, which previously caused a NullPointerException.
if (Constants.TEACHER_OF_RECORD.equals(
teacherSectionAssociation.getString(Constants.ATTR_CLASSROOM_POSITION))) {
String teacherId = teacherSectionAssociation.getString(Constants.ATTR_TEACHER_ID);
return this.getTeacher(token, teacherId);
}
}
}
return null;
}
/**
 * Get a list of parents for the given student id via the student-parent
 * association.
 *
 * @param token     authentication token for the API call
 * @param studentId the student's unique identifier
 * @param params    optional query parameters; may be null
 * @return list of parent entities
 */
@Override
public List<GenericEntity> getParentsForStudent(String token, String studentId, Map<String, String> params) {
return this.readEntityList(token, SDKConstants.STUDENTS_ENTITY + studentId + SDKConstants.STUDENT_PARENT_ASSOC
+ SDKConstants.PARENTS + "?" + this.buildQueryString(params), studentId);
}
/**
 * Get a list of all students, optionally constrained by query parameters.
 *
 * @param token  authentication token for the API call
 * @param params optional query parameters; may be null
 * @return list of student entities
 */
@Override
public List<GenericEntity> getStudents(String token, Map<String, String> params) {
return this.readEntityList(token, SDKConstants.STUDENTS_ENTITY + "?" + this.buildQueryString(params));
}
/**
 * Get a list of students specified by a list of ids.
 *
 * @param token  authentication token for the API call
 * @param ids    student ids to fetch; null/empty yields an empty list
 * @param params optional query parameters; may be null
 * @return the matching student entities
 */
@Override
public List<GenericEntity> getStudents(String token, List<String> ids, Map<String, String> params) {
return this.readEntityList(token,
SDKConstants.STUDENTS_ENTITY + buildListString(ids) + "?" + this.buildQueryString(params), ids);
}
/**
 * Get all students assigned to the specified section, with assessments,
 * attendances, transcript and gradebook optional fields requested.
 *
 * @param token     authentication token for the API call
 * @param sectionId the section's unique identifier
 * @return list of student entities with the extra optional views
 */
@Override
public List<GenericEntity> getStudentsForSection(String token, String sectionId) {
// Request the heavyweight optional views needed by section screens.
String optionalParams = Constants.ATTR_ASSESSMENTS + "," + Constants.ATTR_STUDENT_ATTENDANCES_1 + ","
+ Constants.ATTR_TRANSCRIPT + "," + Constants.ATTR_GRADEBOOK;
Map<String, String> query = new HashMap<String, String>();
query.put(SDKConstants.PARAM_OPTIONAL_FIELDS, optionalParams);
return this.readEntityList(token, SDKConstants.SECTIONS_ENTITY + sectionId + SDKConstants.STUDENT_SECTION_ASSOC
+ SDKConstants.STUDENTS + "?" + this.buildQueryString(query), sectionId);
}
/**
 * Search students by first and/or last name; blank criteria are omitted from
 * the query.
 *
 * @param token     authentication token for the API call
 * @param firstName first-name filter; may be null or empty
 * @param lastName  last-name filter; may be null or empty
 * @return list of matching student entities
 */
@Override
public List<GenericEntity> getStudentsWithSearch(String token, String firstName, String lastName) {
Map<String, String> query = new HashMap<String, String>();
if (firstName != null && !firstName.isEmpty()) {
query.put(SDKConstants.PARAM_FIRST_NAME, firstName);
}
if (lastName != null && !lastName.isEmpty()) {
query.put(SDKConstants.PARAM_LAST_NAME, lastName);
}
return this.getStudents(token, query);
}
/**
 * Get the students in the specified section, with only the gradebook
 * optional field requested.
 *
 * @param token     authentication token for the API call
 * @param sectionId the section's unique identifier
 * @return list of student entities with gradebook entries
 */
@Override
public List<GenericEntity> getStudentsForSectionWithGradebookEntries(String token, String sectionId) {
Map<String, String> query = new HashMap<String, String>();
query.put(SDKConstants.PARAM_OPTIONAL_FIELDS, Constants.ATTR_GRADEBOOK);
return this.readEntityList(token, SDKConstants.SECTIONS_ENTITY + sectionId + SDKConstants.STUDENT_SECTION_ASSOC
+ SDKConstants.STUDENTS + "?" + this.buildQueryString(query), sectionId);
}
/**
 * Get a student identified by id.
 *
 * @param token authentication token for the API call
 * @param id    the student's unique identifier
 * @return the student entity, or null when id is empty or the read fails
 */
@Override
public GenericEntity getStudent(String token, String id) {
return this.readEntity(token, SDKConstants.STUDENTS_ENTITY + id, id);
}
/**
 * Get a student by id, requesting the given optional field views.
 *
 * @param token          authentication token for the API call
 * @param id             the student's unique identifier
 * @param optionalFields names of optional views to include
 * @return the student entity with the requested views, or null
 */
@Override
public GenericEntity getStudentWithOptionalFields(String token, String id, List<String> optionalFields) {
Map<String, String> query = new HashMap<String, String>();
query.put(SDKConstants.PARAM_OPTIONAL_FIELDS, this.buildListString(optionalFields));
return this.readEntity(token, SDKConstants.STUDENTS_ENTITY + id + "?" + this.buildQueryString(query), id);
}
/**
 * Get the student's school enrollments (student-school associations), sorted
 * by entry date descending, each enriched with its school entity.
 *
 * NOTE(review): the enhanced association is assigned back to the loop
 * variable; if enhanceStudentSchoolAssociation returns a NEW instance rather
 * than mutating in place, the school attached below lands on a discarded copy
 * and never reaches the returned list — confirm the enhancer mutates in place.
 *
 * @param token     authentication token for the API call
 * @param studentId the student's unique identifier
 * @return list of student-school association entities with school details
 */
@Override
public List<GenericEntity> getEnrollmentForStudent(String token, String studentId) {
Map<String, String> params = new HashMap<String, String>();
params.put(SDKConstants.PARAM_SORT_BY, SDKConstants.PARAM_ENTRY_DATE);
params.put(SDKConstants.PARAM_SORT_ORDER, SDKConstants.PARAM_SORT_ORDER_DESCENDING);
List<GenericEntity> studentSchoolAssociations = this.readEntityList(token, SDKConstants.STUDENTS_ENTITY
+ studentId + SDKConstants.STUDENT_SCHOOL_ASSOC + "?" + this.buildQueryString(params), studentId);
for (GenericEntity studentSchoolAssociation : studentSchoolAssociations) {
studentSchoolAssociation = GenericEntityEnhancer.enhanceStudentSchoolAssociation(studentSchoolAssociation);
String schoolId = (String) studentSchoolAssociation.get(Constants.ATTR_SCHOOL_ID);
// Retrieve the school for the corresponding student school association
GenericEntity school = this.getSchool(token, schoolId);
studentSchoolAssociation.put(Constants.ATTR_SCHOOL, school);
}
return studentSchoolAssociations;
}
/**
 * Get a list of attendance records for the given student id.
 *
 * @param token     authentication token for the API call
 * @param studentId the student's unique identifier
 * @param params    optional query parameters; may be null
 * @return list of attendance entities
 */
@Override
public List<GenericEntity> getAttendanceForStudent(String token, String studentId, Map<String, String> params) {
return this.readEntityList(token, SDKConstants.STUDENTS_ENTITY + studentId + SDKConstants.ATTENDANCES_ENTITY
+ "?" + this.buildQueryString(params), studentId);
}
/**
 * Get a list of academic records for the given student id.
 *
 * NOTE(review): when params is null the studentId filter is silently skipped
 * and the query runs unfiltered — confirm callers always pass a params map.
 *
 * @param token     authentication token for the API call
 * @param studentId the student's unique identifier (added as a query filter)
 * @param params    query parameters; the studentId filter is added to it
 * @return list of academic record entities
 */
@Override
public List<GenericEntity> getAcademicRecordsForStudent(String token, String studentId, Map<String, String> params) {
if (params != null) {
params.put(SDKConstants.PARAM_STUDENT_ID, studentId);
}
return this.readEntityList(token, SDKConstants.ACADEMIC_RECORDS_ENTITY + "?" + this.buildQueryString(params),
studentId);
}
/**
 * Get a list of assessments using a list of ids.
 *
 * @param token  authentication token for the API call
 * @param ids    assessment ids to fetch; null/empty yields an empty list
 * @param params optional query parameters; may be null
 * @return the matching assessment entities
 */
@Override
public List<GenericEntity> getAssessments(String token, List<String> ids, Map<String, String> params) {
return this.readEntityList(token,
SDKConstants.ASSESSMENTS_ENTITY + buildListString(ids) + "?" + this.buildQueryString(params), ids);
}
/**
 * Get a list of assessments for the given student id via the
 * student-assessment association.
 *
 * @param token     authentication token for the API call
 * @param studentId the student's unique identifier
 * @return list of student-assessment association entities
 */
@Override
public List<GenericEntity> getAssessmentsForStudent(String token, String studentId) {
return this.readEntityList(token, SDKConstants.STUDENTS_ENTITY + studentId + SDKConstants.STUDENT_ASSMT_ASSOC
+ "?" + this.buildQueryString(null), studentId);
}
/**
 * Get an assessment identified by id.
 *
 * @param token authentication token for the API call
 * @param id    the assessment's unique identifier
 * @return the assessment entity, or null when id is empty or the read fails
 */
@Override
public GenericEntity getAssessment(String token, String id) {
return this.readEntity(token, SDKConstants.ASSESSMENTS_ENTITY + id, id);
}
/*
* *****************************************************
* Core API SDK Methods
* *****************************************************
*/
/**
 * Read a single custom entity from the API, deserialized into the supplied
 * class. Errors are logged and swallowed, matching the other read helpers.
 *
 * @param token       authentication token for the API call
 * @param url         resource URL to read
 * @param entityClass target class for deserialization
 * @return the first entity returned, or null on error or an empty result
 */
@ExecutionTimeLogger.LogExecutionTime
protected Object readCustomEntity(String token, String url, Class entityClass) {
List<Object> results = new ArrayList<Object>();
try {
sdkClient.read(token, results, url, entityClass);
} catch (Exception e) {
LOGGER.error("Exception occurred during API read", e);
}
return results.isEmpty() ? null : results.get(0);
}
/**
 * Read a single resource entity from the API. Errors are logged and
 * swallowed; a null is returned on failure or an empty result.
 *
 * @param token authentication token for the API call
 * @param url   resource URL to read
 * @return the first entity returned, or null
 */
@ExecutionTimeLogger.LogExecutionTime
protected GenericEntity readEntity(String token, String url) {
List<GenericEntity> results = new ArrayList<GenericEntity>();
try {
sdkClient.read(token, results, url, GenericEntity.class);
} catch (Exception e) {
LOGGER.error("Exception occurred during API read", e);
}
return results.isEmpty() ? null : results.get(0);
}
/**
 * Read a single resource entity from the API, returning null without an API
 * call when the id is absent.
 *
 * @param token authentication token for the API call
 * @param url   resource URL to read
 * @param id    identifier guarding the read; null/empty short-circuits
 * @return the entity, or null
 */
@ExecutionTimeLogger.LogExecutionTime
protected GenericEntity readEntity(String token, String url, String id) {
// An absent id means there is nothing to look up.
if (id == null || id.isEmpty()) {
return null;
}
return readEntity(token, url);
}
/**
 * Read a list of resource entities from the API. Errors are logged and
 * swallowed; whatever was read before the failure is returned.
 *
 * @param token authentication token for the API call
 * @param url   resource URL to read
 * @return list of entities; never null
 */
@ExecutionTimeLogger.LogExecutionTime
protected List<GenericEntity> readEntityList(String token, String url) {
List<GenericEntity> results = new ArrayList<GenericEntity>();
try {
sdkClient.read(token, results, url, GenericEntity.class);
} catch (Exception e) {
LOGGER.error("Exception occurred during API read", e);
}
return results;
}
/**
 * Read a list of resource entities, short-circuiting to an immutable empty
 * list when the guarding id list is null or empty.
 *
 * @param token authentication token for the API call
 * @param url   resource URL to read
 * @param id    id list guarding the read
 * @return list of entities; never null
 */
@ExecutionTimeLogger.LogExecutionTime
protected List<GenericEntity> readEntityList(String token, String url, List id) {
if (id == null || id.isEmpty()) {
return Collections.emptyList();
}
return readEntityList(token, url);
}
/**
 * Read a list of resource entities, short-circuiting to an immutable empty
 * list when the guarding id string is null or empty.
 *
 * @param token authentication token for the API call
 * @param url   resource URL to read
 * @param id    id string guarding the read
 * @return list of entities; never null
 */
@ExecutionTimeLogger.LogExecutionTime
protected List<GenericEntity> readEntityList(String token, String url, String id) {
if (id == null || id.isEmpty()) {
return Collections.emptyList();
}
return readEntityList(token, url);
}
/**
 * Create a resource entity via the SDK. Errors are logged and swallowed,
 * matching the other SDK helpers; the caller gets no failure signal.
 *
 * @param token  authentication token for the API call
 * @param url    resource URL to create under
 * @param entity entity payload to create
 */
@ExecutionTimeLogger.LogExecutionTime
protected void createEntity(String token, String url, GenericEntity entity) {
try {
sdkClient.create(token, url, entity);
} catch (Exception e) {
LOGGER.error("Exception occurred during API create", e);
}
}
/**
 * Update a resource entity via the SDK. Errors are logged and swallowed,
 * matching the other SDK helpers; the caller gets no failure signal.
 *
 * @param token  authentication token for the API call
 * @param url    resource URL to update
 * @param entity entity payload to write
 */
@ExecutionTimeLogger.LogExecutionTime
protected void updateEntity(String token, String url, GenericEntity entity) {
try {
sdkClient.update(token, url, entity);
} catch (Exception e) {
LOGGER.error("Exception occurred during API update", e);
}
}
/**
 * Delete a resource entity via the SDK. Errors are logged and swallowed,
 * matching the other SDK helpers; the caller gets no failure signal.
 *
 * @param token authentication token for the API call
 * @param url   resource URL to delete
 */
@ExecutionTimeLogger.LogExecutionTime
protected void deleteEntity(String token, String url) {
try {
sdkClient.deleteByToken(token, url);
} catch (Exception e) {
LOGGER.error("Exception occurred during API delete", e);
}
}
/*
* *****************************************************
* API Helper Methods
* *****************************************************
*/
/**
 * Extract an entity's unique id from an API link map: the id is the final
 * path segment of the link's href.
 *
 * @param linkMap link map containing an href attribute
 * @return the id portion of the href
 */
private String parseId(Map linkMap) {
String href = (String) linkMap.get(Constants.ATTR_HREF);
return href.substring(href.lastIndexOf("/") + 1);
}
/**
 * Collect the non-empty string values of the named attribute from each
 * entity in the list.
 *
 * @param entities      entities to scan; may be null
 * @param attributeName attribute to extract
 * @return list of non-empty attribute values; never null
 */
private List<String> extractAttributesFromEntities(List<GenericEntity> entities, String attributeName) {
List<String> values = new ArrayList<String>();
if (entities == null) {
return values;
}
for (GenericEntity entity : entities) {
String value = (String) entity.get(attributeName);
if (value != null && !value.isEmpty()) {
values.add(value);
}
}
return values;
}
/**
 * Collect the hrefs of an entity's links whose relationship contains the
 * given value (substring match, preserved from the original behavior).
 *
 * @param entity entity carrying a links attribute; may be null
 * @param rel    relationship fragment to match
 * @return list of matching hrefs; never null
 */
private List<String> extractLinksFromEntity(GenericEntity entity, String rel) {
List<String> hrefs = new ArrayList<String>();
if (entity == null || !entity.containsKey(Constants.ATTR_LINKS)) {
return hrefs;
}
for (Map link : (List<Map>) (entity.get(Constants.ATTR_LINKS))) {
if (link.get(Constants.ATTR_REL).toString().contains(rel)) {
hrefs.add((String) link.get(Constants.ATTR_HREF));
}
}
return hrefs;
}
/**
 * Attach each section's session entity (looked up by sessionId) so that
 * downstream filtering can inspect session dates.
 *
 * @param token    authentication token for the API call
 * @param sections sections to enrich in place; may be null
 */
private void enrichSectionsWithSessionDetails(String token, List<GenericEntity> sections) {
List<GenericEntity> sessions = this.getSessions(token, null);
if (sessions == null || sections == null) {
return;
}
// Index sessions by id for constant-time lookup.
Map<String, GenericEntity> sessionsById = new HashMap<String, GenericEntity>();
for (GenericEntity session : sessions) {
sessionsById.put(session.getId(), session);
}
for (GenericEntity section : sections) {
String sessionId = (String) section.get(Constants.ATTR_SESSION_ID);
if (sessionId != null) {
section.put(Constants.ATTR_SESSION, sessionsById.get(sessionId));
}
}
}
/**
 * Process sections: ensure every section has a display name, and (when
 * filterHistoricalData is true) keep only sections whose session end date is
 * on or after "now minus the configured grace period". Sections lacking an
 * enriched session entity are dropped entirely in filtering mode.
 *
 * @param sections             sections to process; may be null
 * @param filterHistoricalData whether to drop sections with expired sessions
 * @return the filtered list when filtering, otherwise the input list
 */
private List<GenericEntity> filterCurrentSections(List<GenericEntity> sections, boolean filterHistoricalData) {
List<GenericEntity> filteredSections = sections;
if (filterHistoricalData) {
filteredSections = new ArrayList<GenericEntity>();
}
if (sections != null && sections.size() > 0) {
// Setup grace period date: now minus gracePeriod days. A malformed
// gracePeriod is logged and treated as zero.
Calendar gracePeriodCalendar = Calendar.getInstance();
gracePeriodCalendar.setTimeInMillis(System.currentTimeMillis());
try {
if (gracePeriod != null && !gracePeriod.equals("")) {
int daysToSubtract = Integer.parseInt(gracePeriod) * -1;
gracePeriodCalendar.add(Calendar.DATE, daysToSubtract);
}
} catch (NumberFormatException exception) {
LOGGER.warn("Invalid grace period: {}", exception.getMessage());
}
for (GenericEntity section : sections) {
// Ensure section name
ensureSectionName(section);
// Filter historical sections/sessions if necessary
if (filterHistoricalData) {
Map<String, Object> session = (Map<String, Object>) section.get(Constants.ATTR_SESSION);
// Verify section has been enriched with session details
if (session != null) {
try {
// Setup session end date
String endDateAttribute = (String) session.get(Constants.ATTR_SESSION_END_DATE);
DateFormat formatter = new SimpleDateFormat(Constants.ATTR_DATE_FORMAT);
Date sessionEndDate = formatter.parse(endDateAttribute);
Calendar sessionEndCalendar = Calendar.getInstance();
sessionEndCalendar.setTimeInMillis(sessionEndDate.getTime());
// Add filtered section if grace period adjusted date is before
// or equal to session end date
if (gracePeriodCalendar.compareTo(sessionEndCalendar) <= 0) {
filteredSections.add(section);
}
} catch (IllegalArgumentException exception) {
LOGGER.warn("Invalid session date formatter configuration: {}", exception.getMessage());
} catch (ParseException exception) {
LOGGER.warn("Invalid session date format: {}", exception.getMessage());
}
}
}
}
}
return filteredSections;
}
/**
 * Join schools with their courses by way of sections. Ed-Fi has no direct
 * course-school association, so for every section that maps to both a school
 * and a course, that course is attached to that school's entity under the
 * courses attribute.
 *
 * @param schools  candidate schools used to build the school lookup
 * @param sections sections providing the school/course linkage; may be null
 * @return the schools (from the internal map) with courses attached
 */
private List<GenericEntity> matchSchoolsAndSections(List<GenericEntity> schools, List<GenericEntity> sections,
String token) {
// collect associated course first.
HashMap<String, GenericEntity> courseMap = new HashMap<String, GenericEntity>();
HashMap<String, String> sectionIDToCourseIDMap = new HashMap<String, String>();
getCourseSectionsMappings(sections, token, courseMap, sectionIDToCourseIDMap);
// now collect associated schools.
HashMap<String, GenericEntity> schoolMap = new HashMap<String, GenericEntity>();
HashMap<String, String> sectionIDToSchoolIDMap = new HashMap<String, String>();
getSchoolSectionsMappings(sections, token, schools, schoolMap, sectionIDToSchoolIDMap);
// Now associate course and school.
// There is no direct course-school association in ed-fi. For any section associated to
// a school, its course will also be associated.
HashMap<String, HashSet<String>> schoolIDToCourseIDMap = new HashMap<String, HashSet<String>>();
if (sections != null) {
for (int i = 0; i < sections.size(); i++) {
GenericEntity section = sections.get(i);
if (sectionIDToSchoolIDMap.containsKey(section.get(Constants.ATTR_ID))
&& sectionIDToCourseIDMap.containsKey(section.get(Constants.ATTR_ID))) {
String schoolId = sectionIDToSchoolIDMap.get(section.get(Constants.ATTR_ID));
String courseId = sectionIDToCourseIDMap.get(section.get(Constants.ATTR_ID));
if (!schoolIDToCourseIDMap.containsKey(schoolId)) {
schoolIDToCourseIDMap.put(schoolId, new HashSet<String>());
}
schoolIDToCourseIDMap.get(schoolId).add(courseId);
}
}
}
// now create the generic entity
for (String schoolId : schoolIDToCourseIDMap.keySet()) {
GenericEntity s = schoolMap.get(schoolId);
for (String courseId : schoolIDToCourseIDMap.get(schoolId)) {
GenericEntity c = courseMap.get(courseId);
s.appendToList(Constants.ATTR_COURSES, c);
}
}
return new ArrayList<GenericEntity>(schoolMap.values());
}
/**
* Get the associations between courses and sections
*/
private List<GenericEntity> getCourseSectionMappings(List<GenericEntity> sections, String token) {
Map<String, GenericEntity> courseMap = new HashMap<String, GenericEntity>();
Map<String, String> sectionIDToCourseIDMap = new HashMap<String, String>();
// this temporary sectionLookup will be used for cross reference between
// courseId and
// section.
Map<String, Set<GenericEntity>> sectionLookup = new HashMap<String, Set<GenericEntity>>();
// iterate each section
if (sections != null) {
Map<String, String> courseOfferingToCourseIDMap = new HashMap<String, String>();
// find the course for each course offering
List<GenericEntity> courseOfferings = readEntityList(token, SDKConstants.COURSE_OFFERINGS + "?" + this.buildQueryString(null));
if (courseOfferings != null) {
for (GenericEntity courseOffering : courseOfferings) {
// Get course using courseId reference in section
String courseOfferingId = (String) courseOffering.get(Constants.ATTR_ID);
String courseId = (String) courseOffering.get(Constants.ATTR_COURSE_ID);
courseOfferingToCourseIDMap.put(courseOfferingId, courseId);
}
}
for (GenericEntity section : sections) {
// Get course using courseId reference in section
String courseOfferingId = (String) section.get(Constants.ATTR_COURSE_OFFERING_ID);
String courseId = courseOfferingToCourseIDMap.get(courseOfferingId);
if (!sectionLookup.containsKey(courseId)) {
sectionLookup.put(courseId, new HashSet<GenericEntity>());
}
sectionLookup.get(courseId).add(section);
}
// get course Entity
List<GenericEntity> courses = readEntityList(token, SDKConstants.COURSES_ENTITY + "?" + this.buildQueryString(null));
// update courseMap with courseId. "id" for this entity
for (GenericEntity course : courses) {
// Add course to courseMap
//courseMap.put(course.getId(), course);
Set<GenericEntity> matchedSections = sectionLookup.get(course.getId());
if (matchedSections != null) {
// Add course to courseMap
courseMap.put(course.getId(), course);
Iterator<GenericEntity> sectionEntities = matchedSections.iterator();
while (sectionEntities.hasNext()) {
GenericEntity sectionEntity = sectionEntities.next();
course.appendToList(Constants.ATTR_SECTIONS, sectionEntity);
// update sectionIdToCourseIdMap
sectionIDToCourseIDMap.put(sectionEntity.getId(), course.getId());
}
}
}
}
return new ArrayList<GenericEntity>(courseMap.values());
}
/**
* Get the associations between courses and sections
*/
private void getCourseSectionsMappings(List<GenericEntity> sections, String token,
Map<String, GenericEntity> courseMap, Map<String, String> sectionIDToCourseIDMap) {
// this variable is used to prevent sending duplicate courseId to API
Set<String> courseIdTracker = new HashSet<String>();
// this temporary sectionLookup will be used for cross reference between
// courseId and
// section.
Map<String, Set<GenericEntity>> sectionLookup = new HashMap<String, Set<GenericEntity>>();
List<String> courseIds = new ArrayList<String>();
// iterate each section
if (sections != null) {
for (GenericEntity section : sections) {
// Get course using courseId reference in section
String courseId = (String) section.get(Constants.ATTR_COURSE_ID);
// search course which doesn't exist already
if (!courseMap.containsKey(courseId)) {
if (!courseIdTracker.contains(courseId)) {
courseIds.add(courseId);
courseIdTracker.add(courseId);
}
if (!sectionLookup.containsKey(courseId)) {
sectionLookup.put(courseId, new HashSet<GenericEntity>());
}
sectionLookup.get(courseId).add(section);
}
}
}
// get Entities by given courseIds
if (courseIds.size() > 0) {
// get course Entities
List<GenericEntity> courses = getCourses(token, courseIds, null);
// update courseMap with courseId. "id" for this entity
for (GenericEntity course : courses) {
// Add course to courseMap
courseMap.put(course.getId(), course);
Set<GenericEntity> matchedSections = sectionLookup.get(course.getId());
if (matchedSections != null) {
Iterator<GenericEntity> sectionEntities = matchedSections.iterator();
while (sectionEntities.hasNext()) {
GenericEntity sectionEntity = sectionEntities.next();
course.appendToList(Constants.ATTR_SECTIONS, sectionEntity);
// update sectionIdToCourseIdMap
sectionIDToCourseIDMap.put(sectionEntity.getId(), course.getId());
}
}
}
}
}
/**
* Get the associations between schools and sections
*/
private void getSchoolSectionsMappings(List<GenericEntity> sections, String token, List<GenericEntity> schools,
Map<String, GenericEntity> schoolMap, Map<String, String> sectionIDToSchoolIDMap) {
// temporary cross reference between schoolId and sections
Map<String, Set<GenericEntity>> sectionLookup = new HashMap<String, Set<GenericEntity>>();
// iterate each section
if (sections != null) {
for (GenericEntity section : sections) {
String schoolId = (String) section.get(Constants.ATTR_SCHOOL_ID);
// search school which doesn't exist already
if (!schoolMap.containsKey(schoolId)) {
if (!sectionLookup.containsKey(schoolId)) {
sectionLookup.put(schoolId, new HashSet<GenericEntity>());
}
sectionLookup.get(schoolId).add(section);
}
}
}
if (schools != null) {
// update schoolMap with schoolId. "id" for this entity
for (GenericEntity school : schools) {
String schoolId = school.getId();
Set<GenericEntity> matchedSections = sectionLookup.get(schoolId);
if (matchedSections != null) {
for (GenericEntity sectionEntity : matchedSections) {
// Add school to schoolmap
schoolMap.put(school.getId(), school);
// update sectionIdToSchoolIdMap
sectionIDToSchoolIDMap.put(sectionEntity.getId(), schoolId);
}
}
}
}
}
private void ensureSectionName(GenericEntity section) {
if ((section != null) && (section.get(Constants.ATTR_SECTION_NAME) == null)) {
section.put(Constants.ATTR_SECTION_NAME, section.get(Constants.ATTR_UNIQUE_SECTION_CODE));
}
}
/**
* Builds a comma-separated string from the given string item list
*
* @param items
* @return
*/
private String buildListString(List<String> items) {
return (items == null) ? "" : StringUtils.join(items, ",");
}
/**
* Builds a query string from the given parameter map
*
* @param params
* @return
*/
private String buildQueryString(Map<String, String> params) {
StringBuilder query = new StringBuilder();
String separator = "";
// Setup defaults including paging disabled
if (params == null) {
params = new HashMap<String, String>();
}
if (!params.containsKey(Constants.LIMIT)) {
params.put(Constants.LIMIT, String.valueOf(Constants.MAX_RESULTS));
}
for (Map.Entry<String, String> e : params.entrySet()) {
query.append(separator);
separator = "&";
query.append(e.getKey());
query.append("=");
query.append(e.getValue());
}
return query.toString();
}
}
|
sli/dashboard/src/main/java/org/slc/sli/dashboard/client/SDKAPIClient.java
|
/*
* Copyright 2012 Shared Learning Collaborative, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.slc.sli.dashboard.client;
import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.commons.lang.StringUtils;
import org.slc.sli.api.client.SLIClient;
import org.slc.sli.dashboard.entity.ConfigMap;
import org.slc.sli.dashboard.entity.GenericEntity;
import org.slc.sli.dashboard.entity.util.GenericEntityEnhancer;
import org.slc.sli.dashboard.util.Constants;
import org.slc.sli.dashboard.util.ExecutionTimeLogger;
import org.slc.sli.dashboard.util.JsonConverter;
import org.slc.sli.dashboard.util.SecurityUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* This client will use the SDK client to communicate with the SLI API.
*
* @author dwalker
* @author rbloh
* @author iivanisevic
*
*/
public class SDKAPIClient implements APIClient {
    private static final Logger LOGGER = LoggerFactory.getLogger(SDKAPIClient.class);
    // Underlying SLI SDK client used for all API calls.
    private SLIClient sdkClient;
    // Configured grace period for historical data access, in days (parsed as an int).
    private String gracePeriod;
    /**
     * Wrapper for value for the custom store - value is expected json object vs primitive
     *
     */
    public static class CustomEntityWrapper {
        // Raw JSON string payload stored as the custom entity value.
        String value;
        public CustomEntityWrapper(String value) {
            this.value = value;
        }
    }
/*
* *****************************************************
* API Client Interface Methods
* *****************************************************
*/
    /**
     * Set the SDK client
     *
     * @param sdkClient
     *            the SLI SDK client that all API calls are delegated to
     */
    public void setSdkClient(SLIClient sdkClient) {
        this.sdkClient = sdkClient;
    }
    /**
     * Get the SDK client
     *
     * @return the SLI SDK client in use
     */
    @Override
    public SLIClient getSdkClient() {
        return sdkClient;
    }
    /**
     * Set the SLI configured grace period for historical access
     *
     * @param gracePeriod
     *            the grace period in days, as a string
     */
    public void setGracePeriod(String gracePeriod) {
        this.gracePeriod = gracePeriod;
    }
    /**
     * Get the SLI configured grace period for historical access
     *
     * @return the configured grace period in days, as a string
     */
    // @Override
    public String getGracePeriod() {
        return this.gracePeriod;
    }
    /**
     * Get a resource entity of a specified type which is identified by id and enriched using
     * optional parameters
     *
     * @param token caller's API token
     * @param type resource type path segment
     * @param id entity id
     * @param params optional query parameters; may be null
     * @return the entity
     */
    @Override
    public GenericEntity getEntity(String token, String type, String id, Map<String, String> params) {
        return this.readEntity(token, "/" + type + "/" + id + "?" + this.buildQueryString(params), id);
    }
    /**
     * Get a list of resource entities of a specified type which are identified by a list of ids and
     * enriched using optional parameters
     *
     * @param token caller's API token
     * @param type resource type path segment
     * @param ids comma-separated entity ids
     * @param params optional query parameters; may be null
     * @return the entities
     */
    @Override
    public List<GenericEntity> getEntities(String token, String type, String ids, Map<String, String> params) {
        return this.readEntityList(token, "/" + type + "/" + ids + "?" + this.buildQueryString(params), ids);
    }
    /**
     * Get user's home entity
     *
     * @param token caller's API token
     * @return the home entity
     */
    @Override
    public GenericEntity getHome(String token) {
        return this.readEntity(token, SDKConstants.HOME_ENTITY);
    }
/**
* Get the user's unique identifier
*
* @param token
* @return
*/
@Override
public String getId(String token) {
String id = null;
GenericEntity homeEntity = this.getHome(token);
if (homeEntity != null) {
for (Map linkMap : (List<Map>) (homeEntity.get(Constants.ATTR_LINKS))) {
if (linkMap.get(Constants.ATTR_REL).equals(Constants.ATTR_SELF)) {
id = parseId(linkMap);
}
}
}
return id;
}
    /**
     * Get EdOrg custom data
     *
     * @param token
     * @param id
     * @return
     */
    @Override
    public ConfigMap getEdOrgCustomData(String token, String id) {
        GenericEntity ge = (GenericEntity) readCustomEntity(token, SDKConstants.EDORGS_ENTITY + id + SDKConstants.CUSTOM_DATA, GenericEntity.class);
        // The custom entity stores the config map as a JSON string under "config".
        return JsonConverter.fromJson((String) ge.get("config"), ConfigMap.class);
    }
    /**
     * Store EdOrg custom data
     *
     * @param token
     * @param id
     * @param configMap
     */
    @Override
    public void putEdOrgCustomData(String token, String id, ConfigMap configMap) {
        GenericEntity configMapEntity = new GenericEntity();
        // Serialize the config map to a JSON string before storing it.
        configMapEntity.put("config", JsonConverter.toJson(configMap));
        this.createEntity(token, SDKConstants.EDORGS_ENTITY + id + SDKConstants.CUSTOM_DATA, configMapEntity);
    }
    /**
     * Get a list of educational organizations using a list of ids
     *
     * @param token
     * @param ids
     * @param params
     * @return
     */
    @Override
    public List<GenericEntity> getEducationalOrganizations(String token, List<String> ids, Map<String, String> params) {
        return this.readEntityList(token,
                SDKConstants.EDORGS_ENTITY + buildListString(ids) + "?" + this.buildQueryString(params), ids);
    }
    /**
     * Get education organizations for staff member identified by id
     *
     * @param token
     * @param staffId
     * @return
     */
    @Override
    public List<GenericEntity> getEducationOrganizationsForStaff(String token, String staffId) {
        return this.readEntityList(token, SDKConstants.STAFF_ENTITY + staffId
                + SDKConstants.STAFF_EDORG_ASSIGNMENT_ASSOC + SDKConstants.EDORGS, staffId);
    }
    /**
     * Get an educational organization identified by id
     *
     * @param token
     * @param id
     * @return
     */
    @Override
    public GenericEntity getEducationalOrganization(String token, String id) {
        return this.readEntity(token, SDKConstants.EDORGS_ENTITY + id, id);
    }
    /**
     * Get education organizations for staff member identified by id and matching organization
     * category or first if not specified
     *
     * @param token
     * @param staffId
     * @param organizationCategory the edOrg category to match, or null/empty to
     *            take the first associated edOrg
     * @return the matching edOrg, or null when none matches
     */
    @Override
    public GenericEntity getEducationOrganizationForStaff(String token, String staffId, String organizationCategory) {
        GenericEntity staffEdOrg = null;
        List<GenericEntity> edOrgs = this.readEntityList(token, SDKConstants.STAFF_ENTITY + staffId
                + SDKConstants.STAFF_EDORG_ASSIGNMENT_ASSOC + SDKConstants.EDORGS, staffId);
        if ((organizationCategory != null) && (organizationCategory.length() > 0)) {
            for (GenericEntity edOrg : edOrgs) {
                List<String> edOrgCategories = (List<String>) edOrg.get(Constants.ATTR_ORG_CATEGORIES);
                if (edOrgCategories != null && edOrgCategories.size() > 0) {
                    for (String edOrgCategory : edOrgCategories) {
                        if (edOrgCategory.equals(organizationCategory)) {
                            staffEdOrg = edOrg;
                            // NOTE(review): this break only exits the inner category
                            // loop; the outer loop keeps scanning, so a later matching
                            // edOrg overwrites this one (last match wins) - confirm
                            // that is intended.
                            break;
                        }
                    }
                }
            }
        } else if (edOrgs.size() > 0) {
            staffEdOrg = edOrgs.get(0);
        }
        return staffEdOrg;
    }
    /**
     * Get parent educational organizations for the supplied edOrgs
     *
     * @param token
     * @param educationalOrganizations
     * @return
     */
    @Override
    public List<GenericEntity> getParentEducationalOrganizations(String token,
            List<GenericEntity> educationalOrganizations) {
        List<String> ids = this.extractAttributesFromEntities(educationalOrganizations, Constants.ATTR_PARENT_EDORG);
        return this.getEducationalOrganizations(token, ids, null);
    }
    /**
     * Get parent educational organization for the supplied edOrg
     *
     * @param token
     * @param educationalOrganization
     * @return the parent edOrg, or null when the edOrg has no parent reference
     */
    @Override
    public GenericEntity getParentEducationalOrganization(String token, GenericEntity educationalOrganization) {
        GenericEntity parentEducationOrganization = null;
        // Wrap the single edOrg so the shared attribute-extraction helper can be used.
        List<GenericEntity> educationalOrganizations = new ArrayList<GenericEntity>();
        educationalOrganizations.add(educationalOrganization);
        List<String> ids = this.extractAttributesFromEntities(educationalOrganizations, Constants.ATTR_PARENT_EDORG);
        if (ids.size() > 0) {
            String parentId = ids.get(0);
            parentEducationOrganization = this.getEducationalOrganization(token, parentId);
        }
        return parentEducationOrganization;
    }
    /**
     * Get a list of all schools depending upon user role
     *
     * @param token
     * @param ids currently unused; all schools visible to the user are returned
     * @return
     */
    @Override
    public List<GenericEntity> getSchools(String token, List<String> ids) {
        // get schools
        List<GenericEntity> schools = this.readEntityList(token, SDKConstants.SCHOOLS_ENTITY + "?" + this.buildQueryString(null));
        return schools;
    }
    /**
     * Get the schools associated with the current user, resolved through the
     * teacher-school association for educators and the staff-edOrg assignment
     * otherwise.
     *
     * @param token
     * @param ids currently unused
     * @param isEducator selects which association path to traverse
     * @return
     */
    public List<GenericEntity> getMySchools(String token, List<String> ids, boolean isEducator) {
        List<GenericEntity> schools;
        if (isEducator) {
            // get schools
            schools = this.readEntityList(token,
                    SDKConstants.TEACHERS_ENTITY + getId(token) + SDKConstants.TEACHER_SCHOOL_ASSOC
                            + SDKConstants.SCHOOLS_ENTITY + "?" + this.buildQueryString(null));
        } else {
            schools = this.readEntityList(token,
                    SDKConstants.STAFF_ENTITY + getId(token) + SDKConstants.STAFF_EDORG_ASSIGNMENT_ASSOC
                            + SDKConstants.SCHOOLS_ENTITY + "?" + this.buildQueryString(null));
        }
        return schools;
    }
    /**
     * Get a list of schools using a list of ids
     *
     * @param token
     * @param ids
     * @param params
     * @return
     */
    @Override
    public List<GenericEntity> getSchools(String token, List<String> ids, Map<String, String> params) {
        return this.readEntityList(token,
                SDKConstants.SCHOOLS_ENTITY + buildListString(ids) + "?" + this.buildQueryString(params), ids);
    }
    /**
     * Get a school identified by id
     *
     * @param token
     * @param id
     * @return
     */
    @Override
    public GenericEntity getSchool(String token, String id) {
        return this.readEntity(token, SDKConstants.SCHOOLS_ENTITY + id, id);
    }
    /**
     * Get a list of all sessions
     *
     * @param token
     * @param params
     * @return
     */
    @Override
    public List<GenericEntity> getSessions(String token, Map<String, String> params) {
        return this.readEntityList(token, SDKConstants.SESSIONS_ENTITY + "?" + this.buildQueryString(params));
    }
    /**
     * Get a list of sessions using a list of ids
     *
     * @param token
     * @param ids
     * @param params
     * @return
     */
    @Override
    public List<GenericEntity> getSessions(String token, List<String> ids, Map<String, String> params) {
        return this.readEntityList(token,
                SDKConstants.SESSIONS_ENTITY + buildListString(ids) + "?" + this.buildQueryString(params), ids);
    }
    /**
     * Get a list of sessions for the specified school year
     *
     * @param token
     * @param schoolYear
     * @return
     */
    @Override
    public List<GenericEntity> getSessionsForYear(String token, String schoolYear) {
        Map<String, String> params = new HashMap<String, String>();
        // Server-side filter on the session's schoolYear attribute.
        params.put("schoolYear", schoolYear);
        return this.readEntityList(token, SDKConstants.SESSIONS_ENTITY + "?" + this.buildQueryString(params));
    }
    /**
     * Get a session identified by id
     *
     * @param token
     * @param id
     * @return
     */
    @Override
    public GenericEntity getSession(String token, String id) {
        return this.readEntity(token, SDKConstants.SESSIONS_ENTITY + id, id);
    }
    /**
     * Get a list of all sections
     *
     * @param token
     * @param params
     * @return
     */
    @Override
    public List<GenericEntity> getSections(String token, Map<String, String> params) {
        return this.readEntityList(token, SDKConstants.SECTIONS_ENTITY + "?" + this.buildQueryString(params));
    }
    /**
     * Get a list of sections using a list of ids
     *
     * @param token
     * @param ids
     * @param params
     * @return
     */
    @Override
    public List<GenericEntity> getSections(String token, List<String> ids, Map<String, String> params) {
        return this.readEntityList(token,
                SDKConstants.SECTIONS_ENTITY + buildListString(ids) + "?" + this.buildQueryString(params), ids);
    }
    /**
     * Get all sections for a non-Educator
     *
     * @param token
     * @param params
     * @return sections enriched with session details and filtered by grace period
     */
    @Override
    public List<GenericEntity> getSectionsForNonEducator(String token, Map<String, String> params) {
        List<GenericEntity> sections = this.getSections(token, params);
        // Enrich sections with session details
        enrichSectionsWithSessionDetails(token, sections);
        // Enable filtering
        sections = filterCurrentSections(sections, true);
        return sections;
    }
    /**
     * Get all sections for a Teacher
     *
     * @param teacherId note: this parameter comes first, before the token
     * @param token
     * @param params
     * @return
     */
    @Override
    public List<GenericEntity> getSectionsForTeacher(String teacherId, String token, Map<String, String> params) {
        List<GenericEntity> sections = this.readEntityList(token,
                SDKConstants.TEACHERS_ENTITY + teacherId + SDKConstants.TEACHER_SECTION_ASSOC
                        + SDKConstants.SECTIONS_ENTITY + "?" + this.buildQueryString(params), teacherId);
        // Disable filtering, so just adding section codes to sections with no name
        sections = filterCurrentSections(sections, false);
        return sections;
    }
    /**
     * Get a list of sections for the given student id
     *
     * @param token
     * @param studentId
     * @param params
     * @return
     */
    @Override
    public List<GenericEntity> getSectionsForStudent(final String token, final String studentId,
            Map<String, String> params) {
        List<GenericEntity> sections = this.readEntityList(token,
                SDKConstants.STUDENTS_ENTITY + studentId + SDKConstants.STUDENT_SECTION_ASSOC
                        + SDKConstants.SECTIONS_ENTITY + "?" + this.buildQueryString(params), studentId);
        // Disable filtering, so just adding section codes to sections with no name
        sections = filterCurrentSections(sections, false);
        return sections;
    }
    /**
     * Get a section identified by id
     *
     * @param token
     * @param id
     * @return the section, with its name defaulted to the unique section code if absent
     */
    @Override
    public GenericEntity getSection(String token, String id) {
        GenericEntity section = this.readEntity(token, SDKConstants.SECTIONS_ENTITY + id, id);
        ensureSectionName(section);
        return section;
    }
/**
* Get student home room information
*
* @param token
* @param studentId
* @return
*/
@Override
public GenericEntity getSectionHomeForStudent(String token, String studentId) {
GenericEntity homeRoomEntity = null;
List<GenericEntity> studentSections = this.getSectionsForStudent(token, studentId, null);
// If only one section association exists for the student, return the
// section as home room
if (studentSections.size() == 1) {
homeRoomEntity = studentSections.get(0);
return homeRoomEntity;
}
// If multiple section associations exist for the student, return the
// section with homeroomIndicator set to true
for (GenericEntity studentSection : studentSections) {
List<Map<String, Object>> studentSectionAssocs = (List<Map<String, Object>>) studentSection
.get("studentSectionAssociation");
if (studentSectionAssocs != null) {
for (Map<String, Object> sectionAssoc : studentSectionAssocs) {
if ((sectionAssoc.get(Constants.ATTR_HOMEROOM_INDICATOR) != null)
&& ((Boolean) sectionAssoc.get(Constants.ATTR_HOMEROOM_INDICATOR))) {
homeRoomEntity = studentSection;
return homeRoomEntity;
}
}
}
}
return homeRoomEntity;
}
    /**
     * Get a list of courses using a list of ids
     *
     * @param token
     * @param ids
     * @param params
     * @return
     */
    @Override
    public List<GenericEntity> getCourses(String token, List<String> ids, Map<String, String> params) {
        return this.readEntityList(token,
                SDKConstants.COURSES_ENTITY + buildListString(ids) + "?" + this.buildQueryString(params), ids);
    }
/**
* Get a list of courses for the given student id
*
* @param token
* @param studentId
* @param params
* @return
*/
@Override
public List<GenericEntity> getCoursesForStudent(String token, String studentId, Map<String, String> params) {
params.put("optionalFields", "transcript");
return this.readEntityList(token, SDKConstants.SECTIONS_ENTITY + studentId + SDKConstants.STUDENT_SECTION_ASSOC
+ SDKConstants.STUDENTS + "?" + this.buildQueryString(params), studentId);
}
    /**
     * Get all courses (with their sections attached) taught at the given school,
     * scoped by the caller's role: non-educators read the school's sections
     * directly, educators read their own sections filtered by school.
     *
     * @param token
     * @param schoolId
     * @return
     */
    @Override
    public List<GenericEntity> getCoursesSectionsForSchool(String token, String schoolId) {
        // get sections
        List<GenericEntity> sections = null;
        if (SecurityUtil.isNotEducator()) {
            sections = this.readEntityList(token, SDKConstants.SCHOOLS_ENTITY + schoolId + SDKConstants.SECTIONS + "?"
                    + Constants.LIMIT + "=" + Constants.MAX_RESULTS);
            enrichSectionsWithSessionDetails(token, sections);
            sections = filterCurrentSections(sections, true);
        } else {
            String teacherId = getId(token);
            sections = getSectionsForTeacher(teacherId, token, null);
            // filter by school id
            if (schoolId != null) {
                List<GenericEntity> filteredSections = new ArrayList<GenericEntity>();
                for (GenericEntity section : sections) {
                    if (section.getString(Constants.ATTR_SCHOOL_ID) != null
                            && section.getString(Constants.ATTR_SCHOOL_ID).equals(schoolId)) {
                        filteredSections.add(section);
                    }
                }
                sections = filteredSections;
            }
        }
        // get courses
        List<GenericEntity> courses = new ArrayList<GenericEntity>();
        if (sections != null && !sections.isEmpty()) {
            courses = getCourseSectionMappings(sections, token);
        }
        return courses;
    }
    /**
     * Get a list of transcripts for the given student id
     *
     * @param token
     * @param studentId
     * @param params
     * @return
     */
    @Override
    public List<GenericEntity> getTranscriptsForStudent(String token, String studentId, Map<String, String> params) {
        return this.readEntityList(token, SDKConstants.STUDENTS_ENTITY + studentId
                + SDKConstants.STUDENT_TRANSCRIPT_ASSOC + "?" + this.buildQueryString(params), studentId);
    }
    /**
     * Get a course identified by id
     *
     * @param token
     * @param id
     * @return
     */
    @Override
    public GenericEntity getCourse(String token, String id) {
        return this.readEntity(token, SDKConstants.COURSES_ENTITY + id, id);
    }
    /**
     * Get a list of staff members using a list of ids
     *
     * @param token
     * @param ids
     * @param params
     * @return
     */
    @Override
    public List<GenericEntity> getStaff(String token, List<String> ids, Map<String, String> params) {
        return this.readEntityList(token,
                SDKConstants.STAFF_ENTITY + buildListString(ids) + "?" + this.buildQueryString(params), ids);
    }
    /**
     * Get staff member information identified by id
     *
     * @param token
     * @param id
     * @return
     */
    @Override
    public GenericEntity getStaff(String token, String id) {
        return this.readEntity(token, SDKConstants.STAFF_ENTITY + id, id);
    }
    /**
     * Get staff member information identified by id along with specified education organization of
     * category
     *
     * @param token
     * @param id
     * @param organizationCategory
     * @return the staff entity, with edOrg id and entity attached when one matches
     */
    @Override
    public GenericEntity getStaffWithEducationOrganization(String token, String id, String organizationCategory) {
        GenericEntity staffEntity = this.getStaff(token, id);
        GenericEntity edOrgEntity = this.getEducationOrganizationForStaff(token, id, organizationCategory);
        if (edOrgEntity != null) {
            String edOrgSliId = edOrgEntity.getId();
            staffEntity.put(SDKConstants.EDORG_SLI_ID_ATTRIBUTE, edOrgSliId);
            staffEntity.put(SDKConstants.EDORG_ATTRIBUTE, edOrgEntity);
        }
        return staffEntity;
    }
    /**
     * Get a list of teachers specified by a list of ids
     *
     * @param token
     * @param ids
     * @param params
     * @return
     */
    @Override
    public List<GenericEntity> getTeachers(String token, List<String> ids, Map<String, String> params) {
        return this.readEntityList(token,
                SDKConstants.TEACHERS_ENTITY + buildListString(ids) + "?" + this.buildQueryString(params), ids);
    }
    /**
     * Get a teacher identified by id
     *
     * @param token
     * @param id
     * @return
     */
    @Override
    public GenericEntity getTeacher(String token, String id) {
        return this.readEntity(token, SDKConstants.TEACHERS_ENTITY + id, id);
    }
/**
* Get the teacher for a specified section
*
* @param token
* @param sectionId
* @return
*/
@Override
public GenericEntity getTeacherForSection(String token, String sectionId) {
GenericEntity teacher = null;
List<GenericEntity> teacherSectionAssociations = this.readEntityList(token, SDKConstants.SECTIONS_ENTITY
+ sectionId + SDKConstants.TEACHER_SECTION_ASSOC + "?" + this.buildQueryString(null), sectionId);
if (teacherSectionAssociations != null) {
for (GenericEntity teacherSectionAssociation : teacherSectionAssociations) {
if (teacherSectionAssociation.getString(Constants.ATTR_CLASSROOM_POSITION).equals(
Constants.TEACHER_OF_RECORD)) {
String teacherId = teacherSectionAssociation.getString(Constants.ATTR_TEACHER_ID);
teacher = this.getTeacher(token, teacherId);
return teacher;
}
}
}
return teacher;
}
    /**
     * Get a list of parents for the given student id
     *
     * @param token
     * @param studentId
     * @param params
     * @return
     */
    @Override
    public List<GenericEntity> getParentsForStudent(String token, String studentId, Map<String, String> params) {
        return this.readEntityList(token, SDKConstants.STUDENTS_ENTITY + studentId + SDKConstants.STUDENT_PARENT_ASSOC
                + SDKConstants.PARENTS + "?" + this.buildQueryString(params), studentId);
    }
    /**
     * Get a list of all students
     *
     * @param token
     * @param params
     * @return
     */
    @Override
    public List<GenericEntity> getStudents(String token, Map<String, String> params) {
        return this.readEntityList(token, SDKConstants.STUDENTS_ENTITY + "?" + this.buildQueryString(params));
    }
    /**
     * Get a list of students specified by a list of ids
     *
     * @param token
     * @param ids
     * @param params
     * @return
     */
    @Override
    public List<GenericEntity> getStudents(String token, List<String> ids, Map<String, String> params) {
        return this.readEntityList(token,
                SDKConstants.STUDENTS_ENTITY + buildListString(ids) + "?" + this.buildQueryString(params), ids);
    }
    /**
     * Get a list of students assigned to the specified section
     *
     * @param token
     * @param sectionId
     * @return students enriched with assessments, attendance, transcript and gradebook data
     */
    @Override
    public List<GenericEntity> getStudentsForSection(String token, String sectionId) {
        Map<String, String> params = new HashMap<String, String>();
        // Request the optional enrichment views in a single call.
        String optionalParams = Constants.ATTR_ASSESSMENTS + "," + Constants.ATTR_STUDENT_ATTENDANCES_1 + ","
                + Constants.ATTR_TRANSCRIPT + "," + Constants.ATTR_GRADEBOOK;
        params.put(SDKConstants.PARAM_OPTIONAL_FIELDS, optionalParams);
        return this.readEntityList(token, SDKConstants.SECTIONS_ENTITY + sectionId + SDKConstants.STUDENT_SECTION_ASSOC
                + SDKConstants.STUDENTS + "?" + this.buildQueryString(params), sectionId);
    }
/**
* Get a list of students using name search
*
* @param token
* @param firstName
* @param lastName
* @return
*/
@Override
public List<GenericEntity> getStudentsWithSearch(String token, String firstName, String lastName) {
Map<String, String> params = new HashMap<String, String>();
if ((firstName != null) && (firstName.length() > 0)) {
params.put(SDKConstants.PARAM_FIRST_NAME, firstName);
}
if ((lastName != null) && (lastName.length() > 0)) {
params.put(SDKConstants.PARAM_LAST_NAME, lastName);
}
return this.getStudents(token, params);
}
    /**
     * Get a list of students in the specified section along with gradebook entries
     *
     * @param token
     * @param sectionId
     * @return
     */
    @Override
    public List<GenericEntity> getStudentsForSectionWithGradebookEntries(String token, String sectionId) {
        Map<String, String> params = new HashMap<String, String>();
        // Only the gradebook enrichment is requested here.
        String optionalParams = Constants.ATTR_GRADEBOOK;
        params.put(SDKConstants.PARAM_OPTIONAL_FIELDS, optionalParams);
        return this.readEntityList(token, SDKConstants.SECTIONS_ENTITY + sectionId + SDKConstants.STUDENT_SECTION_ASSOC
                + SDKConstants.STUDENTS + "?" + this.buildQueryString(params), sectionId);
    }
    /**
     * Get a student identified by id
     *
     * @param token
     * @param id
     * @return
     */
    @Override
    public GenericEntity getStudent(String token, String id) {
        return this.readEntity(token, SDKConstants.STUDENTS_ENTITY + id, id);
    }
    /**
     * Get a student identified by id including specified optional information
     *
     * @param token
     * @param id
     * @param optionalFields names of the optional enrichment views to include
     * @return
     */
    @Override
    public GenericEntity getStudentWithOptionalFields(String token, String id, List<String> optionalFields) {
        Map<String, String> params = new HashMap<String, String>();
        String optionalParams = this.buildListString(optionalFields);
        params.put(SDKConstants.PARAM_OPTIONAL_FIELDS, optionalParams);
        return this.readEntity(token, SDKConstants.STUDENTS_ENTITY + id + "?" + this.buildQueryString(params), id);
    }
    /**
     * Get a list of school enrollments for the given student id
     *
     * @param token
     * @param studentId
     * @return student-school associations, most recent entry date first, each with
     *         its school entity attached
     */
    @Override
    public List<GenericEntity> getEnrollmentForStudent(String token, String studentId) {
        Map<String, String> params = new HashMap<String, String>();
        // Sort by entry date, most recent enrollment first.
        params.put(SDKConstants.PARAM_SORT_BY, SDKConstants.PARAM_ENTRY_DATE);
        params.put(SDKConstants.PARAM_SORT_ORDER, SDKConstants.PARAM_SORT_ORDER_DESCENDING);
        List<GenericEntity> studentSchoolAssociations = this.readEntityList(token, SDKConstants.STUDENTS_ENTITY
                + studentId + SDKConstants.STUDENT_SCHOOL_ASSOC + "?" + this.buildQueryString(params), studentId);
        for (GenericEntity studentSchoolAssociation : studentSchoolAssociations) {
            // NOTE(review): the enhanced entity is assigned only to the loop variable;
            // if the enhancer returns a copy rather than mutating in place, the list
            // element is not updated and the school attached below is lost - confirm
            // the enhancer's contract.
            studentSchoolAssociation = GenericEntityEnhancer.enhanceStudentSchoolAssociation(studentSchoolAssociation);
            String schoolId = (String) studentSchoolAssociation.get(Constants.ATTR_SCHOOL_ID);
            // Retrieve the school for the corresponding student school association
            GenericEntity school = this.getSchool(token, schoolId);
            studentSchoolAssociation.put(Constants.ATTR_SCHOOL, school);
        }
        return studentSchoolAssociations;
    }
/**
 * Get the attendance records for the given student.
 *
 * @param token     the API auth token
 * @param studentId id of the student
 * @param params    extra query parameters; may be null
 * @return the student's attendance entities; empty when studentId is blank
 */
@Override
public List<GenericEntity> getAttendanceForStudent(String token, String studentId, Map<String, String> params) {
    String url = SDKConstants.STUDENTS_ENTITY + studentId + SDKConstants.ATTENDANCES_ENTITY
            + "?" + this.buildQueryString(params);
    return this.readEntityList(token, url, studentId);
}
/**
 * Get the academic records for the given student.
 *
 * @param token     the API auth token
 * @param studentId id of the student; always applied as a query filter
 * @param params    extra query parameters; may be null
 * @return the student's academic record entities; empty when studentId is blank
 */
@Override
public List<GenericEntity> getAcademicRecordsForStudent(String token, String studentId, Map<String, String> params) {
    // Bug fix: always constrain the query to the requested student. Previously a
    // null params map skipped the filter entirely, so the call queried academic
    // records for ALL students instead of the one asked for.
    if (params == null) {
        params = new HashMap<String, String>();
    }
    params.put(SDKConstants.PARAM_STUDENT_ID, studentId);
    return this.readEntityList(token, SDKConstants.ACADEMIC_RECORDS_ENTITY + "?" + this.buildQueryString(params),
            studentId);
}
/**
 * Get the assessments with the given ids.
 *
 * @param token  the API auth token
 * @param ids    assessment ids to fetch
 * @param params extra query parameters; may be null
 * @return the matching assessment entities; empty when ids is null or empty
 */
@Override
public List<GenericEntity> getAssessments(String token, List<String> ids, Map<String, String> params) {
    String url = SDKConstants.ASSESSMENTS_ENTITY + buildListString(ids) + "?" + this.buildQueryString(params);
    return this.readEntityList(token, url, ids);
}
/**
 * Get the assessment associations for the given student.
 *
 * @param token     the API auth token
 * @param studentId id of the student
 * @return the student's assessment association entities; empty when studentId is blank
 */
@Override
public List<GenericEntity> getAssessmentsForStudent(String token, String studentId) {
    // No caller-supplied parameters; buildQueryString(null) still applies the defaults.
    String url = SDKConstants.STUDENTS_ENTITY + studentId + SDKConstants.STUDENT_ASSMT_ASSOC
            + "?" + this.buildQueryString(null);
    return this.readEntityList(token, url, studentId);
}
/**
 * Fetch a single assessment by id.
 *
 * @param token the API auth token
 * @param id    the assessment id
 * @return the assessment entity, or null when the id is blank or the read fails
 */
@Override
public GenericEntity getAssessment(String token, String id) {
    String url = SDKConstants.ASSESSMENTS_ENTITY + id;
    return this.readEntity(token, url, id);
}
/*
* *****************************************************
* Core API SDK Methods
* *****************************************************
*/
/**
 * Read a single custom entity of the given class via the SDK.
 *
 * @param token       the API auth token
 * @param url         resource URL to read
 * @param entityClass class the SDK should deserialize the response into
 * @return the first entity returned, or null on error or an empty result
 */
@ExecutionTimeLogger.LogExecutionTime
protected Object readCustomEntity(String token, String url, Class entityClass) {
    Object firstResult = null;
    try {
        List<Object> results = new ArrayList<Object>();
        sdkClient.read(token, results, url, entityClass);
        if (!results.isEmpty()) {
            firstResult = results.get(0);
        }
    } catch (Exception e) {
        // Swallow and log: callers treat a null return as "not found".
        LOGGER.error("Exception occurred during API read", e);
    }
    return firstResult;
}
/**
 * Read a single resource entity via the SDK.
 *
 * @param token the API auth token
 * @param url   resource URL to read
 * @return the first entity returned, or null on error or an empty result
 */
@ExecutionTimeLogger.LogExecutionTime
protected GenericEntity readEntity(String token, String url) {
    GenericEntity firstResult = null;
    try {
        List<GenericEntity> results = new ArrayList<GenericEntity>();
        sdkClient.read(token, results, url, GenericEntity.class);
        if (!results.isEmpty()) {
            firstResult = results.get(0);
        }
    } catch (Exception e) {
        // Swallow and log: callers treat a null return as "not found".
        LOGGER.error("Exception occurred during API read", e);
    }
    return firstResult;
}
/**
 * Read a single resource entity, short-circuiting when no id was supplied.
 *
 * @param token the API auth token
 * @param url   resource URL to read
 * @param id    the id being requested; a null or empty value skips the API call
 * @return the entity, or null when the id is blank or the read fails
 */
@ExecutionTimeLogger.LogExecutionTime
protected GenericEntity readEntity(String token, String url, String id) {
    // Guard clause: a blank id can never resolve to an entity, so avoid the round trip.
    if (id == null || id.isEmpty()) {
        return null;
    }
    return readEntity(token, url);
}
/**
 * Read a list of resource entities via the SDK.
 *
 * @param token the API auth token
 * @param url   resource URL to read
 * @return the entities read; on error, whatever was accumulated so far
 *         (normally an empty list) — never null
 */
@ExecutionTimeLogger.LogExecutionTime
protected List<GenericEntity> readEntityList(String token, String url) {
    List<GenericEntity> results = new ArrayList<GenericEntity>();
    try {
        sdkClient.read(token, results, url, GenericEntity.class);
    } catch (Exception e) {
        // Swallow and log: a failed read degrades to an empty result.
        LOGGER.error("Exception occurred during API read", e);
    }
    return results;
}
/**
 * Read a list of resource entities, short-circuiting to an immutable empty
 * list when the id list is null or empty.
 *
 * @param token the API auth token
 * @param url   resource URL to read
 * @param id    the ids being requested; null/empty skips the API call
 * @return the entities read, or {@code Collections.emptyList()} for a blank id list
 */
@ExecutionTimeLogger.LogExecutionTime
protected List<GenericEntity> readEntityList(String token, String url, List id) {
    // Guard clause: nothing to fetch without ids.
    if (id == null || id.isEmpty()) {
        return Collections.emptyList();
    }
    return readEntityList(token, url);
}
/**
 * Read a list of resource entities, short-circuiting to an immutable empty
 * list when the id is null or empty.
 *
 * @param token the API auth token
 * @param url   resource URL to read
 * @param id    the id being requested; null/empty skips the API call
 * @return the entities read, or {@code Collections.emptyList()} for a blank id
 */
@ExecutionTimeLogger.LogExecutionTime
protected List<GenericEntity> readEntityList(String token, String url, String id) {
    // Guard clause: nothing to fetch without an id.
    if (id == null || id.isEmpty()) {
        return Collections.emptyList();
    }
    return readEntityList(token, url);
}
/**
 * Create a resource entity via the SDK.
 * Exceptions are logged and swallowed — consistent with the other SDK wrapper
 * methods in this class — so callers get no indication of failure.
 *
 * @param token  the API auth token
 * @param url    resource URL to post the entity to
 * @param entity the entity payload to create
 */
@ExecutionTimeLogger.LogExecutionTime
protected void createEntity(String token, String url, GenericEntity entity) {
    try {
        sdkClient.create(token, url, entity);
    } catch (Exception e) {
        LOGGER.error("Exception occurred during API create", e);
    }
}
/**
 * Update a resource entity via the SDK.
 * Exceptions are logged and swallowed — consistent with the other SDK wrapper
 * methods in this class — so callers get no indication of failure.
 *
 * @param token  the API auth token
 * @param url    resource URL of the entity to update
 * @param entity the updated entity payload
 */
@ExecutionTimeLogger.LogExecutionTime
protected void updateEntity(String token, String url, GenericEntity entity) {
    try {
        sdkClient.update(token, url, entity);
    } catch (Exception e) {
        LOGGER.error("Exception occurred during API update", e);
    }
}
/**
 * Delete a resource entity via the SDK.
 * Exceptions are logged and swallowed — consistent with the other SDK wrapper
 * methods in this class — so callers get no indication of failure.
 *
 * @param token the API auth token
 * @param url   resource URL of the entity to delete
 */
@ExecutionTimeLogger.LogExecutionTime
protected void deleteEntity(String token, String url) {
    try {
        sdkClient.deleteByToken(token, url);
    } catch (Exception e) {
        LOGGER.error("Exception occurred during API delete", e);
    }
}
/*
* *****************************************************
* API Helper Methods
* *****************************************************
*/
/**
 * Given a link map from an API response, extract the entity's unique id:
 * the final path segment of the link's href attribute.
 *
 * @param linkMap map holding the link attributes, including href
 * @return the substring following the last '/' in the href
 */
private String parseId(Map linkMap) {
    // Look up and cast the href once instead of performing the same
    // map lookup and cast twice as before.
    String href = (String) linkMap.get(Constants.ATTR_HREF);
    return href.substring(href.lastIndexOf("/") + 1);
}
/**
 * Collect the non-blank values of the named attribute from each entity.
 *
 * @param entities      entities to scan; may be null
 * @param attributeName attribute whose values are collected
 * @return the attribute values, in entity order; never null
 */
private List<String> extractAttributesFromEntities(List<GenericEntity> entities, String attributeName) {
    List<String> values = new ArrayList<String>();
    if (entities == null) {
        return values;
    }
    for (GenericEntity entity : entities) {
        String value = (String) entity.get(attributeName);
        if (value != null && !value.isEmpty()) {
            values.add(value);
        }
    }
    return values;
}
/**
 * Collect the hrefs of all links on the entity whose "rel" value contains
 * the given relationship name.
 *
 * @param entity entity carrying a links attribute; may be null
 * @param rel    relationship substring to match against each link's rel
 * @return matching hrefs, in link order; never null
 */
private List<String> extractLinksFromEntity(GenericEntity entity, String rel) {
    List<String> hrefs = new ArrayList<String>();
    if (entity == null || !entity.containsKey(Constants.ATTR_LINKS)) {
        return hrefs;
    }
    for (Map link : (List<Map>) (entity.get(Constants.ATTR_LINKS))) {
        // Substring match: rel values may carry text around the relationship name.
        if (link.get(Constants.ATTR_REL).toString().contains(rel)) {
            hrefs.add((String) link.get(Constants.ATTR_HREF));
        }
    }
    return hrefs;
}
/**
 * Attach the full session entity to every section that references one, so
 * later filtering can consult session dates without extra API calls.
 *
 * @param token    the API auth token
 * @param sections sections to enrich in place; may be null
 */
private void enrichSectionsWithSessionDetails(String token, List<GenericEntity> sections) {
    List<GenericEntity> sessions = this.getSessions(token, null);
    if (sessions == null || sections == null) {
        return;
    }
    // Index sessions by id for O(1) lookup per section.
    Map<String, GenericEntity> sessionsById = new HashMap<String, GenericEntity>();
    for (GenericEntity session : sessions) {
        sessionsById.put(session.getId(), session);
    }
    for (GenericEntity section : sections) {
        String sessionId = (String) section.get(Constants.ATTR_SESSION_ID);
        if (sessionId != null) {
            // The stored value may be null when the referenced session was not found.
            section.put(Constants.ATTR_SESSION, sessionsById.get(sessionId));
        }
    }
}
/**
 * Normalize section names and, when requested, drop sections whose session
 * ended before the configured grace period.
 *
 * When {@code filterHistoricalData} is false the input list is returned
 * unchanged (apart from name normalization, done in place). When true, a new
 * list is built containing only sections whose enriched session end date is
 * on or after (now - gracePeriod days). Sections lacking an attached session
 * or with unparseable dates are excluded from the filtered result.
 *
 * @param sections             sections to process; may be null
 * @param filterHistoricalData whether to drop sections from ended sessions
 * @return the (possibly filtered) section list; may be the input instance
 */
private List<GenericEntity> filterCurrentSections(List<GenericEntity> sections, boolean filterHistoricalData) {
    List<GenericEntity> filteredSections = sections;
    if (filterHistoricalData) {
        filteredSections = new ArrayList<GenericEntity>();
    }
    if (sections != null && sections.size() > 0) {
        // Setup grace period date: now minus the configured number of days.
        Calendar gracePeriodCalendar = Calendar.getInstance();
        gracePeriodCalendar.setTimeInMillis(System.currentTimeMillis());
        try {
            if (gracePeriod != null && !gracePeriod.equals("")) {
                // gracePeriod is configured as a day count; subtract it from today.
                int daysToSubtract = Integer.parseInt(gracePeriod) * -1;
                gracePeriodCalendar.add(Calendar.DATE, daysToSubtract);
            }
        } catch (NumberFormatException exception) {
            // A bad config value falls back to "now" as the cutoff.
            LOGGER.warn("Invalid grace period: {}", exception.getMessage());
        }
        for (GenericEntity section : sections) {
            // Ensure section name (defaults to the unique section code).
            ensureSectionName(section);
            // Filter historical sections/sessions if necessary.
            if (filterHistoricalData) {
                Map<String, Object> session = (Map<String, Object>) section.get(Constants.ATTR_SESSION);
                // Verify section has been enriched with session details;
                // sections without a session are silently dropped when filtering.
                if (session != null) {
                    try {
                        // Setup session end date from the enriched session map.
                        String endDateAttribute = (String) session.get(Constants.ATTR_SESSION_END_DATE);
                        DateFormat formatter = new SimpleDateFormat(Constants.ATTR_DATE_FORMAT);
                        Date sessionEndDate = formatter.parse(endDateAttribute);
                        Calendar sessionEndCalendar = Calendar.getInstance();
                        sessionEndCalendar.setTimeInMillis(sessionEndDate.getTime());
                        // Add filtered section if grace period adjusted date is before
                        // or equal to session end date.
                        if (gracePeriodCalendar.compareTo(sessionEndCalendar) <= 0) {
                            filteredSections.add(section);
                        }
                    } catch (IllegalArgumentException exception) {
                        LOGGER.warn("Invalid session date formatter configuration: {}", exception.getMessage());
                    } catch (ParseException exception) {
                        LOGGER.warn("Invalid session date format: {}", exception.getMessage());
                    }
                }
            }
        }
    }
    return filteredSections;
}
/**
 * Associate schools with the courses taught at them, derived transitively
 * through sections: ed-fi has no direct course-school association, so any
 * section tied to both a school and a course links that course to that school.
 * Each returned school entity has its courses appended under
 * {@code Constants.ATTR_COURSES}.
 *
 * @param schools  candidate schools
 * @param sections sections used to derive the school-course links
 * @param token    the API auth token
 * @return schools (only those matched by at least one section) with courses attached
 */
private List<GenericEntity> matchSchoolsAndSections(List<GenericEntity> schools, List<GenericEntity> sections,
        String token) {
    // collect associated course first: fills courseMap and section -> course links.
    HashMap<String, GenericEntity> courseMap = new HashMap<String, GenericEntity>();
    HashMap<String, String> sectionIDToCourseIDMap = new HashMap<String, String>();
    getCourseSectionsMappings(sections, token, courseMap, sectionIDToCourseIDMap);
    // now collect associated schools: fills schoolMap and section -> school links.
    HashMap<String, GenericEntity> schoolMap = new HashMap<String, GenericEntity>();
    HashMap<String, String> sectionIDToSchoolIDMap = new HashMap<String, String>();
    getSchoolSectionsMappings(sections, token, schools, schoolMap, sectionIDToSchoolIDMap);
    // Now associate course and school.
    // There is no direct course-school association in ed-fi. For any section associated to
    // a school, its course will also be associated.
    HashMap<String, HashSet<String>> schoolIDToCourseIDMap = new HashMap<String, HashSet<String>>();
    if (sections != null) {
        for (int i = 0; i < sections.size(); i++) {
            GenericEntity section = sections.get(i);
            // Only sections resolvable to BOTH a school and a course contribute a link.
            if (sectionIDToSchoolIDMap.containsKey(section.get(Constants.ATTR_ID))
                    && sectionIDToCourseIDMap.containsKey(section.get(Constants.ATTR_ID))) {
                String schoolId = sectionIDToSchoolIDMap.get(section.get(Constants.ATTR_ID));
                String courseId = sectionIDToCourseIDMap.get(section.get(Constants.ATTR_ID));
                if (!schoolIDToCourseIDMap.containsKey(schoolId)) {
                    schoolIDToCourseIDMap.put(schoolId, new HashSet<String>());
                }
                schoolIDToCourseIDMap.get(schoolId).add(courseId);
            }
        }
    }
    // now create the generic entity: append each linked course onto its school.
    for (String schoolId : schoolIDToCourseIDMap.keySet()) {
        GenericEntity s = schoolMap.get(schoolId);
        for (String courseId : schoolIDToCourseIDMap.get(schoolId)) {
            GenericEntity c = courseMap.get(courseId);
            s.appendToList(Constants.ATTR_COURSES, c);
        }
    }
    return new ArrayList<GenericEntity>(schoolMap.values());
}
/**
 * Build course entities with their sections attached, resolving each
 * section's course through its course offering (section -> courseOffering ->
 * course). Only courses that matched at least one section are returned; each
 * has its sections appended under {@code Constants.ATTR_SECTIONS}.
 *
 * @param sections sections to group by course; may be null
 * @param token    the API auth token
 * @return the matched courses with sections attached; never null
 */
private List<GenericEntity> getCourseSectionMappings(List<GenericEntity> sections, String token) {
    Map<String, GenericEntity> courseMap = new HashMap<String, GenericEntity>();
    // NOTE(review): sectionIDToCourseIDMap is populated below but never read or
    // returned from this method — presumably a leftover from the out-parameter
    // variant getCourseSectionsMappings; confirm before removing.
    Map<String, String> sectionIDToCourseIDMap = new HashMap<String, String>();
    // this temporary sectionLookup will be used for cross reference between
    // courseId and
    // section.
    Map<String, Set<GenericEntity>> sectionLookup = new HashMap<String, Set<GenericEntity>>();
    // iterate each section
    if (sections != null) {
        Map<String, String> courseOfferingToCourseIDMap = new HashMap<String, String>();
        // find the course for each course offering
        List<GenericEntity> courseOfferings = readEntityList(token, SDKConstants.COURSE_OFFERINGS + "?" + this.buildQueryString(null));
        if (courseOfferings != null) {
            for (GenericEntity courseOffering : courseOfferings) {
                // Get course using courseId reference in section
                String courseOfferingId = (String) courseOffering.get(Constants.ATTR_ID);
                String courseId = (String) courseOffering.get(Constants.ATTR_COURSE_ID);
                courseOfferingToCourseIDMap.put(courseOfferingId, courseId);
            }
        }
        for (GenericEntity section : sections) {
            // Resolve the section's course through its course offering.
            String courseOfferingId = (String) section.get(Constants.ATTR_COURSE_OFFERING_ID);
            String courseId = courseOfferingToCourseIDMap.get(courseOfferingId);
            if (!sectionLookup.containsKey(courseId)) {
                sectionLookup.put(courseId, new HashSet<GenericEntity>());
            }
            sectionLookup.get(courseId).add(section);
        }
        // get course Entity
        List<GenericEntity> courses = readEntityList(token, SDKConstants.COURSES_ENTITY + "?" + this.buildQueryString(null));
        // update courseMap with courseId. "id" for this entity
        for (GenericEntity course : courses) {
            // Add course to courseMap
            //courseMap.put(course.getId(), course);
            Set<GenericEntity> matchedSections = sectionLookup.get(course.getId());
            if (matchedSections != null) {
                // Add course to courseMap (only courses with at least one section).
                courseMap.put(course.getId(), course);
                Iterator<GenericEntity> sectionEntities = matchedSections.iterator();
                while (sectionEntities.hasNext()) {
                    GenericEntity sectionEntity = sectionEntities.next();
                    course.appendToList(Constants.ATTR_SECTIONS, sectionEntity);
                    // update sectionIdToCourseIdMap
                    sectionIDToCourseIDMap.put(sectionEntity.getId(), course.getId());
                }
            }
        }
    }
    return new ArrayList<GenericEntity>(courseMap.values());
}
/**
 * Resolve the courses referenced by the given sections (via each section's
 * direct courseId attribute), attach matched sections to each course, and
 * report the results through the two out-parameter maps.
 *
 * @param sections               sections to resolve; may be null
 * @param token                  the API auth token
 * @param courseMap              out: courseId -> course entity, for every course fetched
 * @param sectionIDToCourseIDMap out: sectionId -> courseId, for every matched section
 */
private void getCourseSectionsMappings(List<GenericEntity> sections, String token,
        Map<String, GenericEntity> courseMap, Map<String, String> sectionIDToCourseIDMap) {
    // this variable is used to prevent sending duplicate courseId to API
    Set<String> courseIdTracker = new HashSet<String>();
    // this temporary sectionLookup will be used for cross reference between
    // courseId and
    // section.
    Map<String, Set<GenericEntity>> sectionLookup = new HashMap<String, Set<GenericEntity>>();
    List<String> courseIds = new ArrayList<String>();
    // iterate each section
    if (sections != null) {
        for (GenericEntity section : sections) {
            // Get course using courseId reference in section
            String courseId = (String) section.get(Constants.ATTR_COURSE_ID);
            // search course which doesn't exist already (courses pre-seeded in
            // courseMap by the caller are not re-fetched).
            if (!courseMap.containsKey(courseId)) {
                if (!courseIdTracker.contains(courseId)) {
                    courseIds.add(courseId);
                    courseIdTracker.add(courseId);
                }
                if (!sectionLookup.containsKey(courseId)) {
                    sectionLookup.put(courseId, new HashSet<GenericEntity>());
                }
                sectionLookup.get(courseId).add(section);
            }
        }
    }
    // get Entities by given courseIds
    if (courseIds.size() > 0) {
        // get course Entities
        List<GenericEntity> courses = getCourses(token, courseIds, null);
        // update courseMap with courseId. "id" for this entity
        for (GenericEntity course : courses) {
            // Add course to courseMap
            courseMap.put(course.getId(), course);
            Set<GenericEntity> matchedSections = sectionLookup.get(course.getId());
            if (matchedSections != null) {
                Iterator<GenericEntity> sectionEntities = matchedSections.iterator();
                while (sectionEntities.hasNext()) {
                    GenericEntity sectionEntity = sectionEntities.next();
                    course.appendToList(Constants.ATTR_SECTIONS, sectionEntity);
                    // update sectionIdToCourseIdMap
                    sectionIDToCourseIDMap.put(sectionEntity.getId(), course.getId());
                }
            }
        }
    }
}
/**
 * Match the given sections to the given schools (via each section's schoolId
 * attribute) and report the results through the two out-parameter maps.
 * Only schools matched by at least one section are added to schoolMap.
 *
 * @param sections               sections to match; may be null
 * @param token                  the API auth token (unused here; kept for signature symmetry)
 * @param schools                candidate schools; may be null
 * @param schoolMap              out: schoolId -> school entity, for matched schools
 * @param sectionIDToSchoolIDMap out: sectionId -> schoolId, for matched sections
 */
private void getSchoolSectionsMappings(List<GenericEntity> sections, String token, List<GenericEntity> schools,
        Map<String, GenericEntity> schoolMap, Map<String, String> sectionIDToSchoolIDMap) {
    // temporary cross reference between schoolId and sections
    Map<String, Set<GenericEntity>> sectionLookup = new HashMap<String, Set<GenericEntity>>();
    // iterate each section
    if (sections != null) {
        for (GenericEntity section : sections) {
            String schoolId = (String) section.get(Constants.ATTR_SCHOOL_ID);
            // search school which doesn't exist already (schools pre-seeded in
            // schoolMap by the caller are skipped).
            if (!schoolMap.containsKey(schoolId)) {
                if (!sectionLookup.containsKey(schoolId)) {
                    sectionLookup.put(schoolId, new HashSet<GenericEntity>());
                }
                sectionLookup.get(schoolId).add(section);
            }
        }
    }
    if (schools != null) {
        // update schoolMap with schoolId. "id" for this entity
        for (GenericEntity school : schools) {
            String schoolId = school.getId();
            Set<GenericEntity> matchedSections = sectionLookup.get(schoolId);
            if (matchedSections != null) {
                for (GenericEntity sectionEntity : matchedSections) {
                    // Add school to schoolmap
                    schoolMap.put(school.getId(), school);
                    // update sectionIdToSchoolIdMap
                    sectionIDToSchoolIDMap.put(sectionEntity.getId(), schoolId);
                }
            }
        }
    }
}
/** Default a section's display name to its unique section code when unset. */
private void ensureSectionName(GenericEntity section) {
    if (section == null) {
        return;
    }
    if (section.get(Constants.ATTR_SECTION_NAME) == null) {
        section.put(Constants.ATTR_SECTION_NAME, section.get(Constants.ATTR_UNIQUE_SECTION_CODE));
    }
}
/**
 * Join the given strings into one comma-separated value.
 *
 * @param items strings to join; may be null
 * @return the joined string, or "" when items is null
 */
private String buildListString(List<String> items) {
    if (items == null) {
        return "";
    }
    return StringUtils.join(items, ",");
}
/**
 * Render the given parameters as an HTTP query string, forcing a result
 * limit (effectively disabling paging) when the caller did not set one.
 *
 * @param params query parameters; may be null
 * @return the "key=value&amp;key=value" query string; never null
 */
private String buildQueryString(Map<String, String> params) {
    // Setup defaults including paging disabled.
    if (params == null) {
        params = new HashMap<String, String>();
    }
    if (!params.containsKey(Constants.LIMIT)) {
        params.put(Constants.LIMIT, String.valueOf(Constants.MAX_RESULTS));
    }
    // NOTE(review): values are not URL-encoded here — presumably every caller
    // passes URL-safe values; confirm before routing free-form input through.
    List<String> pairs = new ArrayList<String>();
    for (Map.Entry<String, String> entry : params.entrySet()) {
        pairs.add(entry.getKey() + "=" + entry.getValue());
    }
    return StringUtils.join(pairs, "&");
}
}
|
Having staff dig their schools
|
sli/dashboard/src/main/java/org/slc/sli/dashboard/client/SDKAPIClient.java
|
Having staff dig their schools
|
|
Java
|
apache-2.0
|
d407684938f6d55be353a0ab759cba0dfbe47ee9
| 0
|
code4craft/ibatis-plugin
|
package org.intellij.ibatis.inspections;
import com.intellij.codeInspection.LocalQuickFix;
import com.intellij.codeInspection.ProblemDescriptor;
import com.intellij.lang.annotation.HighlightSeverity;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.PsiClass;
import com.intellij.psi.PsiMethod;
import com.intellij.psi.PsiParameter;
import com.intellij.psi.PsiType;
import com.intellij.util.IncorrectOperationException;
import com.intellij.util.xml.highlighting.DomElementAnnotationHolder;
import org.intellij.ibatis.IbatisSqlMapModel;
import org.intellij.ibatis.dom.sqlMap.Result;
import org.intellij.ibatis.dom.sqlMap.ResultMap;
import org.intellij.ibatis.dom.sqlMap.SqlMap;
import org.intellij.ibatis.provider.FieldAccessMethodReferenceProvider;
import org.intellij.ibatis.util.IbatisBundle;
import org.jetbrains.annotations.Nls;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import java.util.List;
/**
* inspection for set null to primary type variable
*/
public class NullSettedToPrimaryTypeInspection extends SqlMapInspection {
@Nls @NotNull public String getDisplayName() {
return IbatisBundle.message("ibatis.sqlmap.inspection.nulltoprimarytype.name");
}
@NonNls @NotNull public String getShortName() {
return IbatisBundle.message("ibatis.sqlmap.inspection.nulltoprimarytype.id");
}
@SuppressWarnings({"ConstantConditions"})
protected void checkResultMap(IbatisSqlMapModel sqlMapModel, SqlMap sqlMap, ResultMap resultMap, DomElementAnnotationHolder holder) {
PsiClass psiClass = resultMap.getClazz().getValue();
if (psiClass == null) return;
List<Result> results = resultMap.getResults();
for (Result result : results) {
if (result.getXmlTag().getAttribute("nullValue") == null) { //null value setter
String propertyName = result.getProperty().getValue();
PsiMethod setMethod = null;
if (propertyName!=null && !propertyName.contains(".")) {
PsiMethod[] methods = psiClass.findMethodsByName("set" + StringUtil.capitalize(propertyName), true);
if (methods.length > 0) {
setMethod = methods[0];
}
} else //deep child
{
String field1 = propertyName.substring(0, propertyName.indexOf('.'));
String field2 = propertyName.substring(propertyName.indexOf('.') + 1);
PsiClass fieldClass = FieldAccessMethodReferenceProvider.findGetterMethodReturnType(psiClass, "get" + StringUtil.capitalize(field1));
if (fieldClass != null) {
PsiMethod[] methods = fieldClass.findMethodsByName("set" + StringUtil.capitalize(field2), true);
if (methods.length > 0) {
setMethod = methods[0];
}
}
}
if (setMethod != null) {
PsiParameter[] psiParameters = setMethod.getParameterList().getParameters();
if (psiParameters.length == 1) {
PsiType[] superTypes = psiParameters[0].getType().getSuperTypes();
if (superTypes.length < 1) // primary type
{
holder.createProblem(result, HighlightSeverity.INFO, IbatisBundle.message("ibatis.sqlmap.inspection.nulltoprimarytype.error"), new AddNullValueForResultElementQuickFix(result));
}
}
}
}
}
}
public class AddNullValueForResultElementQuickFix implements LocalQuickFix {
private Result result;
public AddNullValueForResultElementQuickFix(Result result) {
this.result = result;
}
@NotNull public String getName() {
return "add nullValue for result element";
}
@NotNull public String getFamilyName() {
return "add nullValue for result element";
}
@SuppressWarnings({"ConstantConditions"})
public void applyFix(@NotNull Project project, @NotNull ProblemDescriptor problemDescriptor) {
try {
result.getXmlTag().setAttribute("nullValue", "0");
} catch (IncorrectOperationException e) {
e.printStackTrace();
}
}
}
}
|
src/org/intellij/ibatis/inspections/NullSettedToPrimaryTypeInspection.java
|
package org.intellij.ibatis.inspections;
import com.intellij.codeInspection.LocalQuickFix;
import com.intellij.codeInspection.ProblemDescriptor;
import com.intellij.lang.annotation.HighlightSeverity;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.PsiClass;
import com.intellij.psi.PsiMethod;
import com.intellij.psi.PsiParameter;
import com.intellij.psi.PsiType;
import com.intellij.util.IncorrectOperationException;
import com.intellij.util.xml.highlighting.DomElementAnnotationHolder;
import org.intellij.ibatis.IbatisSqlMapModel;
import org.intellij.ibatis.dom.sqlMap.Result;
import org.intellij.ibatis.dom.sqlMap.ResultMap;
import org.intellij.ibatis.dom.sqlMap.SqlMap;
import org.intellij.ibatis.provider.FieldAccessMethodReferenceProvider;
import org.intellij.ibatis.util.IbatisBundle;
import org.jetbrains.annotations.Nls;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import java.util.List;
/**
 * Inspection that flags result-map {@code <result>} elements mapping a column
 * onto a primitive-typed bean property without a {@code nullValue} attribute:
 * assigning SQL NULL into a primitive setter cannot succeed at runtime.
 */
public class NullSettedToPrimaryTypeInspection extends SqlMapInspection {
    @Nls @NotNull public String getDisplayName() {
        return IbatisBundle.message("ibatis.sqlmap.inspection.nulltoprimarytype.name");
    }
    @NonNls @NotNull public String getShortName() {
        return IbatisBundle.message("ibatis.sqlmap.inspection.nulltoprimarytype.id");
    }
    @SuppressWarnings({"ConstantConditions"})
    protected void checkResultMap(IbatisSqlMapModel sqlMapModel, SqlMap sqlMap, ResultMap resultMap, DomElementAnnotationHolder holder) {
        PsiClass psiClass = resultMap.getClazz().getValue();
        if (psiClass == null) return;
        List<Result> results = resultMap.getResults();
        for (Result result : results) {
            if (result.getXmlTag().getAttribute("nullValue") == null) { //null value setter
                String propertyName = result.getProperty().getValue();
                PsiMethod setMethod = null;
                // NOTE(review): when propertyName is null this condition is false and
                // control falls into the else-branch, which dereferences propertyName
                // (substring/indexOf) — a latent NullPointerException. Confirm and guard.
                if (propertyName!=null && !propertyName.contains(".")) {
                    // Simple property: look up the setter directly on the mapped class.
                    PsiMethod[] methods = psiClass.findMethodsByName("set" + StringUtil.capitalize(propertyName), true);
                    if (methods.length > 0) {
                        setMethod = methods[0];
                    }
                } else //deep child
                {
                    // One-level nested property "child.name": resolve the child's class
                    // via its getter, then look up the setter there.
                    String field1 = propertyName.substring(0, propertyName.indexOf('.'));
                    String field2 = propertyName.substring(propertyName.indexOf('.') + 1);
                    PsiClass fieldClass = FieldAccessMethodReferenceProvider.findGetterMethodReturnType(psiClass, "get" + StringUtil.capitalize(field1));
                    if (fieldClass != null) {
                        PsiMethod[] methods = fieldClass.findMethodsByName("set" + StringUtil.capitalize(field2), true);
                        if (methods.length > 0) {
                            setMethod = methods[0];
                        }
                    }
                }
                if (setMethod != null) {
                    PsiParameter[] psiParameters = setMethod.getParameterList().getParameters();
                    if (psiParameters.length == 1) {
                        // A parameter type with no supertypes at all (not even Object)
                        // is a primitive type.
                        PsiType[] superTypes = psiParameters[0].getType().getSuperTypes();
                        if (superTypes.length < 1) // primary type
                        {
                            holder.createProblem(result, HighlightSeverity.WARNING, IbatisBundle.message("ibatis.sqlmap.inspection.nulltoprimarytype.error"), new AddNullValueForResultElementQuickFix(result));
                        }
                    }
                }
            }
        }
    }
    /** Quick fix that inserts nullValue="0" on the offending result element. */
    public class AddNullValueForResultElementQuickFix implements LocalQuickFix {
        private Result result;
        public AddNullValueForResultElementQuickFix(Result result) {
            this.result = result;
        }
        @NotNull public String getName() {
            return "add nullValue for result element";
        }
        @NotNull public String getFamilyName() {
            return "add nullValue for result element";
        }
        @SuppressWarnings({"ConstantConditions"})
        public void applyFix(@NotNull Project project, @NotNull ProblemDescriptor problemDescriptor) {
            try {
                result.getXmlTag().setAttribute("nullValue", "0");
            } catch (IncorrectOperationException e) {
                e.printStackTrace();
            }
        }
    }
}
|
minor update
git-svn-id: a3757b7b28cb5f166982e3e590f726e1a71ef014@524 a27988d2-9830-0410-8773-31d6671a21de
|
src/org/intellij/ibatis/inspections/NullSettedToPrimaryTypeInspection.java
|
minor update
|
|
Java
|
apache-2.0
|
24bcca11629dbd6302161cc36cb312595b2a1790
| 0
|
tgrall/drill,vkorukanti/drill,yufeldman/incubator-drill,santoshsahoo/drill,kkhatua/drill,amithadke/drill,ebegoli/drill,KulykRoman/drill,KulykRoman/drill,weijietong/drill,arina-ielchiieva/drill,nagix/drill,julianhyde/drill,mehant/drill,hnfgns/incubator-drill,arina-ielchiieva/drill,johanwitters/drill,weijietong/drill,apache/drill,akumarb2010/incubator-drill,Ben-Zvi/drill,sindhurirayavaram/drill,dsbos/incubator-drill,jdownton/drill,kkhatua/drill,activitystream/drill,mapr-demos/drill-pcap-format,jinfengni/incubator-drill,weijietong/drill,parthchandra/incubator-drill,dsbos/incubator-drill,mapr-demos/drill-pcap-format,santoshsahoo/drill,superbstreak/drill,pwong-mapr/incubator-drill,tgrall/drill,superbstreak/drill,caijieming-baidu/drill,hnfgns/incubator-drill,caijieming-baidu/drill,julianhyde/drill,parthchandra/incubator-drill,johanwitters/drill,yufeldman/incubator-drill,bitblender/drill,ebegoli/drill,tgrall/drill,arina-ielchiieva/drill,apache/drill,paul-rogers/drill,KulykRoman/drill,yufeldman/incubator-drill,caijieming-baidu/drill,rchallapalli/drill,ebegoli/drill,abhipol/drill,arina-ielchiieva/drill,paul-rogers/drill,yufeldman/incubator-drill,bitblender/drill,cchang738/drill,johnnywale/drill,ppadma/drill,jinfengni/incubator-drill,vkorukanti/drill,Agirish/drill,Ben-Zvi/drill,jdownton/drill,arina-ielchiieva/drill,kingmesal/drill,hsuanyi/incubator-drill,cchang738/drill,sohami/drill,bitblender/drill,hnfgns/incubator-drill,superbstreak/drill,yufeldman/incubator-drill,apache/drill,sudheeshkatkam/drill,ppadma/drill,jackyxhb/drill,rchallapalli/drill,jdownton/drill,vdiravka/drill,kingmesal/drill,nagix/drill,StevenMPhillips/drill,vvysotskyi/drill,vvysotskyi/drill,sindhurirayavaram/drill,ebegoli/drill,vkorukanti/drill,activitystream/drill,akumarb2010/incubator-drill,adityakishore/drill,mapr/incubator-drill,StevenMPhillips/drill,Ben-Zvi/drill,cchang738/drill,hsuanyi/incubator-drill,Serhii-Harnyk/drill,Serhii-Harnyk/drill,amithadke/drill,adeneche/incubator-drill,StevenMPhillips/drill,
KulykRoman/drill,pwong-mapr/incubator-drill,myroch/drill,KulykRoman/drill,jinfengni/incubator-drill,activitystream/drill,kingmesal/drill,cchang738/drill,tgrall/drill,cwestin/incubator-drill,dsbos/incubator-drill,myroch/drill,ppadma/drill,mehant/drill,jackyxhb/drill,Ben-Zvi/drill,adityakishore/drill,apache/drill,rchallapalli/drill,Serhii-Harnyk/drill,pwong-mapr/incubator-drill,adityakishore/drill,paul-rogers/drill,hnfgns/incubator-drill,ssriniva123/drill,adityakishore/drill,mapr/incubator-drill,mapr-demos/drill-pcap-format,sohami/drill,akumarb2010/incubator-drill,sudheeshkatkam/drill,abhipol/drill,dsbos/incubator-drill,parthchandra/incubator-drill,hsuanyi/incubator-drill,ssriniva123/drill,johnnywale/drill,johanwitters/drill,kkhatua/drill,cchang738/drill,StevenMPhillips/drill,Serhii-Harnyk/drill,abhipol/drill,sudheeshkatkam/drill,ppadma/drill,parthchandra/drill,jdownton/drill,johnnywale/drill,santoshsahoo/drill,superbstreak/drill,kkhatua/drill,cwestin/incubator-drill,parthchandra/incubator-drill,adeneche/incubator-drill,abhipol/drill,jdownton/drill,Serhii-Harnyk/drill,Agirish/drill,paul-rogers/drill,adeneche/incubator-drill,caijieming-baidu/drill,caijieming-baidu/drill,jinfengni/incubator-drill,dsbos/incubator-drill,mapr/incubator-drill,akumarb2010/incubator-drill,activitystream/drill,ssriniva123/drill,santoshsahoo/drill,vdiravka/drill,apache/drill,KulykRoman/drill,cwestin/incubator-drill,sudheeshkatkam/drill,parthchandra/drill,bitblender/drill,activitystream/drill,johnnywale/drill,johnnywale/drill,Agirish/drill,sindhurirayavaram/drill,mehant/drill,nagix/drill,kkhatua/drill,mapr/incubator-drill,vvysotskyi/drill,vvysotskyi/drill,mapr-demos/drill-pcap-format,jackyxhb/drill,vdiravka/drill,adeneche/incubator-drill,sohami/drill,bitblender/drill,mehant/drill,kingmesal/drill,StevenMPhillips/drill,amithadke/drill,amithadke/drill,apache/drill,hnfgns/incubator-drill,vkorukanti/drill,myroch/drill,adeneche/incubator-drill,parthchandra/drill,jackyxhb/drill,vdiravka/drill,parthchan
dra/drill,myroch/drill,Agirish/drill,sindhurirayavaram/drill,julianhyde/drill,amithadke/drill,pwong-mapr/incubator-drill,parthchandra/drill,adityakishore/drill,sudheeshkatkam/drill,jackyxhb/drill,superbstreak/drill,tgrall/drill,santoshsahoo/drill,sohami/drill,julianhyde/drill,rchallapalli/drill,nagix/drill,arina-ielchiieva/drill,pwong-mapr/incubator-drill,julianhyde/drill,ppadma/drill,mapr/incubator-drill,vvysotskyi/drill,akumarb2010/incubator-drill,nagix/drill,superbstreak/drill,rchallapalli/drill,pwong-mapr/incubator-drill,cwestin/incubator-drill,myroch/drill,mapr-demos/drill-pcap-format,parthchandra/drill,jinfengni/incubator-drill,vdiravka/drill,johanwitters/drill,sohami/drill,weijietong/drill,Ben-Zvi/drill,vvysotskyi/drill,ssriniva123/drill,Agirish/drill,cwestin/incubator-drill,ssriniva123/drill,mehant/drill,paul-rogers/drill,hsuanyi/incubator-drill,vkorukanti/drill,ebegoli/drill,cchang738/drill,parthchandra/incubator-drill,mapr-demos/drill-pcap-format,johanwitters/drill,Ben-Zvi/drill,kkhatua/drill,hsuanyi/incubator-drill,ppadma/drill,paul-rogers/drill,nagix/drill,mapr/incubator-drill,abhipol/drill,Agirish/drill,kingmesal/drill,sindhurirayavaram/drill,sohami/drill,weijietong/drill
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.drill.exec;
import org.apache.drill.BaseTestQuery;
import org.apache.drill.common.exceptions.UserException;
import org.apache.drill.common.util.FileUtils;
import org.apache.drill.common.util.TestTools;
import org.apache.drill.exec.work.foreman.SqlUnsupportedException;
import org.apache.drill.exec.work.foreman.UnsupportedFunctionException;
import org.apache.drill.PlanTestBase;
import org.junit.Test;
public class TestWindowFunctions extends BaseTestQuery {
static final String WORKING_PATH = TestTools.getWorkingPath();
static final String TEST_RES_PATH = WORKING_PATH + "/src/test/resources";
/**
 * Re-throws the given {@link UserException} as the concrete
 * {@link SqlUnsupportedException} subclass named inside its protobuf error
 * payload, so tests annotated with {@code @Test(expected = ...)} can assert
 * on the specific unsupported-feature exception type.
 */
private static void throwAsUnsupportedException(UserException ex) throws Exception {
  // Looks up the original exception class name carried in the PB error and
  // throws an instance of it; if that lookup does not throw, re-throw ex.
  SqlUnsupportedException.errorClassNameToException(ex.getOrCreatePBError(false).getException().getExceptionClass());
  throw ex;
}
@Test // DRILL-3196
public void testSinglePartition() throws Exception {
  // SUM and COUNT over a single partition column; every n_nationKey value
  // (0..24) is its own partition, so col1 == key and col2 == 1 per row.
  final String query = "select sum(n_nationKey) over(partition by n_nationKey) as col1, count(*) over(partition by n_nationKey) as col2 \n" +
      "from cp.`tpch/nation.parquet`";

  // Validate the plan: one Window over partition {0}, and the scan projects
  // only `n_nationKey` (no `*` projection).
  final String[] expectedPlan = {"Window.*partition \\{0\\} order by \\[\\].*\\[SUM\\(\\$0\\), COUNT\\(\\)",
      "Scan.*columns=\\[`n_nationKey`\\].*"};
  final String[] excludedPatterns = {"Scan.*columns=\\[`\\*`\\].*"};
  PlanTestBase.testPlanMatchingPatterns(query, expectedPlan, excludedPatterns);

  // Long literals use uppercase 'L' (lowercase 'l' is easily misread as '1').
  testBuilder()
      .sqlQuery(query)
      .unOrdered()
      .baselineColumns("col1", "col2")
      .baselineValues(0L, 1L)
      .baselineValues(1L, 1L)
      .baselineValues(2L, 1L)
      .baselineValues(3L, 1L)
      .baselineValues(4L, 1L)
      .baselineValues(5L, 1L)
      .baselineValues(6L, 1L)
      .baselineValues(7L, 1L)
      .baselineValues(8L, 1L)
      .baselineValues(9L, 1L)
      .baselineValues(10L, 1L)
      .baselineValues(11L, 1L)
      .baselineValues(12L, 1L)
      .baselineValues(13L, 1L)
      .baselineValues(14L, 1L)
      .baselineValues(15L, 1L)
      .baselineValues(16L, 1L)
      .baselineValues(17L, 1L)
      .baselineValues(18L, 1L)
      .baselineValues(19L, 1L)
      .baselineValues(20L, 1L)
      .baselineValues(21L, 1L)
      .baselineValues(22L, 1L)
      .baselineValues(23L, 1L)
      .baselineValues(24L, 1L)
      .build()
      .run();
}
@Test // DRILL-3196
public void testSinglePartitionDefinedInWindowList() throws Exception {
  // Same single-partition SUM, but the window is declared via a named
  // WINDOW clause ("window w as ...") rather than inline OVER().
  final String query = "select sum(n_nationKey) over w as col \n" +
      "from cp.`tpch/nation.parquet` \n" +
      "window w as (partition by n_nationKey order by n_nationKey)";

  // Validate the plan: Window over partition {0} ordered by [0]; scan
  // projects only `n_nationKey`.
  final String[] expectedPlan = {"Window.*partition \\{0\\} order by \\[0\\].*SUM\\(\\$0\\)",
      "Scan.*columns=\\[`n_nationKey`\\].*"};
  final String[] excludedPatterns = {"Scan.*columns=\\[`\\*`\\].*"};
  PlanTestBase.testPlanMatchingPatterns(query, expectedPlan, excludedPatterns);

  // One row per nation key, each its own partition => sum equals the key.
  // Long literals use uppercase 'L' ('l' reads like '1').
  testBuilder()
      .sqlQuery(query)
      .unOrdered()
      .baselineColumns("col")
      .baselineValues(0L)
      .baselineValues(1L)
      .baselineValues(2L)
      .baselineValues(3L)
      .baselineValues(4L)
      .baselineValues(5L)
      .baselineValues(6L)
      .baselineValues(7L)
      .baselineValues(8L)
      .baselineValues(9L)
      .baselineValues(10L)
      .baselineValues(11L)
      .baselineValues(12L)
      .baselineValues(13L)
      .baselineValues(14L)
      .baselineValues(15L)
      .baselineValues(16L)
      .baselineValues(17L)
      .baselineValues(18L)
      .baselineValues(19L)
      .baselineValues(20L)
      .baselineValues(21L)
      .baselineValues(22L)
      .baselineValues(23L)
      .baselineValues(24L)
      .build()
      .run();
}
@Test(expected = UnsupportedFunctionException.class) // DRILL-3182
public void testWindowFunctionWithDistinct() throws Exception {
  // COUNT(DISTINCT ...) as a window function must raise
  // UnsupportedFunctionException (surfaced via throwAsUnsupportedException).
  try {
    final String query = "explain plan for select a2, count(distinct b2) over(partition by a2) \n" +
        "from cp.`tpch/nation.parquet`";
    test(query);
  } catch(UserException ex) {
    throwAsUnsupportedException(ex);
    throw ex;
  }
}

@Test(expected = UnsupportedFunctionException.class) // DRILL-3188
public void testWindowFrame() throws Exception {
  // An explicit ROWS BETWEEN 1 PRECEDING AND 1 FOLLOWING frame is expected
  // to be rejected as unsupported.
  try {
    final String query = "select a2, sum(a2) over(partition by a2 order by a2 rows between 1 preceding and 1 following ) \n" +
        "from cp.`tpch/nation.parquet` t \n" +
        "order by a2";
    test(query);
  } catch(UserException ex) {
    throwAsUnsupportedException(ex);
    throw ex;
  }
}

@Test(expected = UnsupportedFunctionException.class) // DRILL-3188
public void testRowsUnboundedPreceding() throws Exception {
  // A bare "rows UNBOUNDED PRECEDING" frame is expected to be rejected.
  try {
    final String query = "explain plan for select sum(n_nationKey) over(partition by n_nationKey order by n_nationKey \n" +
        "rows UNBOUNDED PRECEDING)" +
        "from cp.`tpch/nation.parquet` t \n" +
        "order by n_nationKey";
    test(query);
  } catch(UserException ex) {
    throwAsUnsupportedException(ex);
    throw ex;
  }
}

@Test(expected = UnsupportedFunctionException.class) // DRILL-3359
public void testFramesDefinedInWindowClause() throws Exception {
  // Frames are also rejected when defined inside a named WINDOW clause.
  try {
    final String query = "explain plan for select sum(n_nationKey) over w \n" +
        "from cp.`tpch/nation.parquet` \n" +
        "window w as (partition by n_nationKey order by n_nationKey rows UNBOUNDED PRECEDING)";
    test(query);
  } catch(UserException ex) {
    throwAsUnsupportedException(ex);
    throw ex;
  }
}

@Test(expected = UnsupportedFunctionException.class) // DRILL-3326
public void testWindowWithAlias() throws Exception {
  // A framed window expression is rejected even when the whole expression
  // carries a column alias.
  try {
    String query = "explain plan for SELECT sum(n_nationkey) OVER (PARTITION BY n_name ORDER BY n_name ROWS BETWEEN CURRENT ROW AND 1 FOLLOWING) as col2 \n" +
        "from cp.`tpch/nation.parquet`";
    test(query);
  } catch(UserException ex) {
    throwAsUnsupportedException(ex);
    throw ex;
  }
}

@Test(expected = UnsupportedFunctionException.class) // DRILL-3189
public void testWindowWithAllowDisallow() throws Exception {
  // The "disallow partial" frame modifier is expected to be rejected.
  try {
    final String query = "select sum(n_nationKey) over(partition by n_nationKey \n" +
        "rows between unbounded preceding and unbounded following disallow partial) \n" +
        "from cp.`tpch/nation.parquet` \n" +
        "order by n_nationKey";
    test(query);
  } catch(UserException ex) {
    throwAsUnsupportedException(ex);
    throw ex;
  }
}

@Test // DRILL-3344
public void testWindowGroupBy() throws Exception {
  // A windowed aggregate over a column that is not in the GROUP BY list is
  // expected to fail validation (checked via parseErrorHelper).
  String query = "explain plan for SELECT max(n_nationkey) OVER (), n_name as col2 \n" +
      "from cp.`tpch/nation.parquet` \n" +
      "group by n_name";
  parseErrorHelper(query);
}

@Test // DRILL-3346
public void testWindowGroupByOnView() throws Exception {
  // Same validation error as testWindowGroupBy, but exercised through a
  // view; the view is dropped in the finally block to keep tests isolated.
  try {
    String createView = "create view testWindowGroupByOnView(a, b) as \n" +
        "select n_nationkey, n_name from cp.`tpch/nation.parquet`";
    String query = "explain plan for SELECT max(a) OVER (), b as col2 \n" +
        "from testWindowGroupByOnView \n" +
        "group by b";
    test("use dfs_test.tmp");
    test(createView);
    parseErrorHelper(query);
  } finally {
    test("drop view testWindowGroupByOnView");
  }
}
@Test // DRILL-3188
public void testWindowFrameEquivalentToDefault() throws Exception {
  // Three spellings of the same window: the implicit default frame (query1),
  // the explicit RANGE default frame (query2), and an unbounded ROWS frame
  // without ORDER BY (query3). All must plan and return identical results.
  final String query1 = "select sum(n_nationKey) over(partition by n_nationKey order by n_nationKey) as col\n" +
      "from cp.`tpch/nation.parquet` t \n" +
      "order by n_nationKey";
  final String query2 = "select sum(n_nationKey) over(partition by n_nationKey order by n_nationKey \n" +
      "range between unbounded preceding and current row) as col \n" +
      "from cp.`tpch/nation.parquet` t \n" +
      "order by n_nationKey";
  final String query3 = "select sum(n_nationKey) over(partition by n_nationKey \n" +
      "rows BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) as col \n" +
      "from cp.`tpch/nation.parquet` t \n" +
      "order by n_nationKey";

  // Long literals below use uppercase 'L' ('l' is easily misread as '1').
  final String[] expectedPlan1 = {"Window.*partition \\{0\\} order by \\[0\\].*SUM\\(\\$0\\)",
      "Scan.*columns=\\[`n_nationKey`\\].*"};
  final String[] excludedPatterns1 = {"Scan.*columns=\\[`\\*`\\].*"};
  PlanTestBase.testPlanMatchingPatterns(query1, expectedPlan1, excludedPatterns1);

  testBuilder()
      .sqlQuery(query1)
      .unOrdered()
      .baselineColumns("col")
      .baselineValues(0L)
      .baselineValues(1L)
      .baselineValues(2L)
      .baselineValues(3L)
      .baselineValues(4L)
      .baselineValues(5L)
      .baselineValues(6L)
      .baselineValues(7L)
      .baselineValues(8L)
      .baselineValues(9L)
      .baselineValues(10L)
      .baselineValues(11L)
      .baselineValues(12L)
      .baselineValues(13L)
      .baselineValues(14L)
      .baselineValues(15L)
      .baselineValues(16L)
      .baselineValues(17L)
      .baselineValues(18L)
      .baselineValues(19L)
      .baselineValues(20L)
      .baselineValues(21L)
      .baselineValues(22L)
      .baselineValues(23L)
      .baselineValues(24L)
      .build()
      .run();

  final String[] expectedPlan2 = {"Window.*partition \\{0\\} order by \\[0\\].*SUM\\(\\$0\\)",
      "Scan.*columns=\\[`n_nationKey`\\].*"};
  final String[] excludedPatterns2 = {"Scan.*columns=\\[`\\*`\\].*"};
  PlanTestBase.testPlanMatchingPatterns(query2, expectedPlan2, excludedPatterns2);

  testBuilder()
      .sqlQuery(query2)
      .unOrdered()
      .baselineColumns("col")
      .baselineValues(0L)
      .baselineValues(1L)
      .baselineValues(2L)
      .baselineValues(3L)
      .baselineValues(4L)
      .baselineValues(5L)
      .baselineValues(6L)
      .baselineValues(7L)
      .baselineValues(8L)
      .baselineValues(9L)
      .baselineValues(10L)
      .baselineValues(11L)
      .baselineValues(12L)
      .baselineValues(13L)
      .baselineValues(14L)
      .baselineValues(15L)
      .baselineValues(16L)
      .baselineValues(17L)
      .baselineValues(18L)
      .baselineValues(19L)
      .baselineValues(20L)
      .baselineValues(21L)
      .baselineValues(22L)
      .baselineValues(23L)
      .baselineValues(24L)
      .build()
      .run();

  // query3 has no ORDER BY in the window, so the plan pattern omits it.
  final String[] expectedPlan3 = {"Window.*partition \\{0\\}.*SUM\\(\\$0\\)",
      "Scan.*columns=\\[`n_nationKey`\\].*"};
  final String[] excludedPatterns3 = {"Scan.*columns=\\[`\\*`\\].*"};
  PlanTestBase.testPlanMatchingPatterns(query3, expectedPlan3, excludedPatterns3);

  testBuilder()
      .sqlQuery(query3)
      .unOrdered()
      .baselineColumns("col")
      .baselineValues(0L)
      .baselineValues(1L)
      .baselineValues(2L)
      .baselineValues(3L)
      .baselineValues(4L)
      .baselineValues(5L)
      .baselineValues(6L)
      .baselineValues(7L)
      .baselineValues(8L)
      .baselineValues(9L)
      .baselineValues(10L)
      .baselineValues(11L)
      .baselineValues(12L)
      .baselineValues(13L)
      .baselineValues(14L)
      .baselineValues(15L)
      .baselineValues(16L)
      .baselineValues(17L)
      .baselineValues(18L)
      .baselineValues(19L)
      .baselineValues(20L)
      .baselineValues(21L)
      .baselineValues(22L)
      .baselineValues(23L)
      .baselineValues(24L)
      .build()
      .run();
}
@Test // DRILL-3204
public void testWindowWithJoin() throws Exception {
  // Window function on top of a join plus GROUP BY; the join keys collapse
  // to r_regionKey values 0..4, each its own partition.
  final String query = "select sum(t1.r_regionKey) over(partition by t1.r_regionKey) as col \n" +
      "from cp.`tpch/region.parquet` t1, cp.`tpch/nation.parquet` t2 \n" +
      "where t1.r_regionKey = t2.n_nationKey \n" +
      "group by t1.r_regionKey";

  // NOTE(review): both scan patterns name `n_nationKey`; one side of the
  // join scans region.parquet, so the first pattern may have been intended
  // as `r_regionKey` — confirm against the actual plan before changing.
  final String[] expectedPlan = {"Window.*partition \\{0\\}.*SUM\\(\\$0\\)",
      "Scan.*columns=\\[`n_nationKey`\\].*",
      "Scan.*columns=\\[`n_nationKey`\\].*"};
  final String[] excludedPatterns = {"Scan.*columns=\\[`\\*`\\].*"};
  PlanTestBase.testPlanMatchingPatterns(query, expectedPlan, excludedPatterns);

  // Long literals use uppercase 'L' ('l' reads like '1').
  testBuilder()
      .sqlQuery(query)
      .unOrdered()
      .baselineColumns("col")
      .baselineValues(0L)
      .baselineValues(1L)
      .baselineValues(2L)
      .baselineValues(3L)
      .baselineValues(4L)
      .build()
      .run();
}
@Test // DRILL-3298
public void testCountEmptyPartitionByWithExchange() throws Exception {
  // COUNT(*) with an empty PARTITION BY but an ORDER BY, forced through an
  // exchange by lowering planner.slice_target; the option is restored in
  // the finally block so later tests are unaffected.
  String query = String.format("select count(*) over (order by o_orderpriority) as cnt from dfs.`%s/multilevel/parquet` where o_custkey < 100", TEST_RES_PATH);
  try {
    // Validate the plan: empty partition {} with order by [0]; only the two
    // referenced columns are scanned.
    final String[] expectedPlan = {"Window.*partition \\{\\} order by \\[0\\].*COUNT\\(\\)",
        "Scan.*columns=\\[`o_custkey`, `o_orderpriority`\\]"};
    final String[] excludedPatterns = {"Scan.*columns=\\[`\\*`\\]"};
    PlanTestBase.testPlanMatchingPatterns(query, expectedPlan, excludedPatterns);

    // Long literals use uppercase 'L' ('l' reads like '1').
    testBuilder()
        .sqlQuery(query)
        .ordered()
        .baselineColumns("cnt")
        .optionSettingQueriesForTestQuery("alter session set `planner.slice_target` = 1")
        .baselineValues(1L)
        .baselineValues(4L)
        .baselineValues(4L)
        .baselineValues(4L)
        .build().run();
  } finally {
    test("alter session set `planner.slice_target` = " + ExecConstants.SLICE_TARGET_DEFAULT);
  }
}
/* Verify the output of aggregate functions (which are reduced
 * eg: avg(x) = sum(x)/count(x)) return results of the correct
 * data type (double)
 */
@Test
public void testAvgVarianceWindowFunctions() throws Exception {
  final String avgQuery = "select avg(n_nationkey) over (partition by n_nationkey) col1 " +
      "from cp.`tpch/nation.parquet` " +
      "where n_nationkey = 1";

  // Plan: avg is reduced to SUM($0) and COUNT($0) inside the Window.
  final String[] expectedPlan1 = {"Window.*partition \\{0\\} order by \\[\\].*SUM\\(\\$0\\), COUNT\\(\\$0\\)",
      "Scan.*columns=\\[`n_nationkey`\\]"};
  final String[] excludedPatterns1 = {"Scan.*columns=\\[`\\*`\\]"};
  PlanTestBase.testPlanMatchingPatterns(avgQuery, expectedPlan1, excludedPatterns1);

  // avg of the single row with key 1 is 1.0 (a double, not an int).
  testBuilder()
      .sqlQuery(avgQuery)
      .unOrdered()
      .baselineColumns("col1")
      .baselineValues(1.0d)
      .go();

  final String varianceQuery = "select var_pop(n_nationkey) over (partition by n_nationkey) col1 " +
      "from cp.`tpch/nation.parquet` " +
      "where n_nationkey = 1";

  // Plan: var_pop is reduced to SUM($1), SUM($0) and COUNT($0).
  final String[] expectedPlan2 = {"Window.*partition \\{0\\} order by \\[\\].*SUM\\(\\$1\\), SUM\\(\\$0\\), COUNT\\(\\$0\\)",
      "Scan.*columns=\\[`n_nationkey`\\]"};
  final String[] excludedPatterns2 = {"Scan.*columns=\\[`\\*`\\]"};
  PlanTestBase.testPlanMatchingPatterns(varianceQuery, expectedPlan2, excludedPatterns2);

  // Population variance of a single value is 0.0 (double).
  testBuilder()
      .sqlQuery(varianceQuery)
      .unOrdered()
      .baselineColumns("col1")
      .baselineValues(0.0d)
      .go();
}
@Test
public void testWindowFunctionWithKnownType() throws Exception {
  // SUM over a cast-to-int column whose values exceed Integer.MAX_VALUE;
  // the result must widen to a long (2147483649 > 2^31 - 1).
  final String query = "select sum(cast(col_int as int)) over (partition by col_varchar) as col1 " +
      "from cp.`jsoninput/large_int.json` limit 1";

  final String[] expectedPlan1 = {"Window.*partition \\{0\\} order by \\[\\].*SUM\\(\\$1\\)",
      "Scan.*columns=\\[`col_varchar`, `col_int`\\]"};
  final String[] excludedPatterns1 = {"Scan.*columns=\\[`\\*`\\]"};
  PlanTestBase.testPlanMatchingPatterns(query, expectedPlan1, excludedPatterns1);

  // Uppercase 'L' suffix: with lowercase 'l' this literal is nearly
  // indistinguishable from 21474836491.
  testBuilder()
      .sqlQuery(query)
      .unOrdered()
      .baselineColumns("col1")
      .baselineValues(2147483649L)
      .go();

  final String avgQuery = "select avg(cast(col_int as int)) over (partition by col_varchar) as col1 " +
      "from cp.`jsoninput/large_int.json` limit 1";

  // avg is reduced to SUM($1)/COUNT($1) in the Window.
  final String[] expectedPlan2 = {"Window.*partition \\{0\\} order by \\[\\].*SUM\\(\\$1\\), COUNT\\(\\$1\\)",
      "Scan.*columns=\\[`col_varchar`, `col_int`\\]"};
  final String[] excludedPatterns2 = {"Scan.*columns=\\[`\\*`\\]"};
  PlanTestBase.testPlanMatchingPatterns(avgQuery, expectedPlan2, excludedPatterns2);

  testBuilder()
      .sqlQuery(avgQuery)
      .unOrdered()
      .baselineColumns("col1")
      .baselineValues(1.0737418245E9d)
      .go();
}
@Test
public void testCompoundIdentifierInWindowDefinition() throws Exception {
  // A compound identifier (columns[1]) inside a named WINDOW clause; both
  // projections reference the same window, so col1 == col2 on every row.
  String root = FileUtils.getResourceAsFile("/multilevel/csv/1994/Q1/orders_94_q1.csv").toURI().toString();
  String query = String.format("SELECT count(*) OVER w as col1, count(*) OVER w as col2 \n" +
      "FROM dfs_test.`%s` \n" +
      "WINDOW w AS (PARTITION BY columns[1] ORDER BY columns[0] DESC)", root);

  // Validate the plan
  final String[] expectedPlan = {"Window.*partition \\{1\\} order by \\[0 DESC\\].*COUNT\\(\\)",
      "Scan.*columns=\\[`columns`\\[0\\], `columns`\\[1\\]\\]"};
  final String[] excludedPatterns = {"Scan.*columns=\\[`\\*`\\]"};
  PlanTestBase.testPlanMatchingPatterns(query, expectedPlan, excludedPatterns);

  // Validate the result: `1L` long literals replace the noisier `(long) 1`
  // casts (same boxed Long value passed to baselineValues).
  testBuilder()
      .sqlQuery(query)
      .ordered()
      .baselineColumns("col1", "col2")
      .baselineValues(1L, 1L)
      .baselineValues(1L, 1L)
      .baselineValues(1L, 1L)
      .baselineValues(1L, 1L)
      .baselineValues(1L, 1L)
      .baselineValues(1L, 1L)
      .baselineValues(1L, 1L)
      .baselineValues(1L, 1L)
      .baselineValues(1L, 1L)
      .baselineValues(1L, 1L)
      .build()
      .run();
}
@Test
public void testRankWithGroupBy() throws Exception {
  // DENSE_RANK over the result of a GROUP BY; the maximum dense rank equals
  // the number of distinct l_suppkey values (100).
  final String query = "select dense_rank() over (order by l_suppkey) as rank1 " +
      " from cp.`tpch/lineitem.parquet` group by l_partkey, l_suppkey order by 1 desc limit 1";

  // Validate the plan: window ordered on the second group-by column; scan
  // projects only the two grouping columns.
  final String[] expectedPlan = {"Window.*partition \\{\\} order by \\[1\\].*DENSE_RANK\\(\\)",
      "Scan.*columns=\\[`l_partkey`, `l_suppkey`\\]"};
  final String[] excludedPatterns = {"Scan.*columns=\\[`\\*`\\]"};
  PlanTestBase.testPlanMatchingPatterns(query, expectedPlan, excludedPatterns);

  // Uppercase 'L' long-literal suffix ('l' reads like '1').
  testBuilder()
      .sqlQuery(query)
      .unOrdered()
      .baselineColumns("rank1")
      .baselineValues(100L)
      .go();
}
@Test // DRILL-3404
public void testWindowSumAggIsNotNull() throws Exception {
  // Regression test: a windowed SUM over a column with nulls must not
  // produce spurious nulls; 26 rows survive the IS NOT NULL filter.
  String query = String.format("select count(*) cnt from (select sum ( c1 ) over ( partition by c2 order by c1 asc nulls first ) w_sum from dfs.`%s/window/table_with_nulls.parquet` ) sub_query where w_sum is not null", TEST_RES_PATH);

  // Validate the plan
  final String[] expectedPlan = {"Window.*partition \\{1\\} order by \\[0 ASC-nulls-first\\].*SUM\\(\\$0\\)",
      "Scan.*columns=\\[`c1`, `c2`\\]"};
  final String[] excludedPatterns = {"Scan.*columns=\\[`\\*`\\]"};
  PlanTestBase.testPlanMatchingPatterns(query, expectedPlan, excludedPatterns);

  // Uppercase 'L' long-literal suffix ('l' reads like '1').
  testBuilder()
      .sqlQuery(query)
      .ordered()
      .baselineColumns("cnt")
      .baselineValues(26L)
      .build().run();
}
@Test // DRILL-3292
public void testWindowConstants() throws Exception {
  // Windowed aggregates over constants (sum(2), sum(0.5)) alongside RANK
  // and a real column, all sharing one named window.
  String query = "select rank() over w fn, sum(2) over w sumINTEGER, sum(employee_id) over w sumEmpId, sum(0.5) over w sumFLOAT \n" +
      "from cp.`employee.json` \n" +
      "where position_id = 2 \n" +
      "window w as(partition by position_id order by employee_id)";

  // Validate the plan
  final String[] expectedPlan = {"Window.*partition \\{0\\} order by \\[1\\].*RANK\\(\\), SUM\\(\\$2\\), SUM\\(\\$1\\), SUM\\(\\$3\\)",
      "Scan.*columns=\\[`position_id`, `employee_id`\\]"};
  final String[] excludedPatterns = {"Scan.*columns=\\[`\\*`\\]"};
  PlanTestBase.testPlanMatchingPatterns(query, expectedPlan, excludedPatterns);

  // Long literals use uppercase 'L' ('l' reads like '1'); doubles unchanged.
  testBuilder()
      .sqlQuery(query)
      .ordered()
      .baselineColumns("fn", "sumINTEGER", "sumEmpId", "sumFLOAT")
      .baselineValues(1L, 2L, 2L, 0.5)
      .baselineValues(2L, 4L, 6L, 1.0)
      .baselineValues(3L, 6L, 11L, 1.5)
      .baselineValues(4L, 8L, 31L, 2.0)
      .baselineValues(5L, 10L, 52L, 2.5)
      .baselineValues(6L, 12L, 74L, 3.0)
      .build()
      .run();
}
@Test // DRILL-3567
public void testMultiplePartitions1() throws Exception {
  // Three window functions over two different partition keys (b1 and a1);
  // the plan must contain two stacked Window operators.
  String root = FileUtils.getResourceAsFile("/store/text/data/t.json").toURI().toString();
  String query = String.format("select count(*) over(partition by b1 order by c1) as count1, \n" +
      "sum(a1) over(partition by b1 order by c1) as sum1, \n" +
      "count(*) over(partition by a1 order by c1) as count2 \n" +
      "from dfs_test.`%s`", root);

  // Validate the plan
  final String[] expectedPlan = {"Window.*partition \\{2\\} order by \\[1\\].*COUNT\\(\\)",
      "Window.*partition \\{0\\} order by \\[1\\].*COUNT\\(\\), SUM\\(\\$2\\)",
      "Scan.*columns=\\[`b1`, `c1`, `a1`\\]"};
  final String[] excludedPatterns = {"Scan.*columns=\\[`\\*`\\]"};
  PlanTestBase.testPlanMatchingPatterns(query, expectedPlan, excludedPatterns);

  // Long literals use uppercase 'L' ('l' reads like '1').
  testBuilder()
      .sqlQuery(query)
      .unOrdered()
      .baselineColumns("count1", "sum1", "count2")
      .baselineValues(1L, 0L, 2L)
      .baselineValues(1L, 0L, 2L)
      .baselineValues(2L, 0L, 5L)
      .baselineValues(3L, 0L, 5L)
      .baselineValues(3L, 0L, 5L)
      .baselineValues(1L, 10L, 2L)
      .baselineValues(1L, 10L, 2L)
      .baselineValues(2L, 20L, 5L)
      .baselineValues(3L, 30L, 5L)
      .baselineValues(3L, 30L, 5L)
      .build()
      .run();
}
@Test // DRILL-3567
public void testMultiplePartitions2() throws Exception {
  // Same as testMultiplePartitions1 but with the select-list order changed,
  // to check the window functions are still grouped by partition correctly.
  String root = FileUtils.getResourceAsFile("/store/text/data/t.json").toURI().toString();
  String query = String.format("select count(*) over(partition by b1 order by c1) as count1, \n" +
      "count(*) over(partition by a1 order by c1) as count2, \n" +
      "sum(a1) over(partition by b1 order by c1) as sum1 \n" +
      "from dfs_test.`%s`", root);

  // Validate the plan
  final String[] expectedPlan = {"Window.*partition \\{2\\} order by \\[1\\].*COUNT\\(\\)",
      "Window.*partition \\{0\\} order by \\[1\\].*COUNT\\(\\), SUM\\(\\$2\\)",
      "Scan.*columns=\\[`b1`, `c1`, `a1`\\]"};
  final String[] excludedPatterns = {"Scan.*columns=\\[`\\*`\\]"};
  PlanTestBase.testPlanMatchingPatterns(query, expectedPlan, excludedPatterns);

  // Long literals use uppercase 'L' ('l' reads like '1').
  testBuilder()
      .sqlQuery(query)
      .unOrdered()
      .baselineColumns("count1", "count2", "sum1")
      .baselineValues(1L, 2L, 0L)
      .baselineValues(1L, 2L, 0L)
      .baselineValues(2L, 5L, 0L)
      .baselineValues(3L, 5L, 0L)
      .baselineValues(3L, 5L, 0L)
      .baselineValues(1L, 2L, 10L)
      .baselineValues(1L, 2L, 10L)
      .baselineValues(2L, 5L, 20L)
      .baselineValues(3L, 5L, 30L)
      .baselineValues(3L, 5L, 30L)
      .build()
      .run();
}
@Test // see DRILL-3574
public void testWithAndWithoutPartitions() throws Exception {
  // Mixes a partitioned window (s1) with an unpartitioned one (s2) under a
  // lowered slice target; the empty-partition Window must sit above a
  // UnionExchange in the plan.
  String root = FileUtils.getResourceAsFile("/store/text/data/t.json").toURI().toString();
  String query = String.format("select sum(a1) over(partition by b1, c1) as s1, sum(a1) over() as s2 \n" +
      "from dfs_test.`%s` \n" +
      "order by a1", root);
  test("alter session set `planner.slice_target` = 1");

  // Validate the plan
  final String[] expectedPlan = {"Window\\(window#0=\\[window\\(partition \\{\\}.*\n" +
      ".*UnionExchange"};
  PlanTestBase.testPlanMatchingPatterns(query, expectedPlan, new String[]{});

  // Long literals use uppercase 'L' ('l' reads like '1').
  testBuilder()
      .sqlQuery(query)
      .unOrdered()
      .baselineColumns("s1", "s2")
      .baselineValues(0L, 50L)
      .baselineValues(0L, 50L)
      .baselineValues(0L, 50L)
      .baselineValues(0L, 50L)
      .baselineValues(0L, 50L)
      .baselineValues(10L, 50L)
      .baselineValues(10L, 50L)
      .baselineValues(10L, 50L)
      .baselineValues(20L, 50L)
      .baselineValues(20L, 50L)
      .build()
      .run();
}
@Test // see DRILL-3657
public void testConstantsInMultiplePartitions() throws Exception {
  // Constant arguments (sum(1), rank() over(order by 1)) across several
  // distinct partitions; the plan must keep two Window operators separated
  // by a Sort.
  String root = FileUtils.getResourceAsFile("/store/text/data/t.json").toURI().toString();
  String query = String.format(
      "select sum(1) over(partition by b1 order by a1) as sum1, sum(1) over(partition by a1) as sum2, rank() over(order by b1) as rank1, rank() over(order by 1) as rank2 \n" +
      "from dfs_test.`%s` \n" +
      "order by 1, 2, 3, 4", root);

  // Validate the plan
  final String[] expectedPlan = {"Window.*SUM\\(\\$3\\).*\n" +
      ".*SelectionVectorRemover.*\n" +
      ".*Sort.*\n" +
      ".*Window.*SUM\\(\\$2\\).*"
  };
  PlanTestBase.testPlanMatchingPatterns(query, expectedPlan, new String[]{});

  // Long literals use uppercase 'L' ('l' reads like '1').
  testBuilder()
      .sqlQuery(query)
      .unOrdered()
      .baselineColumns("sum1", "sum2", "rank1", "rank2")
      .baselineValues(2L, 5L, 1L, 1L)
      .baselineValues(2L, 5L, 1L, 1L)
      .baselineValues(2L, 5L, 6L, 1L)
      .baselineValues(2L, 5L, 6L, 1L)
      .baselineValues(3L, 5L, 3L, 1L)
      .baselineValues(3L, 5L, 3L, 1L)
      .baselineValues(3L, 5L, 3L, 1L)
      .baselineValues(3L, 5L, 8L, 1L)
      .baselineValues(3L, 5L, 8L, 1L)
      .baselineValues(3L, 5L, 8L, 1L)
      .build()
      .run();
}
@Test // DRILL-3580
public void testExpressionInWindowFunction() throws Exception {
  // An arithmetic expression (a1 + b1) as the window-function argument,
  // alongside a plain-column window over the same partition.
  String root = FileUtils.getResourceAsFile("/store/text/data/t.json").toURI().toString();
  String query = String.format("select a1, b1, sum(b1) over (partition by a1) as c1, sum(a1 + b1) over (partition by a1) as c2\n" +
      "from dfs_test.`%s`", root);

  // Validate the plan: both SUMs live in a single Window over partition {0}.
  final String[] expectedPlan = {"Window\\(window#0=\\[window\\(partition \\{0\\} order by \\[\\].*\\[SUM\\(\\$1\\), SUM\\(\\$2\\)\\]"};
  PlanTestBase.testPlanMatchingPatterns(query, expectedPlan, new String[]{});

  // Long literals use uppercase 'L' ('l' reads like '1').
  testBuilder()
      .sqlQuery(query)
      .unOrdered()
      .baselineColumns("a1", "b1", "c1", "c2")
      .baselineValues(0L, 1L, 8L, 8L)
      .baselineValues(0L, 1L, 8L, 8L)
      .baselineValues(0L, 2L, 8L, 8L)
      .baselineValues(0L, 2L, 8L, 8L)
      .baselineValues(0L, 2L, 8L, 8L)
      .baselineValues(10L, 3L, 21L, 71L)
      .baselineValues(10L, 3L, 21L, 71L)
      .baselineValues(10L, 5L, 21L, 71L)
      .baselineValues(10L, 5L, 21L, 71L)
      .baselineValues(10L, 5L, 21L, 71L)
      .build()
      .run();
}
@Test // see DRILL-3657
public void testProjectPushPastWindow() throws Exception {
  // Projection pushdown must reach below the Window: the scan should read
  // only `n_nationkey` even though the window partitions/orders by 1.
  String query = "select sum(n_nationkey) over(partition by 1 order by 1) as col1, \n" +
      "count(n_nationkey) over(partition by 1 order by 1) as col2 \n" +
      "from cp.`tpch/nation.parquet` \n" +
      "limit 5";

  // Validate the plan
  final String[] expectedPlan = {"Scan.*columns=\\[`n_nationkey`\\].*"};
  PlanTestBase.testPlanMatchingPatterns(query, expectedPlan, new String[]{});

  // All 25 nation keys fall in one partition: sum = 0+..+24 = 300, count = 25.
  // Long literals use uppercase 'L' ('l' reads like '1').
  testBuilder()
      .sqlQuery(query)
      .unOrdered()
      .baselineColumns("col1", "col2")
      .baselineValues(300L, 25L)
      .baselineValues(300L, 25L)
      .baselineValues(300L, 25L)
      .baselineValues(300L, 25L)
      .baselineValues(300L, 25L)
      .build()
      .run();
}
}
|
exec/java-exec/src/test/java/org/apache/drill/exec/TestWindowFunctions.java
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.drill.exec;
import org.apache.drill.BaseTestQuery;
import org.apache.drill.common.exceptions.UserException;
import org.apache.drill.common.util.FileUtils;
import org.apache.drill.common.util.TestTools;
import org.apache.drill.exec.work.foreman.SqlUnsupportedException;
import org.apache.drill.exec.work.foreman.UnsupportedFunctionException;
import org.apache.drill.PlanTestBase;
import org.junit.Test;
public class TestWindowFunctions extends BaseTestQuery {
static final String WORKING_PATH = TestTools.getWorkingPath();
static final String TEST_RES_PATH = WORKING_PATH + "/src/test/resources";
private static void throwAsUnsupportedException(UserException ex) throws Exception {
SqlUnsupportedException.errorClassNameToException(ex.getOrCreatePBError(false).getException().getExceptionClass());
throw ex;
}
@Test // DRILL-3196
public void testSinglePartition() throws Exception {
final String query = "explain plan for select sum(a2) over(partition by a2), count(*) over(partition by a2) \n" +
"from cp.`tpch/nation.parquet`";
test(query);
}
@Test // DRILL-3196
public void testSinglePartitionDefinedInWindowList() throws Exception {
final String query = "explain plan for select sum(a2) over w \n" +
"from cp.`tpch/nation.parquet` \n" +
"window w as (partition by a2 order by a2)";
test(query);
}
@Test(expected = UnsupportedFunctionException.class) // DRILL-3182
public void testWindowFunctionWithDistinct() throws Exception {
try {
final String query = "explain plan for select a2, count(distinct b2) over(partition by a2) \n" +
"from cp.`tpch/nation.parquet`";
test(query);
} catch(UserException ex) {
throwAsUnsupportedException(ex);
throw ex;
}
}
@Test(expected = UnsupportedFunctionException.class) // DRILL-3188
public void testWindowFrame() throws Exception {
try {
final String query = "select a2, sum(a2) over(partition by a2 order by a2 rows between 1 preceding and 1 following ) \n" +
"from cp.`tpch/nation.parquet` t \n" +
"order by a2";
test(query);
} catch(UserException ex) {
throwAsUnsupportedException(ex);
throw ex;
}
}
@Test(expected = UnsupportedFunctionException.class) // DRILL-3188
public void testRowsUnboundedPreceding() throws Exception {
try {
final String query = "explain plan for select sum(n_nationKey) over(partition by n_nationKey order by n_nationKey \n" +
"rows UNBOUNDED PRECEDING)" +
"from cp.`tpch/nation.parquet` t \n" +
"order by n_nationKey";
test(query);
} catch(UserException ex) {
throwAsUnsupportedException(ex);
throw ex;
}
}
@Test(expected = UnsupportedFunctionException.class) // DRILL-3359
public void testFramesDefinedInWindowClause() throws Exception {
try {
final String query = "explain plan for select sum(n_nationKey) over w \n" +
"from cp.`tpch/nation.parquet` \n" +
"window w as (partition by n_nationKey order by n_nationKey rows UNBOUNDED PRECEDING)";
test(query);
} catch(UserException ex) {
throwAsUnsupportedException(ex);
throw ex;
}
}
@Test(expected = UnsupportedFunctionException.class) // DRILL-3326
public void testWindowWithAlias() throws Exception {
try {
String query = "explain plan for SELECT sum(n_nationkey) OVER (PARTITION BY n_name ORDER BY n_name ROWS BETWEEN CURRENT ROW AND 1 FOLLOWING) as col2 \n" +
"from cp.`tpch/nation.parquet`";
test(query);
} catch(UserException ex) {
throwAsUnsupportedException(ex);
throw ex;
}
}
@Test(expected = UnsupportedFunctionException.class) // DRILL-3189
public void testWindowWithAllowDisallow() throws Exception {
try {
final String query = "select sum(n_nationKey) over(partition by n_nationKey \n" +
"rows between unbounded preceding and unbounded following disallow partial) \n" +
"from cp.`tpch/nation.parquet` \n" +
"order by n_nationKey";
test(query);
} catch(UserException ex) {
throwAsUnsupportedException(ex);
throw ex;
}
}
@Test // DRILL-3344
public void testWindowGroupBy() throws Exception {
String query = "explain plan for SELECT max(n_nationkey) OVER (), n_name as col2 \n" +
"from cp.`tpch/nation.parquet` \n" +
"group by n_name";
parseErrorHelper(query);
}
@Test // DRILL-3346
public void testWindowGroupByOnView() throws Exception {
try {
String createView = "create view testWindowGroupByOnView(a, b) as \n" +
"select n_nationkey, n_name from cp.`tpch/nation.parquet`";
String query = "explain plan for SELECT max(a) OVER (), b as col2 \n" +
"from testWindowGroupByOnView \n" +
"group by b";
test("use dfs_test.tmp");
test(createView);
parseErrorHelper(query);
} finally {
test("drop view testWindowGroupByOnView");
}
}
@Test // DRILL-3188
public void testWindowFrameEquivalentToDefault() throws Exception {
final String query1 = "explain plan for select sum(n_nationKey) over(partition by n_nationKey order by n_nationKey) \n" +
"from cp.`tpch/nation.parquet` t \n" +
"order by n_nationKey";
final String query2 = "explain plan for select sum(n_nationKey) over(partition by n_nationKey order by n_nationKey \n" +
"range between unbounded preceding and current row) \n" +
"from cp.`tpch/nation.parquet` t \n" +
"order by n_nationKey";
final String query3 = "explain plan for select sum(n_nationKey) over(partition by n_nationKey \n" +
"rows BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING)" +
"from cp.`tpch/nation.parquet` t \n" +
"order by n_nationKey";
test(query1);
test(query2);
test(query3);
}
@Test // DRILL-3204
public void testWindowWithJoin() throws Exception {
final String query = "select sum(t1.r_regionKey) over(partition by t1.r_regionKey) \n" +
"from cp.`tpch/region.parquet` t1, cp.`tpch/nation.parquet` t2 \n" +
"where t1.r_regionKey = t2.n_nationKey \n" +
"group by t1.r_regionKey";
test(query);
}
@Test // DRILL-3298
public void testCountEmptyPartitionByWithExchange() throws Exception {
String query = String.format("select count(*) over (order by o_orderpriority) as cnt from dfs.`%s/multilevel/parquet` where o_custkey < 100", TEST_RES_PATH);
try {
testBuilder()
.sqlQuery(query)
.ordered()
.baselineColumns("cnt")
.optionSettingQueriesForTestQuery("alter session set `planner.slice_target` = 1")
.baselineValues(1l)
.baselineValues(4l)
.baselineValues(4l)
.baselineValues(4l)
.build().run();
} finally {
test("alter session set `planner.slice_target` = " + ExecConstants.SLICE_TARGET_DEFAULT);
}
}
/* Verify the output of aggregate functions (which are reduced
* eg: avg(x) = sum(x)/count(x)) return results of the correct
* data type (double)
*/
@Test
public void testAvgVarianceWindowFunctions() throws Exception {
final String avgQuery = "select avg(n_nationkey) over (partition by n_nationkey) col1 " +
"from cp.`tpch/nation.parquet` " +
"where n_nationkey = 1";
testBuilder()
.sqlQuery(avgQuery)
.unOrdered()
.baselineColumns("col1")
.baselineValues(1.0d)
.go();
final String varianceQuery = "select var_pop(n_nationkey) over (partition by n_nationkey) col1 " +
"from cp.`tpch/nation.parquet` " +
"where n_nationkey = 1";
testBuilder()
.sqlQuery(varianceQuery)
.unOrdered()
.baselineColumns("col1")
.baselineValues(0.0d)
.go();
}
@Test
public void testWindowFunctionWithKnownType() throws Exception {
final String query = "select sum(cast(col_int as int)) over (partition by col_varchar) as col1 " +
"from cp.`jsoninput/large_int.json` limit 1";
testBuilder()
.sqlQuery(query)
.unOrdered()
.baselineColumns("col1")
.baselineValues(2147483649l)
.go();
final String avgQuery = "select avg(cast(col_int as int)) over (partition by col_varchar) as col1 " +
"from cp.`jsoninput/large_int.json` limit 1";
testBuilder()
.sqlQuery(avgQuery)
.unOrdered()
.baselineColumns("col1")
.baselineValues(1.0737418245E9d)
.go();
}
@Test
public void testCompoundIdentifierInWindowDefinition() throws Exception {
    // A named WINDOW clause whose definition uses a compound identifier (columns[1])
    // must be usable by multiple window functions in the select list.
    String root = FileUtils.getResourceAsFile("/multilevel/csv/1994/Q1/orders_94_q1.csv").toURI().toString();
    String query = String.format("SELECT count(*) OVER w as col1, count(*) OVER w as col2 \n" +
        "FROM dfs_test.`%s` \n" +
        "WINDOW w AS (PARTITION BY columns[1] ORDER BY columns[0] DESC)", root);
    // Validate the result
    testBuilder()
        .sqlQuery(query)
        .ordered()
        .baselineColumns("col1", "col2")
        .baselineValues(1L, 1L)
        .baselineValues(1L, 1L)
        .baselineValues(1L, 1L)
        .baselineValues(1L, 1L)
        .baselineValues(1L, 1L)
        .baselineValues(1L, 1L)
        .baselineValues(1L, 1L)
        .baselineValues(1L, 1L)
        .baselineValues(1L, 1L)
        .baselineValues(1L, 1L)
        .build()
        .run();
}
@Test
public void testRankWithGroupBy() throws Exception {
    // dense_rank applied on top of a GROUP BY; only the highest rank is checked.
    final String query = "select dense_rank() over (order by l_suppkey) as rank1 " +
        " from cp.`tpch/lineitem.parquet` group by l_partkey, l_suppkey order by 1 desc limit 1";
    testBuilder()
        .sqlQuery(query)
        .unOrdered()
        .baselineColumns("rank1")
        .baselineValues(100L)
        .go();
}
@Test // DRILL-3404
public void testWindowSumAggIsNotNull() throws Exception {
    // A windowed sum over a column with nulls must be filterable with IS NOT NULL.
    String query = String.format("select count(*) cnt from (select sum ( c1 ) over ( partition by c2 order by c1 asc nulls first ) w_sum from dfs.`%s/window/table_with_nulls.parquet` ) sub_query where w_sum is not null", TEST_RES_PATH);
    testBuilder()
        .sqlQuery(query)
        .ordered()
        .baselineColumns("cnt")
        .baselineValues(26L)
        .build().run();
}
@Test // DRILL-3292
public void testWindowConstants() throws Exception {
    // Constant arguments (sum(2), sum(0.5)) inside window functions must be
    // accumulated per frame like regular columns.
    String query = "select rank() over w fn, sum(2) over w sumINTEGER, sum(employee_id) over w sumEmpId, sum(0.5) over w sumFLOAT \n" +
        "from cp.`employee.json` \n" +
        "where position_id = 2 \n" +
        "window w as(partition by position_id order by employee_id)";
    testBuilder()
        .sqlQuery(query)
        .ordered()
        .baselineColumns("fn", "sumINTEGER", "sumEmpId", "sumFLOAT")
        .baselineValues(1L, 2L, 2L, 0.5)
        .baselineValues(2L, 4L, 6L, 1.0)
        .baselineValues(3L, 6L, 11L, 1.5)
        .baselineValues(4L, 8L, 31L, 2.0)
        .baselineValues(5L, 10L, 52L, 2.5)
        .baselineValues(6L, 12L, 74L, 3.0)
        .build()
        .run();
}
@Test // DRILL-3567
public void testMultiplePartitions1() throws Exception {
    // Two different PARTITION BY keys (b1 and a1) in the same select list; each
    // window must be computed independently.
    String root = FileUtils.getResourceAsFile("/store/text/data/t.json").toURI().toString();
    String query = String.format("select count(*) over(partition by b1 order by c1) as count1, \n" +
        "sum(a1) over(partition by b1 order by c1) as sum1, \n" +
        "count(*) over(partition by a1 order by c1) as count2 \n" +
        "from dfs_test.`%s`", root);
    testBuilder()
        .sqlQuery(query)
        .unOrdered()
        .baselineColumns("count1", "sum1", "count2")
        .baselineValues(1L, 0L, 2L)
        .baselineValues(1L, 0L, 2L)
        .baselineValues(2L, 0L, 5L)
        .baselineValues(3L, 0L, 5L)
        .baselineValues(3L, 0L, 5L)
        .baselineValues(1L, 10L, 2L)
        .baselineValues(1L, 10L, 2L)
        .baselineValues(2L, 20L, 5L)
        .baselineValues(3L, 30L, 5L)
        .baselineValues(3L, 30L, 5L)
        .build()
        .run();
}
@Test // DRILL-3567
public void testMultiplePartitions2() throws Exception {
    // Same as testMultiplePartitions1 but with the window expressions in a
    // different order, to catch ordering-sensitive planner bugs.
    String root = FileUtils.getResourceAsFile("/store/text/data/t.json").toURI().toString();
    String query = String.format("select count(*) over(partition by b1 order by c1) as count1, \n" +
        "count(*) over(partition by a1 order by c1) as count2, \n" +
        "sum(a1) over(partition by b1 order by c1) as sum1 \n" +
        "from dfs_test.`%s`", root);
    testBuilder()
        .sqlQuery(query)
        .unOrdered()
        .baselineColumns("count1", "count2", "sum1")
        .baselineValues(1L, 2L, 0L)
        .baselineValues(1L, 2L, 0L)
        .baselineValues(2L, 5L, 0L)
        .baselineValues(3L, 5L, 0L)
        .baselineValues(3L, 5L, 0L)
        .baselineValues(1L, 2L, 10L)
        .baselineValues(1L, 2L, 10L)
        .baselineValues(2L, 5L, 20L)
        .baselineValues(3L, 5L, 30L)
        .baselineValues(3L, 5L, 30L)
        .build()
        .run();
}
@Test // see DRILL-3574
public void testWithAndWithoutPartitions() throws Exception {
    // Mixing a partitioned window with an empty OVER() in one query; the plan
    // must place a UnionExchange below the empty-partition Window operator.
    String root = FileUtils.getResourceAsFile("/store/text/data/t.json").toURI().toString();
    String query = String.format("select sum(a1) over(partition by b1, c1) as s1, sum(a1) over() as s2 \n" +
        "from dfs_test.`%s` \n" +
        "order by a1", root);
    test("alter session set `planner.slice_target` = 1");
    // Validate the plan
    final String[] expectedPlan = {"Window\\(window#0=\\[window\\(partition \\{\\}.*\n" +
        ".*UnionExchange"};
    PlanTestBase.testPlanMatchingPatterns(query, expectedPlan, new String[]{});
    testBuilder()
        .sqlQuery(query)
        .unOrdered()
        .baselineColumns("s1", "s2")
        .baselineValues(0L, 50L)
        .baselineValues(0L, 50L)
        .baselineValues(0L, 50L)
        .baselineValues(0L, 50L)
        .baselineValues(0L, 50L)
        .baselineValues(10L, 50L)
        .baselineValues(10L, 50L)
        .baselineValues(10L, 50L)
        .baselineValues(20L, 50L)
        .baselineValues(20L, 50L)
        .build()
        .run();
}
@Test // see DRILL-3657
public void testConstantsInMultiplePartitions() throws Exception {
    // Constant expressions shared across several window definitions; verifies
    // both the stacked Window/Sort plan shape and the results.
    String root = FileUtils.getResourceAsFile("/store/text/data/t.json").toURI().toString();
    String query = String.format(
        "select sum(1) over(partition by b1 order by a1) as sum1, sum(1) over(partition by a1) as sum2, rank() over(order by b1) as rank1, rank() over(order by 1) as rank2 \n" +
        "from dfs_test.`%s` \n" +
        "order by 1, 2, 3, 4", root);
    // Validate the plan
    final String[] expectedPlan = {"Window.*SUM\\(\\$3\\).*\n" +
        ".*SelectionVectorRemover.*\n" +
        ".*Sort.*\n" +
        ".*Window.*SUM\\(\\$2\\).*"
    };
    PlanTestBase.testPlanMatchingPatterns(query, expectedPlan, new String[]{});
    testBuilder()
        .sqlQuery(query)
        .unOrdered()
        .baselineColumns("sum1", "sum2", "rank1", "rank2")
        .baselineValues(2L, 5L, 1L, 1L)
        .baselineValues(2L, 5L, 1L, 1L)
        .baselineValues(2L, 5L, 6L, 1L)
        .baselineValues(2L, 5L, 6L, 1L)
        .baselineValues(3L, 5L, 3L, 1L)
        .baselineValues(3L, 5L, 3L, 1L)
        .baselineValues(3L, 5L, 3L, 1L)
        .baselineValues(3L, 5L, 8L, 1L)
        .baselineValues(3L, 5L, 8L, 1L)
        .baselineValues(3L, 5L, 8L, 1L)
        .build()
        .run();
}
@Test // DRILL-3580
public void testExpressionInWindowFunction() throws Exception {
    // A computed expression (a1 + b1) as the window-function argument; both
    // aggregates must land in a single Window operator in the plan.
    String root = FileUtils.getResourceAsFile("/store/text/data/t.json").toURI().toString();
    String query = String.format("select a1, b1, sum(b1) over (partition by a1) as c1, sum(a1 + b1) over (partition by a1) as c2\n" +
        "from dfs_test.`%s`", root);
    // Validate the plan
    final String[] expectedPlan = {"Window\\(window#0=\\[window\\(partition \\{0\\} order by \\[\\].*\\[SUM\\(\\$1\\), SUM\\(\\$2\\)\\]"};
    PlanTestBase.testPlanMatchingPatterns(query, expectedPlan, new String[]{});
    testBuilder()
        .sqlQuery(query)
        .unOrdered()
        .baselineColumns("a1", "b1", "c1", "c2")
        .baselineValues(0L, 1L, 8L, 8L)
        .baselineValues(0L, 1L, 8L, 8L)
        .baselineValues(0L, 2L, 8L, 8L)
        .baselineValues(0L, 2L, 8L, 8L)
        .baselineValues(0L, 2L, 8L, 8L)
        .baselineValues(10L, 3L, 21L, 71L)
        .baselineValues(10L, 3L, 21L, 71L)
        .baselineValues(10L, 5L, 21L, 71L)
        .baselineValues(10L, 5L, 21L, 71L)
        .baselineValues(10L, 5L, 21L, 71L)
        .build()
        .run();
}
@Test // see DRILL-3657
public void testProjectPushPastWindow() throws Exception {
    // Projection pushdown must reach below the Window operator: the scan should
    // read only n_nationkey.
    String query = "select sum(n_nationkey) over(partition by 1 order by 1) as col1, \n" +
        "count(n_nationkey) over(partition by 1 order by 1) as col2 \n" +
        "from cp.`tpch/nation.parquet` \n" +
        "limit 5";
    // Validate the plan
    final String[] expectedPlan = {"Scan.*columns=\\[`n_nationkey`\\].*"};
    PlanTestBase.testPlanMatchingPatterns(query, expectedPlan, new String[]{});
    testBuilder()
        .sqlQuery(query)
        .unOrdered()
        .baselineColumns("col1", "col2")
        .baselineValues(300L, 25L)
        .baselineValues(300L, 25L)
        .baselineValues(300L, 25L)
        .baselineValues(300L, 25L)
        .baselineValues(300L, 25L)
        .build()
        .run();
}
}
|
DRILL-3683: Add baseline and expected plan for TestWindowFunctions suite
|
exec/java-exec/src/test/java/org/apache/drill/exec/TestWindowFunctions.java
|
DRILL-3683: Add baseline and expected plan for TestWindowFunctions suite
|
|
Java
|
apache-2.0
|
aaab3fa137d648b9d7b729034498157c4687b3a0
| 0
|
MToLinux/Nginxlib
|
package org.cs2c.nginlib.config;
import java.util.ArrayList;
import java.util.List;
/**
 * A single nginx-style configuration directive, kept as raw text plus a name.
 * Parameters are parsed lazily out of the stored text.
 */
public class RecDirective implements Directive,Element{

    // Directive keyword, e.g. "listen".
    private String directiveName=null;
    // Complete directive text, terminator character (';') included.
    private String directiveValue=null;

    /** Stores the raw directive text; the trailing terminator is expected to be present. */
    public void SetDirectiveText(String DirectiveText) {
        directiveValue = DirectiveText;
    }

    @Override
    public void setName(String name) {
        directiveName = name;
    }

    @Override
    public String getName() {
        return directiveName;
    }

    /**
     * Parses the directive text into its parameters.
     * Token forms recognised (after the leading directive name):
     * "key=value" -> RecOption, "$name" -> RecVariable, anything else -> RecStringParameter.
     */
    @Override
    public List<Parameter> getParameters() {
        // Drop the trailing terminator before tokenizing.
        String body = directiveValue.substring(0, directiveValue.length()-1);
        String[] tokens = body.split(" ");
        List<Parameter> parameters = new ArrayList<Parameter>();
        // tokens[0] is the directive name itself; real parameters start at index 1.
        for (int pos = 1; pos < tokens.length; pos++) {
            String token = tokens[pos];
            if (token.length() == 0) {
                // Consecutive spaces produce empty tokens; skip them.
                continue;
            }
            if (token.contains("=")) {
                String[] pair = token.split("=");
                RecOption option = new RecOption();
                option.setName(pair[0]);
                option.setValue(pair[1]);
                parameters.add(option);
            } else if (token.contains("$")) {
                // NOTE(review): substring(1, len-1) strips both the '$' and the LAST
                // character of the variable name; looks like it assumes a trailing
                // delimiter that the terminator-strip above already removed — confirm.
                RecVariable variable = new RecVariable();
                variable.setName(token.substring(1, token.length()-1));
                parameters.add(variable);
            } else {
                RecStringParameter plain = new RecStringParameter();
                plain.setValue(token);
                parameters.add(plain);
            }
        }
        return parameters;
    }

    @Override
    public String toString(){
        return directiveValue;
    }

    /** Appends a parameter just before the trailing terminator character. */
    @Override
    public void addParameter(Parameter parameter) {
        int cut = directiveValue.length()-1;
        String terminator = directiveValue.substring(cut);
        directiveValue = directiveValue.substring(0, cut) + " " + parameter.toString() + terminator;
    }

    /**
     * Shallow copy via Object.clone().
     * NOTE(review): relies on Cloneable being inherited through Element/Directive —
     * if neither extends Cloneable, super.clone() always throws; confirm.
     */
    @Override
    public Element clone() throws CloneNotSupportedException{
        return (RecDirective) super.clone();
    }
}
|
Nginlib/src/org/cs2c/nginlib/config/RecDirective.java
|
package org.cs2c.nginlib.config;
import java.util.ArrayList;
import java.util.List;
// Legacy version: unlike the later revision, the directive text is split WITHOUT
// first stripping the trailing terminator, so the final token still carries it.
public class RecDirective implements Directive,Element{
// Directive keyword, e.g. "listen".
private String directiveName=null;
// Complete directive text, terminator included.
private String directiveValue=null;
// Stores the raw directive text as-is.
public void SetDirectiveText(String DirectiveText) {
directiveValue = DirectiveText;
}
@Override
public void setName(String name) {
directiveName = name;
}
@Override
public String getName() {
return directiveName;
}
// Parses parameters out of the raw text: "key=value" -> RecOption,
// "$name" -> RecVariable, anything else -> RecStringParameter.
@Override
public List<Parameter> getParameters() {
// Directive_name Option=9 StringParameter $Variable
String[] lineArray=directiveValue.split(" ");
List<Parameter> listParam = new ArrayList<Parameter>();
// lineArray[0] is the directive name itself; parameters start at index 1.
for(int i=1;i<lineArray.length;i++){
if(lineArray[i].contains("=")){
RecOption objOption = new RecOption();
String[] lineOption=lineArray[i].split("=");
objOption.setName(lineOption[0]);
objOption.setValue(lineOption[1]);
listParam.add(objOption);
}else if(lineArray[i].contains("$")){
RecVariable objVariable = new RecVariable();
// Strips the leading '$' and the final character (presumably the terminator
// on the last token; for other tokens this drops a real character — NOTE(review)).
objVariable.setName(lineArray[i].substring(1, lineArray[i].length()-1));
listParam.add(objVariable);
}else if(lineArray[i].length() == 0){
// Empty token from consecutive spaces: deliberately ignored.
}else{
RecStringParameter objStringParameter = new RecStringParameter();
objStringParameter.setValue(lineArray[i]);
listParam.add(objStringParameter);
}
}
return listParam;
}
@Override
public String toString(){
return directiveValue;
}
// Inserts the new parameter just before the trailing terminator character.
@Override
public void addParameter(Parameter parameter) {
StringBuilder sb = new StringBuilder();
sb.append(directiveValue.substring(0, directiveValue.length()-1)+" ");
sb.append(parameter.toString());
sb.append(directiveValue.substring(directiveValue.length()-1, directiveValue.length()));
directiveValue = sb.toString();
}
// Shallow copy; NOTE(review): requires Cloneable via Element/Directive — confirm.
@Override
public Element clone() throws CloneNotSupportedException{
RecDirective obj=null;
obj=(RecDirective) super.clone();
return obj;
}
}
|
fix Stringparam's ";"
|
Nginlib/src/org/cs2c/nginlib/config/RecDirective.java
|
fix Stringparam's ";"
|
|
Java
|
apache-2.0
|
d4713bfb94396e960f009912bcbeee2788940695
| 0
|
BrunoEberhard/minimal-j,BrunoEberhard/minimal-j,BrunoEberhard/minimal-j
|
package ch.openech.mj.db;
import java.lang.reflect.Field;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
import ch.openech.mj.criteria.Criteria;
import ch.openech.mj.criteria.Criteria.SimpleCriteria;
import ch.openech.mj.edit.value.CloneHelper;
import ch.openech.mj.model.EnumUtils;
import ch.openech.mj.model.Keys;
import ch.openech.mj.model.PropertyInterface;
import ch.openech.mj.model.Search;
import ch.openech.mj.model.ViewUtil;
import ch.openech.mj.model.properties.ChainedProperty;
import ch.openech.mj.model.properties.InlineProperty;
import ch.openech.mj.model.properties.SimpleProperty;
import ch.openech.mj.util.FieldUtils;
import ch.openech.mj.util.GenericUtils;
import ch.openech.mj.util.IdUtils;
import ch.openech.mj.util.LoggingRuntimeException;
import ch.openech.mj.util.StringUtils;
/**
* Minimal-J internal<p>
*
* Base class of all table representing classes in this persistence layer.
* Normally you should not need to extend from this class directly. Use
* the existing subclasses or only the methods in DbPersistence.
*
*/
public abstract class AbstractTable<T> {

    public static final Logger sqlLogger = Logger.getLogger("SQL");

    protected final DbPersistence dbPersistence;
    protected final DbPersistenceHelper helper;
    protected final Class<T> clazz;
    // Column name -> property, discovered reflectively from the model class.
    protected final LinkedHashMap<String, PropertyInterface> columns;
    // List-valued properties (stored in separate tables).
    protected final LinkedHashMap<String, PropertyInterface> lists;
    protected final String name;

    protected final List<Index<T>> indexes = new ArrayList<>();
    protected final Map<Search<T>, MultiIndex<T>> searches = new HashMap<>();

    // Lazily prepared statements, cached per connection and per SQL string.
    protected final Map<Connection, Map<String, PreparedStatement>> statements = new HashMap<>();

    protected final String selectByIdQuery;
    protected final String insertQuery;
    protected final String selectMaxIdQuery;
    protected final String clearQuery;

    public AbstractTable(DbPersistence dbPersistence, String name, Class<T> clazz) {
        this.dbPersistence = dbPersistence;
        this.helper = new DbPersistenceHelper(dbPersistence);
        this.name = name != null ? name : StringUtils.toDbName(clazz.getSimpleName());
        this.clazz = clazz;
        this.columns = findColumns(clazz);
        this.lists = findLists(clazz);
        this.selectByIdQuery = selectByIdQuery();
        this.insertQuery = insertQuery();
        this.selectMaxIdQuery = selectMaxIdQuery();
        this.clearQuery = clearQuery();
        findImmutables();
        findIndexes();
        findSearches();
    }

    /**
     * Collects the (non-list) persistent columns of the given class.
     * Inline (final, non-immutable) fields are flattened; their inner columns
     * are prefixed with the field name unless the field carries the class name.
     */
    private LinkedHashMap<String, PropertyInterface> findColumns(Class<?> clazz) {
        LinkedHashMap<String, PropertyInterface> columns = new LinkedHashMap<String, PropertyInterface>();
        for (Field field : clazz.getFields()) {
            if (!FieldUtils.isPublic(field) || FieldUtils.isStatic(field) || FieldUtils.isTransient(field)) continue;
            String fieldName = StringUtils.toDbName(field.getName());
            // ID and VERSION are handled by the framework, not as regular columns.
            if (fieldName.equals("ID") && FieldUtils.isAllowedId(field.getType())) continue;
            if (fieldName.equals("VERSION") && FieldUtils.isAllowedVersionType(field.getType())) continue;
            if (FieldUtils.isList(field)) continue;
            if (FieldUtils.isFinal(field) && !FieldUtils.isSet(field)) {
                if (!dbPersistence.isImmutable(field.getType())) {
                    // Inline object: flatten its columns into this table.
                    Map<String, PropertyInterface> inlinePropertys = findColumns(field.getType());
                    boolean hasClassName = FieldUtils.hasClassName(field);
                    for (String inlineKey : inlinePropertys.keySet()) {
                        String key = inlineKey;
                        if (!hasClassName) {
                            key = fieldName + "_" + inlineKey;
                        }
                        columns.put(key, new ChainedProperty(clazz, field, inlinePropertys.get(inlineKey)));
                    }
                } else {
                    columns.put(fieldName, new InlineProperty(clazz, field));
                }
            } else {
                columns.put(fieldName, new SimpleProperty(clazz, field));
            }
        }
        return columns;
    }

    /**
     * Collects the list-valued properties, including lists reached through
     * inline (final, non-immutable) fields.
     */
    private LinkedHashMap<String, PropertyInterface> findLists(Class<?> clazz) {
        LinkedHashMap<String, PropertyInterface> properties = new LinkedHashMap<String, PropertyInterface>();
        for (Field field : clazz.getFields()) {
            if (!FieldUtils.isPublic(field) || FieldUtils.isStatic(field) || FieldUtils.isTransient(field)) continue;
            if (!dbPersistence.isImmutable(field.getType()) && FieldUtils.isFinal(field) && !FieldUtils.isList(field)) {
                // This is needed to check if an inline Property contains a List
                Map<String, PropertyInterface> inlinePropertys = findLists(field.getType());
                boolean hasClassName = FieldUtils.hasClassName(field);
                for (String inlineKey : inlinePropertys.keySet()) {
                    String key = inlineKey;
                    if (!hasClassName) {
                        key = field.getName() + StringUtils.upperFirstChar(inlineKey);
                    }
                    properties.put(key, new ChainedProperty(clazz, field, inlinePropertys.get(inlineKey)));
                }
            } else if (FieldUtils.isList(field)) {
                properties.put(field.getName(), new SimpleProperty(clazz, field));
            }
        }
        return properties;
    }

    protected LinkedHashMap<String, PropertyInterface> getColumns() {
        return columns;
    }

    protected LinkedHashMap<String, PropertyInterface> getLists() {
        return lists;
    }

    protected Collection<Index<T>> getIndexes() {
        return indexes;
    }

    /** Returns a cached prepared statement for the query, preparing it on first use. */
    protected PreparedStatement getStatement(Connection connection, String query, boolean returnGeneratedKeys) throws SQLException {
        if (!statements.containsKey(connection)) {
            statements.put(connection, new HashMap<String, PreparedStatement>());
        }
        Map<String, PreparedStatement> statementsForConnection = statements.get(connection);
        if (!statementsForConnection.containsKey(query)) {
            statementsForConnection.put(query, createStatement(connection, query, returnGeneratedKeys));
        }
        return statementsForConnection.get(query);
    }

    /** Prepares a statement; wraps it for SQL logging when FINE logging is on. */
    PreparedStatement createStatement(Connection connection, String query, boolean returnGeneratedKeys) throws SQLException {
        if (returnGeneratedKeys) {
            if (sqlLogger.isLoggable(Level.FINE)) {
                return new LoggingPreparedStatement(connection, query, Statement.RETURN_GENERATED_KEYS, sqlLogger);
            } else {
                return connection.prepareStatement(query, Statement.RETURN_GENERATED_KEYS);
            }
        } else {
            if (sqlLogger.isLoggable(Level.FINE)) {
                return new LoggingPreparedStatement(connection, query, sqlLogger);
            } else {
                return connection.prepareStatement(query);
            }
        }
    }

    /** Returns the highest id currently in the table, or 0 for an empty table. */
    public int getMaxId() {
        try {
            PreparedStatement statement = getStatement(dbPersistence.getConnection(), selectMaxIdQuery, false);
            try (ResultSet resultSet = statement.executeQuery()) {
                return resultSet.next() ? resultSet.getInt(1) : 0;
            }
        } catch (SQLException x) {
            throw new LoggingRuntimeException(x, sqlLogger, "Couldn't get max Id of " + getTableName());
        }
    }

    public void create() throws SQLException {
        DbCreator creator = new DbCreator(dbPersistence);
        creator.create(dbPersistence.getConnection(), this);
    }

    /** Deletes all rows of this table. */
    public void clear() {
        try {
            PreparedStatement statement = getStatement(dbPersistence.getConnection(), clearQuery, false);
            statement.execute();
        } catch (SQLException x) {
            throw new LoggingRuntimeException(x, sqlLogger, "Clear of Table " + getTableName() + " failed");
        }
    }

    /** Runs a registered fulltext search and loads the matching objects. */
    public List<T> search(Search<T> search, Object query) {
        MultiIndex<T> multiIndex = searches.get(search);
        List<Long> ids = multiIndex.findIds(query);
        List<T> objects = new ArrayList<>(ids.size());
        for (long id : ids) {
            objects.add(multiIndex.lookup(id));
        }
        return objects;
    }

    /**
     * Reads all objects matching the criteria. Only {@link SimpleCriteria}
     * is implemented so far.
     */
    public List<T> read(Criteria criteria) {
        if (criteria instanceof SimpleCriteria) {
            SimpleCriteria simpleCriteria = (SimpleCriteria) criteria;
            PropertyInterface propertyInterface = Keys.getProperty(simpleCriteria.getKey());
            String query = "select * from " + getTableName() + " where " + whereStatement(propertyInterface.getFieldPath());
            try {
                PreparedStatement statement = getStatement(dbPersistence.getConnection(), query, false);
                Object value = simpleCriteria.getValue();
                // TODO merge with setParameter
                if (DbPersistenceHelper.isView(propertyInterface)) {
                    value = IdUtils.getId(value);
                }
                statement.setObject(1, value);
                return executeSelectAll(statement);
            } catch (SQLException e) {
                throw new LoggingRuntimeException(e, sqlLogger, "read with ReferenceCriteria failed");
            }
        }
        throw new IllegalArgumentException(criteria + " not yet implemented");
    }

    /**
     * Builds the WHERE fragment for a (possibly chained) field path.
     * If the path reaches into a referenced table, a correlated subselect
     * on that table's id is generated.
     */
    private String whereStatement(final String wholeFieldPath) {
        String fieldPath = wholeFieldPath;
        String column;
        // Walk the path from the right until a column of this table matches.
        while (true) {
            column = findColumn(fieldPath);
            if (column != null) break;
            int pos = fieldPath.lastIndexOf('.');
            if (pos < 0) throw new IllegalArgumentException("FieldPath " + wholeFieldPath + " not even partially found in " + getTableName());
            fieldPath = fieldPath.substring(0, pos);
        }
        if (fieldPath.length() < wholeFieldPath.length()) {
            String restOfFieldPath = wholeFieldPath.substring(fieldPath.length() + 1);
            PropertyInterface subProperty = columns.get(column);
            AbstractTable<?> subTable = dbPersistence.getTable(subProperty.getFieldClazz());
            // Fixed: was "col = select (ID from ..." which is not valid SQL.
            return column + " = (select ID from " + subTable.getTableName() + " where " + subTable.whereStatement(restOfFieldPath) + ")";
        } else {
            return column + " = ?";
        }
    }

    /** Returns the column name whose property has the given field path, or null. */
    private String findColumn(String fieldPath) {
        for (Map.Entry<String, PropertyInterface> entry : columns.entrySet()) {
            if (entry.getValue().getFieldPath().equals(fieldPath)) {
                return entry.getKey();
            }
        }
        return null;
    }

    protected String getTableName() {
        return name;
    }

    public Class<T> getClazz() {
        return clazz;
    }

    /** Registers the classes of referenced immutables with the persistence. */
    private void findImmutables() {
        for (Map.Entry<String, PropertyInterface> column : getColumns().entrySet()) {
            PropertyInterface property = column.getValue();
            if (DbPersistenceHelper.isView(property)) continue;
            if (DbPersistenceHelper.isReference(property)) {
                AbstractTable<?> refTable = dbPersistence.getImmutableTable(property.getFieldClazz());
                if (refTable == null) {
                    dbPersistence.addImmutableClass(property.getFieldClazz());
                }
            }
        }
    }

    /** Creates a fulltext index for every static Search field declared on the model class. */
    private void findSearches() {
        // Use the logger instead of System.out (was a debugging leftover).
        sqlLogger.fine("Find Searches in " + clazz.getSimpleName());
        for (Field field : clazz.getFields()) {
            if (!FieldUtils.isFinal(field) || !FieldUtils.isStatic(field)) continue;
            if (field.getType() == Search.class) {
                @SuppressWarnings("unchecked")
                Search<T> search = (Search<T>) FieldUtils.getStaticValue(field);
                createFulltextIndex(search);
            }
        }
    }

    private void findIndexes() {
//		for (Map.Entry<String, PropertyInterface> column : columns.entrySet()) {
//			PropertyInterface property = column.getValue();
//			if (property.getType() instanceof Reference<?>) {
//				createIndex(property, property.getFieldPath());
//			}
//		}
    }

    // execution helpers

    protected long executeInsertWithAutoIncrement(PreparedStatement statement, T object) throws SQLException {
        return executeInsertWithAutoIncrement(statement, object, null);
    }

    /** Inserts and returns the generated id. */
    protected long executeInsertWithAutoIncrement(PreparedStatement statement, T object, Integer hash) throws SQLException {
        setParameters(statement, object, false, true, hash);
        statement.execute();
        try (ResultSet autoIncrementResultSet = statement.getGeneratedKeys()) {
            autoIncrementResultSet.next();
            long id = autoIncrementResultSet.getLong(1);
            if (sqlLogger.isLoggable(Level.FINE)) sqlLogger.fine("AutoIncrement is " + id);
            return id;
        }
    }

    protected void executeInsert(PreparedStatement statement, T object) throws SQLException {
        setParameters(statement, object);
        statement.execute();
    }

    protected T executeSelect(PreparedStatement preparedStatement) throws SQLException {
        return executeSelect(preparedStatement, null);
    }

    /** Executes the select and maps the first row to an object, or returns null. */
    protected T executeSelect(PreparedStatement preparedStatement, Integer time) throws SQLException {
        try (ResultSet resultSet = preparedStatement.executeQuery()) {
            if (resultSet.next()) {
                return readResultSetRow(resultSet, time);
            } else {
                return null;
            }
        }
    }

    /** Executes the select and maps every row to an object. */
    protected List<T> executeSelectAll(PreparedStatement preparedStatement) throws SQLException {
        List<T> result = new ArrayList<T>();
        try (ResultSet resultSet = preparedStatement.executeQuery()) {
            while (resultSet.next()) {
                T object = readResultSetRow(resultSet, null);
                result.add(object);
            }
        }
        return result;
    }

    /**
     * Maps the current result-set row to a new model object, resolving views,
     * references and enum Sets on the way.
     *
     * @param time historization time for dereferencing, or null for "now"
     */
    protected T readResultSetRow(ResultSet resultSet, Integer time) throws SQLException {
        T result = CloneHelper.newInstance(clazz);
        for (int columnIndex = 1; columnIndex <= resultSet.getMetaData().getColumnCount(); columnIndex++) {
            String columnName = resultSet.getMetaData().getColumnName(columnIndex);
            if ("ID".equalsIgnoreCase(columnName) && this instanceof Table) {
                IdUtils.setId(result, resultSet.getLong(columnIndex));
                continue;
            } else if ("VERSION".equalsIgnoreCase(columnName) && this instanceof HistorizedTable) {
                IdUtils.setVersion(result, resultSet.getInt(columnIndex));
                continue;
            }
            PropertyInterface property = columns.get(columnName);
            if (property == null) continue;
            Object value = resultSet.getObject(columnIndex);
            if (value != null) {
                Class<?> fieldClass = property.getFieldClazz();
                if (DbPersistenceHelper.isView(property)) {
                    Class<?> viewedClass = DbPersistenceHelper.getViewedClass(property);
                    Table<?> referenceTable = (Table<?>) dbPersistence.getTable(viewedClass);
                    Object referenceObject = referenceTable.read(((Number) value).longValue(), false); // false -> subEntities not loaded
                    value = CloneHelper.newInstance(fieldClass);
                    ViewUtil.view(referenceObject, value);
                } else if (DbPersistenceHelper.isReference(property)) {
                    if (!dbPersistence.isImmutable(fieldClass)) {
                        // Fixed: the id has to be taken from the database value BEFORE
                        // 'value' is replaced by the new (empty) instance; the old code
                        // did IdUtils.setId(value, (Long) value) on the fresh instance.
                        long referencedId = IdUtils.convertToLong(value);
                        value = CloneHelper.newInstance(fieldClass);
                        IdUtils.setId(value, referencedId);
                    } else {
                        value = dereference(fieldClass, IdUtils.convertToLong(value), time);
                    }
                } else if (fieldClass == Set.class) {
                    Set<?> set = (Set<?>) property.getValue(result);
                    Class<?> enumClass = GenericUtils.getGenericClass(property.getType());
                    EnumUtils.fillSet((int) value, enumClass, set);
                    continue; // skip setValue, it's final
                } else {
                    value = helper.convertToFieldClass(fieldClass, value);
                }
                property.setValue(result, value);
            }
        }
        return result;
    }

    /** Loads the object with the given id from the table belonging to clazz. */
    protected <D> Object dereference(Class<D> clazz, long id, Integer time) {
        AbstractTable<D> table = dbPersistence.getTable(clazz);
        if (table instanceof ImmutableTable) {
            return ((ImmutableTable<?>) table).read(id);
        } else if (table instanceof HistorizedTable<?>) {
            return ((HistorizedTable<?>) table).read(id, time);
        } else if (table instanceof Table) {
            return ((Table<?>) table).read(id);
        } else {
            throw new IllegalArgumentException("Clazz: " + clazz);
        }
    }

    /**
     * Search or create an immutable.<p>
     * At the moment no references of other values than immutables are allowed.
     *
     * @param value the object from which to get the reference.
     * @param insertIfNotExisting true =&gt; create if not existing
     * @return the id of the immutable, or null if not found and insertIfNotExisting is false
     * @throws SQLException on database errors
     */
    private <D> Long getIdOfImmutable(D value, boolean insertIfNotExisting) throws SQLException {
        @SuppressWarnings("unchecked")
        Class<D> clazz = (Class<D>) value.getClass();
        AbstractTable<D> abstractTable = dbPersistence.getTable(clazz);
        if (abstractTable == null) {
            throw new IllegalArgumentException(clazz.getName());
        }
        if (abstractTable instanceof ImmutableTable) {
            return ((ImmutableTable<D>) abstractTable).getOrCreateId(value);
        } else {
            throw new IllegalArgumentException(clazz.getName());
        }
    }

    protected int setParameters(PreparedStatement statement, T object) throws SQLException {
        return setParameters(statement, object, null);
    }

    protected int setParameters(PreparedStatement statement, T object, Integer hash) throws SQLException {
        return setParameters(statement, object, false, false, hash);
    }

    protected int setParameters(PreparedStatement statement, T object, boolean doubleValues, boolean insert) throws SQLException {
        return setParameters(statement, object, doubleValues, insert, null);
    }

    /**
     * Binds the object's column values to the statement.
     *
     * @param doubleValues bind each value twice (for the null-tolerant selectIdQuery pattern)
     * @param insert whether missing referenced immutables may be created
     * @param hash optional trailing hash parameter, or null
     * @return the next unused parameter position
     */
    protected int setParameters(PreparedStatement statement, T object, boolean doubleValues, boolean insert, Integer hash) throws SQLException {
        int parameterPos = 1;
        for (Map.Entry<String, PropertyInterface> column : columns.entrySet()) {
            PropertyInterface property = column.getValue();
            Object value = property.getValue(object);
            if (value != null) {
                if (DbPersistenceHelper.isView(property)) {
                    value = IdUtils.getId(value);
                } else if (DbPersistenceHelper.isReference(property)) {
                    value = getIdOfImmutable(value, insert);
                }
            }
            helper.setParameter(statement, parameterPos++, value, property);
            if (doubleValues) helper.setParameter(statement, parameterPos++, value, property);
        }
        if (hash != null) {
            statement.setInt(parameterPos++, hash);
            if (doubleValues) statement.setInt(parameterPos++, hash);
        }
        return parameterPos;
    }

    protected abstract String insertQuery();

    protected abstract String selectByIdQuery();

    protected String selectMaxIdQuery() {
        StringBuilder query = new StringBuilder();
        query.append("SELECT MAX(id) FROM "); query.append(getTableName());
        return query.toString();
    }

    protected String clearQuery() {
        StringBuilder query = new StringBuilder();
        query.append("DELETE FROM "); query.append(getTableName());
        return query.toString();
    }

    /** Builds a null-tolerant "find by all columns" query. */
    protected String selectIdQuery() {
        StringBuilder where = new StringBuilder();
        boolean first = true;
        for (String key : columns.keySet()) {
            if (!first) where.append(" AND "); else first = false;
            // where.append(column.getName()); where.append(" = ?");
            // doesnt work for null so pattern is:
            // ((? IS NULL AND col1 IS NULL) OR col1 = ?)
            where.append("((? IS NULL AND "); where.append(key); where.append(" IS NULL) OR ");
            where.append(key); where.append(" = ?)");
        }
        if (this instanceof Table) {
            where.append(" AND ((? IS NULL) OR event <= ?)");
            where.append(" AND (endEvent IS NULL OR (endEvent IS NOT NULL AND (? IS NULL OR ? < endEvent)))");
        }
        StringBuilder query = new StringBuilder();
        query.append("SELECT id FROM "); query.append(getTableName()); query.append(" WHERE ");
        query.append(where);
        return query.toString();
    }

    /**
     * Builds one column index per search key (fulltext for String keys)
     * and combines them into a MultiIndex registered for the search.
     */
    private void createFulltextIndex(Search<T> search) {
        Object[] keys = search.getKeys();
        // Renamed from 'indexes' to avoid shadowing the field of the same name.
        ColumnIndex<T>[] columnIndexes = new ColumnIndex[keys.length];
        for (int i = 0; i < keys.length; i++) {
            PropertyInterface property = Keys.getProperty(keys[i]);
            if (property.getFieldClazz() == String.class) {
                columnIndexes[i] = createFulltextIndex(keys[i]);
            } else {
                columnIndexes[i] = createIndex(keys[i]);
            }
        }
        MultiIndex<T> index = new MultiIndex<T>(columnIndexes);
        searches.put(search, index);
    }

    private ColumnIndex<T> createFulltextIndex(Object key) {
        PropertyInterface property = Keys.getProperty(key);
        String fieldPath = property.getFieldPath();
        ColumnIndex<T> index = createFulltextIndex(property, fieldPath);
        return index;
    }

    /** Creates a fulltext index, chaining into a referenced table if the path leaves this table. */
    private ColumnIndex<T> createFulltextIndex(PropertyInterface property, String fieldPath) {
        ColumnIndex<T> result;
        Map.Entry<String, PropertyInterface> entry = findX(fieldPath);
        String myFieldPath = entry.getValue().getFieldPath();
        if (fieldPath.length() > myFieldPath.length()) {
            String rest = fieldPath.substring(myFieldPath.length() + 1);
            AbstractTable<?> innerTable = dbPersistence.getTable(entry.getValue().getFieldClazz());
            ColumnIndex<?> innerIndex = innerTable.createFulltextIndex(property, rest);
            result = new ColumnIndex<T>(dbPersistence, this, property, entry.getKey(), innerIndex);
        } else {
            result = new FulltextIndex<T>(dbPersistence, this, property, entry.getKey());
        }
        indexes.add(result);
        return result;
    }

    //

    public ColumnIndex<T> createIndex(Object key) {
        PropertyInterface property = Keys.getProperty(key);
        String fieldPath = property.getFieldPath();
        ColumnIndex<T> index = createIndex(property, fieldPath);
        return index;
    }

    /** Creates a column index, chaining into a referenced table if the path leaves this table. */
    public ColumnIndex<T> createIndex(PropertyInterface property, String fieldPath) {
        Map.Entry<String, PropertyInterface> entry = findX(fieldPath);
        // Removed dead code: the old 'indexes.contains(entry.getKey())' reuse-check
        // compared a String against a List<Index<T>> and could never be true.
        ColumnIndex<?> innerIndex = null;
        String myFieldPath = entry.getValue().getFieldPath();
        if (fieldPath.length() > myFieldPath.length()) {
            String rest = fieldPath.substring(myFieldPath.length() + 1);
            AbstractTable<?> innerTable = dbPersistence.getTable(entry.getValue().getFieldClazz());
            innerIndex = innerTable.createIndex(property, rest);
        }
        ColumnIndex<T> result = new ColumnIndex<T>(dbPersistence, this, property, entry.getKey(), innerIndex);
        indexes.add(result);
        return result;
    }

    //

    /**
     * Finds the column entry of this table that matches the longest prefix
     * of the given field path.
     *
     * @throws IllegalArgumentException if no prefix of the path matches any column
     */
    protected Entry<String, PropertyInterface> findX(String fieldPath) {
        while (true) {
            for (Map.Entry<String, PropertyInterface> entry : columns.entrySet()) {
                String columnFieldPath = entry.getValue().getFieldPath();
                if (columnFieldPath.equals(fieldPath)) {
                    return entry;
                }
            }
            int index = fieldPath.lastIndexOf('.');
            if (index < 0) throw new IllegalArgumentException();
            fieldPath = fieldPath.substring(0, index);
        }
    }
}
|
Minimal-J/src/main/java/ch/openech/mj/db/AbstractTable.java
|
package ch.openech.mj.db;
import java.lang.reflect.Field;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
import ch.openech.mj.criteria.Criteria;
import ch.openech.mj.criteria.Criteria.SimpleCriteria;
import ch.openech.mj.edit.value.CloneHelper;
import ch.openech.mj.model.EnumUtils;
import ch.openech.mj.model.Keys;
import ch.openech.mj.model.PropertyInterface;
import ch.openech.mj.model.Search;
import ch.openech.mj.model.ViewUtil;
import ch.openech.mj.model.properties.ChainedProperty;
import ch.openech.mj.model.properties.InlineProperty;
import ch.openech.mj.model.properties.SimpleProperty;
import ch.openech.mj.util.FieldUtils;
import ch.openech.mj.util.GenericUtils;
import ch.openech.mj.util.IdUtils;
import ch.openech.mj.util.LoggingRuntimeException;
import ch.openech.mj.util.StringUtils;
/**
* Minimal-J internal<p>
*
* Base class of all table representing classes in this persistence layer.
* Normally you should not need to extend from this class directly. Use
* the existing subclasses or only the methods in DbPersistence.
*
*/
public abstract class AbstractTable<T> {
public static final Logger sqlLogger = Logger.getLogger("SQL");
protected final DbPersistence dbPersistence;
protected final DbPersistenceHelper helper;
protected final Class<T> clazz;
protected final LinkedHashMap<String, PropertyInterface> columns;
protected final LinkedHashMap<String, PropertyInterface> lists;
protected final String name;
protected final List<Index<T>> indexes = new ArrayList<>();
protected final Map<Search<T>, MultiIndex<T>> searches = new HashMap<>();
protected final Map<Connection, Map<String, PreparedStatement>> statements = new HashMap<>();
protected final String selectByIdQuery;
protected final String insertQuery;
protected final String selectMaxIdQuery;
protected final String clearQuery;
public AbstractTable(DbPersistence dbPersistence, String name, Class<T> clazz) {
this.dbPersistence = dbPersistence;
this.helper = new DbPersistenceHelper(dbPersistence);
this.name = name != null ? name : StringUtils.toDbName(clazz.getSimpleName());
this.clazz = clazz;
this.columns = findColumns(clazz);
this.lists = findLists(clazz);
this.selectByIdQuery = selectByIdQuery();
this.insertQuery = insertQuery();
this.selectMaxIdQuery = selectMaxIdQuery();
this.clearQuery = clearQuery();
findImmutables();
findIndexes();
findSearches();
}
private LinkedHashMap<String, PropertyInterface> findColumns(Class<?> clazz) {
LinkedHashMap<String, PropertyInterface> columns = new LinkedHashMap<String, PropertyInterface>();
for (Field field : clazz.getFields()) {
if (!FieldUtils.isPublic(field) || FieldUtils.isStatic(field) || FieldUtils.isTransient(field)) continue;
String fieldName = StringUtils.toDbName(field.getName());
if (fieldName.equals("ID") && FieldUtils.isAllowedId(field.getType())) continue;
if (fieldName.equals("VERSION") && FieldUtils.isAllowedVersionType(field.getType())) continue;
if (FieldUtils.isList(field)) continue;
if (DbPersistenceHelper.isView(field)) {
columns.put(fieldName, new SimpleProperty(clazz, field));
} else if (FieldUtils.isFinal(field) && !FieldUtils.isSet(field)) {
if (!dbPersistence.isImmutable(field.getType())) {
Map<String, PropertyInterface> inlinePropertys = findColumns(field.getType());
boolean hasClassName = FieldUtils.hasClassName(field);
for (String inlineKey : inlinePropertys.keySet()) {
String key = inlineKey;
if (!hasClassName) {
key = fieldName + "_" + inlineKey;
}
columns.put(key, new ChainedProperty(clazz, field, inlinePropertys.get(inlineKey)));
}
} else {
columns.put(fieldName, new InlineProperty(clazz, field));
}
} else {
columns.put(fieldName, new SimpleProperty(clazz, field));
}
}
return columns;
}
private LinkedHashMap<String, PropertyInterface> findLists(Class<?> clazz) {
LinkedHashMap<String, PropertyInterface> properties = new LinkedHashMap<String, PropertyInterface>();
for (Field field : clazz.getFields()) {
if (!FieldUtils.isPublic(field) || FieldUtils.isStatic(field) || FieldUtils.isTransient(field)) continue;
if (!dbPersistence.isImmutable(field.getType()) && FieldUtils.isFinal(field) && !FieldUtils.isList(field)) {
// This is needed to check if an inline Property contains a List
Map<String, PropertyInterface> inlinePropertys = findLists(field.getType());
boolean hasClassName = FieldUtils.hasClassName(field);
for (String inlineKey : inlinePropertys.keySet()) {
String key = inlineKey;
if (!hasClassName) {
key = field.getName() + StringUtils.upperFirstChar(inlineKey);
}
properties.put(key, new ChainedProperty(clazz, field, inlinePropertys.get(inlineKey)));
}
} else if (FieldUtils.isList(field)) {
properties.put(field.getName(), new SimpleProperty(clazz, field));
}
}
return properties;
}
protected LinkedHashMap<String, PropertyInterface> getColumns() {
return columns;
}
protected LinkedHashMap<String, PropertyInterface> getLists() {
return lists;
}
protected Collection<Index<T>> getIndexes() {
return indexes;
}
protected PreparedStatement getStatement(Connection connection, String query, boolean returnGeneratedKeys) throws SQLException {
if (!statements.containsKey(connection)) {
statements.put(connection, new HashMap<String, PreparedStatement>());
}
Map<String, PreparedStatement> statementsForConnection = statements.get(connection);
if (!statementsForConnection.containsKey(query)) {
statementsForConnection.put(query, createStatement(connection, query, returnGeneratedKeys));
}
return statementsForConnection.get(query);
}
PreparedStatement createStatement(Connection connection, String query, boolean returnGeneratedKeys) throws SQLException {
if (returnGeneratedKeys) {
if (sqlLogger.isLoggable(Level.FINE)) {
return new LoggingPreparedStatement(connection, query, Statement.RETURN_GENERATED_KEYS, sqlLogger);
} else {
return connection.prepareStatement(query, Statement.RETURN_GENERATED_KEYS);
}
} else {
if (sqlLogger.isLoggable(Level.FINE)) {
return new LoggingPreparedStatement(connection, query, sqlLogger);
} else {
return connection.prepareStatement(query);
}
}
}
public int getMaxId() {
try {
PreparedStatement statement = getStatement(dbPersistence.getConnection(), selectMaxIdQuery, false);
try (ResultSet resultSet = statement.executeQuery()) {
if (resultSet.next()) {
return resultSet.getInt(1);
} else {
return 0;
}
} catch (SQLException x) {
throw new LoggingRuntimeException(x, sqlLogger, "Couldn't get max Id of " + getTableName());
}
} catch (SQLException x) {
throw new LoggingRuntimeException(x, sqlLogger, "Couldn't get max Id of " + getTableName());
}
}
public void create() throws SQLException {
DbCreator creator = new DbCreator(dbPersistence);
creator.create(dbPersistence.getConnection(), this);
}
public void clear() {
try {
PreparedStatement statement = getStatement(dbPersistence.getConnection(), clearQuery, false);
statement.execute();
} catch (SQLException x) {
throw new LoggingRuntimeException(x, sqlLogger, "Clear of Table " + getTableName() + " failed");
}
}
public List<T> search(Search<T> search, Object query) {
MultiIndex<T> multiIndex = searches.get(search);
List<Long> ids = multiIndex.findIds(query);
List<T> objects = new ArrayList<>(ids.size());
for (long id : ids) {
objects.add(multiIndex.lookup(id));
}
return objects;
}
public List<T> read(Criteria criteria) {
if (criteria instanceof SimpleCriteria) {
SimpleCriteria simpleCriteria = (SimpleCriteria) criteria;
PropertyInterface propertyInterface = Keys.getProperty(simpleCriteria.getKey());
String query = "select * from " + getTableName() + " where " + whereStatement(propertyInterface.getFieldPath());
try {
PreparedStatement statement = getStatement(dbPersistence.getConnection(), query, false);
Object value = simpleCriteria.getValue();
// TODO merge with setParameter
if (DbPersistenceHelper.isView(propertyInterface)) {
value = IdUtils.getId(value);
}
statement.setObject(1, value);
return executeSelectAll(statement);
} catch (SQLException e) {
throw new LoggingRuntimeException(e, sqlLogger, "read with ReferenceCriteria failed");
}
}
throw new IllegalArgumentException(criteria + " not yet implemented");
}
private String whereStatement(final String wholeFieldPath) {
String fieldPath = wholeFieldPath;
String column;
while (true) {
column = findColumn(fieldPath);
if (column != null) break;
int pos = fieldPath.lastIndexOf('.');
if (pos < 0) throw new IllegalArgumentException("FieldPath " + wholeFieldPath + " not even partially found in " + getTableName());
fieldPath = fieldPath.substring(0, pos);
}
if (fieldPath.length() < wholeFieldPath.length()) {
String restOfFieldPath = wholeFieldPath.substring(fieldPath.length() + 1);
PropertyInterface subProperty = columns.get(column);
AbstractTable<?> subTable = dbPersistence.getTable(subProperty.getFieldClazz());
return column + " = select (ID from " + subTable.getTableName() + " where " + subTable.whereStatement(restOfFieldPath) + ")";
} else {
return column + " = ?";
}
}
private String findColumn(String fieldPath) {
for (Map.Entry<String, PropertyInterface> entry : columns.entrySet()) {
if (entry.getValue().getFieldPath().equals(fieldPath)) {
return entry.getKey();
}
}
return null;
}
protected String getTableName() {
return name;
}
public Class<T> getClazz() {
return clazz;
}
private void findImmutables() {
for (Map.Entry<String, PropertyInterface> column : getColumns().entrySet()) {
PropertyInterface property = column.getValue();
if (DbPersistenceHelper.isView(property)) continue;
if (DbPersistenceHelper.isReference(property)) {
AbstractTable<?> refTable = dbPersistence.getImmutableTable(property.getFieldClazz());
if (refTable == null) {
dbPersistence.addImmutableClass(property.getFieldClazz());
}
}
}
}
private void findSearches() {
System.out.println("Find Searches in " + clazz.getSimpleName());
for (Field field : clazz.getFields()) {
if (!FieldUtils.isFinal(field) || !FieldUtils.isStatic(field)) continue;
if (field.getType() == Search.class) {
@SuppressWarnings("unchecked")
Search<T> search = (Search<T>) FieldUtils.getStaticValue(field);
createFulltextIndex(search);
}
}
}
private void findIndexes() {
// for (Map.Entry<String, PropertyInterface> column : columns.entrySet()) {
// PropertyInterface property = column.getValue();
// if (property.getType() instanceof Reference<?>) {
// createIndex(property, property.getFieldPath());
// }
// }
}
// execution helpers
protected long executeInsertWithAutoIncrement(PreparedStatement statement, T object) throws SQLException {
return executeInsertWithAutoIncrement(statement, object, null);
}
protected long executeInsertWithAutoIncrement(PreparedStatement statement, T object, Integer hash) throws SQLException {
setParameters(statement, object, false, true, hash);
statement.execute();
try (ResultSet autoIncrementResultSet = statement.getGeneratedKeys()) {
autoIncrementResultSet.next();
long id = autoIncrementResultSet.getLong(1);
if (sqlLogger.isLoggable(Level.FINE)) sqlLogger.fine("AutoIncrement is " + id);
return id;
}
}
protected void executeInsert(PreparedStatement statement, T object) throws SQLException {
setParameters(statement, object);
statement.execute();
}
protected T executeSelect(PreparedStatement preparedStatement) throws SQLException {
return executeSelect(preparedStatement, null);
}
protected T executeSelect(PreparedStatement preparedStatement, Integer time) throws SQLException {
try (ResultSet resultSet = preparedStatement.executeQuery()) {
if (resultSet.next()) {
return readResultSetRow(resultSet, time);
} else {
return null;
}
}
}
protected List<T> executeSelectAll(PreparedStatement preparedStatement) throws SQLException {
List<T> result = new ArrayList<T>();
try (ResultSet resultSet = preparedStatement.executeQuery()) {
while (resultSet.next()) {
T object = readResultSetRow(resultSet, null);
result.add(object);
}
}
return result;
}
protected T readResultSetRow(ResultSet resultSet, Integer time) throws SQLException {
T result = CloneHelper.newInstance(clazz);
for (int columnIndex = 1; columnIndex <= resultSet.getMetaData().getColumnCount(); columnIndex++) {
String columnName = resultSet.getMetaData().getColumnName(columnIndex);
if ("ID".equalsIgnoreCase(columnName) && this instanceof Table) {
IdUtils.setId(result, resultSet.getLong(columnIndex));
continue;
} else if ("VERSION".equalsIgnoreCase(columnName) && this instanceof HistorizedTable) {
IdUtils.setVersion(result, resultSet.getInt(columnIndex));
continue;
}
PropertyInterface property = columns.get(columnName);
if (property == null) continue;
Object value = resultSet.getObject(columnIndex);
if (value != null) {
Class<?> fieldClass = property.getFieldClazz();
if (DbPersistenceHelper.isView(property)) {
Class<?> viewedClass = DbPersistenceHelper.getViewedClass(property);
Table<?> referenceTable = (Table<?>) dbPersistence.getTable(viewedClass);
Object referenceObject = referenceTable.read(((Number) value).longValue(), false); // false -> subEntities not loaded
value = CloneHelper.newInstance(fieldClass);
ViewUtil.view(referenceObject, value);
} else if (DbPersistenceHelper.isReference(property)) {
if (!dbPersistence.isImmutable(fieldClass)) {
value = CloneHelper.newInstance(fieldClass);
IdUtils.setId(value, (Long) value);
} else {
value = dereference(fieldClass, IdUtils.convertToLong(value), time);
}
} else if (fieldClass == Set.class) {
Set<?> set = (Set<?>) property.getValue(result);
Class<?> enumClass = GenericUtils.getGenericClass(property.getType());
EnumUtils.fillSet((int) value, enumClass, set);
continue; // skip setValue, it's final
} else {
value = helper.convertToFieldClass(fieldClass, value);
}
property.setValue(result, value);
}
}
return result;
}
protected <D> Object dereference(Class<D> clazz, long id, Integer time) {
AbstractTable<D> table = dbPersistence.getTable(clazz);
if (table instanceof ImmutableTable) {
return ((ImmutableTable<?>) table).read(id);
} else if (table instanceof HistorizedTable<?>) {
return ((HistorizedTable<?>) table).read(id, time);
} else if (table instanceof Table) {
return ((Table<?>) table).read(id);
} else {
throw new IllegalArgumentException("Clazz: " + clazz);
}
}
/**
* Search or create an immutable.<br>
* At the moment no references of other values than immutables are allowed.
*
* @param value the object from which to get the reference.
* @param insertIfNotExisting true => create if not existing
* @return <code>if value not found and parameter insert is false
* @throws SQLException
*/
private <D> Long getIdOfImmutable(D value, boolean insertIfNotExisting) throws SQLException {
@SuppressWarnings("unchecked")
Class<D> clazz = (Class<D>) value.getClass();
AbstractTable<D> abstractTable = dbPersistence.getTable(clazz);
if (abstractTable == null) {
throw new IllegalArgumentException(clazz.getName());
}
if (abstractTable instanceof ImmutableTable) {
return ((ImmutableTable<D>) abstractTable).getOrCreateId(value);
} else {
throw new IllegalArgumentException(clazz.getName());
}
}
protected int setParameters(PreparedStatement statement, T object) throws SQLException {
return setParameters(statement, object, null);
}
protected int setParameters(PreparedStatement statement, T object, Integer hash) throws SQLException {
return setParameters(statement, object, false, false, hash);
}
protected int setParameters(PreparedStatement statement, T object, boolean doubleValues, boolean insert) throws SQLException {
return setParameters(statement, object, doubleValues, insert, null);
}
protected int setParameters(PreparedStatement statement, T object, boolean doubleValues, boolean insert, Integer hash) throws SQLException {
int parameterPos = 1;
for (Map.Entry<String, PropertyInterface> column : columns.entrySet()) {
PropertyInterface property = column.getValue();
Object value = property.getValue(object);
if (value != null) {
if (DbPersistenceHelper.isView(property)) {
value = IdUtils.getId(value);
} else if (DbPersistenceHelper.isReference(property)) {
value = getIdOfImmutable(value, insert);
}
}
helper.setParameter(statement, parameterPos++, value, property);
if (doubleValues) helper.setParameter(statement, parameterPos++, value, property);
}
if (hash != null) {
statement.setInt(parameterPos++, hash);
if (doubleValues) statement.setInt(parameterPos++, hash);
}
return parameterPos;
}
protected abstract String insertQuery();
protected abstract String selectByIdQuery();
protected String selectMaxIdQuery() {
StringBuilder query = new StringBuilder();
query.append("SELECT MAX(id) FROM "); query.append(getTableName());
return query.toString();
}
protected String clearQuery() {
StringBuilder query = new StringBuilder();
query.append("DELETE FROM "); query.append(getTableName());
return query.toString();
}
protected String selectIdQuery() {
StringBuilder where = new StringBuilder();
boolean first = true;
for (String key : columns.keySet()) {
if (!first) where.append(" AND "); else first = false;
// where.append(column.getName()); where.append(" = ?");
// doesnt work for null so pattern is:
// ((? IS NULL AND col1 IS NULL) OR col1 = ?)
where.append("((? IS NULL AND "); where.append(key); where.append(" IS NULL) OR ");
where.append(key); where.append(" = ?)");
}
if (this instanceof Table) {
where.append(" AND ((? IS NULL) OR event <= ?)");
where.append(" AND (endEvent IS NULL OR (endEvent IS NOT NULL AND (? IS NULL OR ? < endEvent)))");
}
StringBuilder query = new StringBuilder();
query.append("SELECT id FROM "); query.append(getTableName()); query.append(" WHERE ");
query.append(where);
return query.toString();
}
private void createFulltextIndex(Search<T> search) {
Object[] keys = search.getKeys();
ColumnIndex<T>[] indexes = new ColumnIndex[keys.length];
for (int i = 0; i<keys.length; i++) {
PropertyInterface property = Keys.getProperty(keys[i]);
if (property.getFieldClazz() == String.class) {
indexes[i] = createFulltextIndex(keys[i]);
} else {
indexes[i] = createIndex(keys[i]);
}
}
MultiIndex<T> index = new MultiIndex<T>(indexes);
searches.put(search, index);
}
private ColumnIndex<T> createFulltextIndex(Object key) {
PropertyInterface property = Keys.getProperty(key);
String fieldPath = property.getFieldPath();
ColumnIndex<T> index = createFulltextIndex(property, fieldPath);
return index;
}
private ColumnIndex<T> createFulltextIndex(PropertyInterface property, String fieldPath) {
ColumnIndex<T> result;
Map.Entry<String, PropertyInterface> entry = findX(fieldPath);
String myFieldPath = entry.getValue().getFieldPath();
if (fieldPath.length() > myFieldPath.length()) {
String rest = fieldPath.substring(myFieldPath.length() + 1);
AbstractTable<?> innerTable = dbPersistence.getTable(entry.getValue().getFieldClazz());
ColumnIndex<?> innerIndex = innerTable.createFulltextIndex(property, rest);
result = new ColumnIndex<T>(dbPersistence, this, property, entry.getKey(), innerIndex);
} else {
result = new FulltextIndex<T>(dbPersistence, this, property, entry.getKey());
}
indexes.add(result);
return result;
}
//
public ColumnIndex<T> createIndex(Object key) {
PropertyInterface property = Keys.getProperty(key);
String fieldPath = property.getFieldPath();
ColumnIndex<T> index = createIndex(property, fieldPath);
return index;
}
public ColumnIndex<T> createIndex(PropertyInterface property, String fieldPath) {
Map.Entry<String, PropertyInterface> entry = findX(fieldPath);
if (indexes.contains(entry.getKey())) {
return (ColumnIndex<T>) indexes.get(indexes.indexOf(entry.getKey()));
}
ColumnIndex<?> innerIndex = null;
String myFieldPath = entry.getValue().getFieldPath();
if (fieldPath.length() > myFieldPath.length()) {
String rest = fieldPath.substring(myFieldPath.length() + 1);
AbstractTable<?> innerTable = dbPersistence.getTable(entry.getValue().getFieldClazz());
innerIndex = innerTable.createIndex(property, rest);
}
ColumnIndex<T> result = new ColumnIndex<T>(dbPersistence, this, property, entry.getKey(), innerIndex);
indexes.add(result);
return result;
}
//
protected Entry<String, PropertyInterface> findX(String fieldPath) {
while (true) {
for (Map.Entry<String, PropertyInterface> entry : columns.entrySet()) {
String columnFieldPath = entry.getValue().getFieldPath();
if (columnFieldPath.equals(fieldPath)) {
return entry;
}
}
int index = fieldPath.lastIndexOf('.');
if (index < 0) throw new IllegalArgumentException();
fieldPath = fieldPath.substring(0, index);
}
}
}
|
AbstractTable.findColumns: special check for view fields not necessary
|
Minimal-J/src/main/java/ch/openech/mj/db/AbstractTable.java
|
AbstractTable.findColumns: special check for view fields not necessary
|
|
Java
|
bsd-3-clause
|
1ab829535f00aa929bd4004c871c7bd494d83394
| 0
|
wdv4758h/ZipPy,wdv4758h/ZipPy,wdv4758h/ZipPy,wdv4758h/ZipPy,wdv4758h/ZipPy,wdv4758h/ZipPy,wdv4758h/ZipPy,wdv4758h/ZipPy
|
/*
* Copyright (c) 2013, Regents of the University of California
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package edu.uci.python.builtins.type;
import java.util.*;
import com.oracle.truffle.api.dsl.*;
import edu.uci.python.builtins.*;
import edu.uci.python.nodes.function.*;
import edu.uci.python.runtime.array.*;
import edu.uci.python.runtime.datatype.*;
import edu.uci.python.runtime.sequence.*;
import edu.uci.python.runtime.sequence.storage.*;
/**
* @author Gulfem
* @author zwei
*/
public final class StringBuiltins extends PythonBuiltins {
@Override
protected List<com.oracle.truffle.api.dsl.NodeFactory<? extends PythonBuiltinNode>> getNodeFactories() {
return StringBuiltinsFactory.getFactories();
}
private static final PList idmap = new PList();
static {
for (int i = 0; i < 256; i++) {
idmap.append(Character.toString((char) i));
}
}
// str.startswith(prefix[, start[, end]])
@Builtin(name = "startswith", fixedNumOfArguments = 2, hasFixedNumOfArguments = true)
public abstract static class StartsWithNode extends PythonBuiltinNode {
@Specialization
public Object startsWith(String self, String prefix) {
if (self.startsWith(prefix)) {
return true;
}
return false;
}
@Specialization
public Object startsWith(Object self, Object prefix) {
throw new RuntimeException("startsWith is not supported for " + self + " " + self.getClass() + " prefix " + prefix);
}
}
// str.join(iterable)
@Builtin(name = "join", fixedNumOfArguments = 2, hasFixedNumOfArguments = true)
public abstract static class JoinNode extends PythonBuiltinNode {
@Specialization(order = 0)
public String join(String string, String arg) {
StringBuilder sb = new StringBuilder();
char[] joinString = arg.toCharArray();
for (int i = 0; i < joinString.length - 1; i++) {
sb.append(Character.toString(joinString[i]));
sb.append(string);
}
sb.append(Character.toString(joinString[joinString.length - 1]));
return sb.toString();
}
@Specialization(order = 2, guards = "is2ndObjectStorage")
public String join(String string, PList list) {
StringBuilder sb = new StringBuilder();
ObjectSequenceStorage store = (ObjectSequenceStorage) list.getStorage();
for (int i = 0; i < list.len() - 1; i++) {
sb.append(store.getItemInBound(i));
sb.append(string);
}
sb.append(list.getItem(list.len() - 1));
return sb.toString();
}
@Specialization(order = 5)
public String join(String string, PSequence seq) {
StringBuilder sb = new StringBuilder();
for (int i = 0; i < seq.len() - 1; i++) {
sb.append(seq.getItem(i).toString());
sb.append(string);
}
sb.append(seq.getItem(seq.len() - 1));
return sb.toString();
}
@Specialization(order = 6)
public String join(String string, PCharArray array) {
StringBuilder sb = new StringBuilder();
char[] stringList = array.getSequence();
for (int i = 0; i < stringList.length - 1; i++) {
sb.append(Character.toString(stringList[i]));
sb.append(string);
}
sb.append(Character.toString(stringList[stringList.length - 1]));
return sb.toString();
}
@Specialization(order = 7)
public String join(String string, PSet arg) {
if (arg.len() == 0) {
return string.toString();
}
StringBuilder sb = new StringBuilder();
Object[] joinString = arg.getSet().toArray();
for (int i = 0; i < joinString.length - 1; i++) {
sb.append(joinString[i]);
sb.append(string);
}
sb.append(joinString[joinString.length - 1]);
return sb.toString();
}
@Specialization(order = 8)
public String join(Object self, Object arg) {
throw new RuntimeException("invalid arguments type for join(): self " + self + ", arg " + arg);
}
}
// str.upper()
@Builtin(name = "upper", fixedNumOfArguments = 1, hasFixedNumOfArguments = true)
public abstract static class UpperNode extends PythonBuiltinNode {
@Specialization
public String upper(String self) {
return self.toUpperCase();
}
}
// static str.maketrans()
@Builtin(name = "maketrans", fixedNumOfArguments = 2, hasFixedNumOfArguments = true)
public abstract static class MakeTransNode extends PythonBuiltinNode {
@Specialization
public PDict maketrans(String from, String to) {
if (from.length() != to.length()) {
throw new RuntimeException("maketrans arguments must have same length");
}
PDict translation = new PDict();
for (int i = 0; i < from.length(); i++) {
int key = from.charAt(i);
int value = to.charAt(i);
translation.setItem(key, value);
}
return translation;
}
}
// str.translate()
@Builtin(name = "translate", fixedNumOfArguments = 2, hasFixedNumOfArguments = true)
public abstract static class TranslateNode extends PythonBuiltinNode {
@Specialization(order = 0)
public String translate(String self, PDict table) {
char[] translatedChars = new char[self.length()];
for (int i = 0; i < self.length(); i++) {
char original = self.charAt(i);
Object translated = table.getItem((int) original);
int ord = translated == null ? original : (int) translated;
translatedChars[i] = (char) ord;
}
return new String(translatedChars);
}
}
// str.lower()
@Builtin(name = "lower", fixedNumOfArguments = 1, hasFixedNumOfArguments = true)
public abstract static class LowerNode extends PythonBuiltinNode {
@Specialization(order = 0)
public String lower(String self) {
return self.toLowerCase();
}
}
// str.split
@Builtin(name = "split", maxNumOfArguments = 3)
public abstract static class SplitNode extends PythonBuiltinNode {
@Specialization
public PList doSplit(String self, @SuppressWarnings("unused") PNone sep, int maxsplit) {
return splitfields(self, maxsplit);
}
// See {@link PyString}
private static PList splitfields(String s, int maxsplit) {
/*
* Result built here is a list of split parts, exactly as required for s.split(None,
* maxsplit). If there are to be n splits, there will be n+1 elements in L.
*/
PList list = new PList();
int length = s.length();
int start = 0;
int splits = 0;
int index;
int maxsplit2 = maxsplit;
if (maxsplit2 < 0) {
// Make all possible splits: there can't be more than:
maxsplit2 = length;
}
// start is always the first character not consumed into a piece on the list
while (start < length) {
// Find the next occurrence of non-whitespace
while (start < length) {
if (!Character.isWhitespace(s.charAt(start))) {
// Break leaving start pointing at non-whitespace
break;
}
start++;
}
if (start >= length) {
// Only found whitespace so there is no next segment
break;
} else if (splits >= maxsplit2) {
// The next segment is the last and contains all characters up to the end
index = length;
} else {
// The next segment runs up to the next next whitespace or end
for (index = start; index < length; index++) {
if (Character.isWhitespace(s.charAt(index))) {
// Break leaving index pointing at whitespace
break;
}
}
}
// Make a piece from start up to index
list.append(s.substring(start, index));
splits++;
// Start next segment search at that point
start = index;
}
return list;
}
}
}
|
graal/edu.uci.python.builtins/src/edu/uci/python/builtins/type/StringBuiltins.java
|
/*
* Copyright (c) 2013, Regents of the University of California
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package edu.uci.python.builtins.type;
import java.util.*;
import com.oracle.truffle.api.dsl.*;
import edu.uci.python.builtins.*;
import edu.uci.python.nodes.function.*;
import edu.uci.python.runtime.array.*;
import edu.uci.python.runtime.datatype.*;
import edu.uci.python.runtime.sequence.*;
import edu.uci.python.runtime.sequence.storage.*;
/**
* @author Gulfem
* @author zwei
*/
public final class StringBuiltins extends PythonBuiltins {
@Override
protected List<com.oracle.truffle.api.dsl.NodeFactory<? extends PythonBuiltinNode>> getNodeFactories() {
return StringBuiltinsFactory.getFactories();
}
private static final PList idmap = new PList();
static {
for (int i = 0; i < 256; i++) {
idmap.append(Character.toString((char) i));
}
}
// str.startswith(prefix[, start[, end]])
@Builtin(name = "startswith", fixedNumOfArguments = 2, hasFixedNumOfArguments = true)
public abstract static class StartsWithNode extends PythonBuiltinNode {
@Specialization
public Object startsWith(String self, String prefix) {
if (self.startsWith(prefix)) {
return true;
}
return false;
}
@Specialization
public Object startsWith(Object self, Object prefix) {
throw new RuntimeException("startsWith is not supported for " + self + " " + self.getClass() + " prefix " + prefix);
}
}
// str.join(iterable)
@Builtin(name = "join", fixedNumOfArguments = 2, hasFixedNumOfArguments = true)
public abstract static class JoinNode extends PythonBuiltinNode {
@Specialization(order = 0)
public String join(String string, String arg) {
StringBuilder sb = new StringBuilder();
char[] joinString = arg.toCharArray();
for (int i = 0; i < joinString.length - 1; i++) {
sb.append(Character.toString(joinString[i]));
sb.append(string);
}
sb.append(Character.toString(joinString[joinString.length - 1]));
return sb.toString();
}
@Specialization(order = 2, guards = "is2ndObjectStorage")
public String join(String string, PList list) {
StringBuilder sb = new StringBuilder();
ObjectSequenceStorage store = (ObjectSequenceStorage) list.getStorage();
for (int i = 0; i < list.len() - 1; i++) {
sb.append(store.getItemInBound(i));
sb.append(string);
}
sb.append(list.getItem(list.len() - 1));
return sb.toString();
}
@Specialization(order = 5)
public String join(String string, PSequence seq) {
StringBuilder sb = new StringBuilder();
for (int i = 0; i < seq.len() - 1; i++) {
sb.append(seq.getItem(i).toString());
sb.append(string);
}
sb.append(seq.getItem(seq.len() - 1));
return sb.toString();
}
@Specialization(order = 6)
public String join(String string, PCharArray array) {
StringBuilder sb = new StringBuilder();
char[] stringList = array.getSequence();
for (int i = 0; i < stringList.length - 1; i++) {
sb.append(Character.toString(stringList[i]));
sb.append(string);
}
sb.append(Character.toString(stringList[stringList.length - 1]));
return sb.toString();
}
@Specialization(order = 7)
public String join(String string, PSet arg) {
if (arg.len() == 0) {
return string.toString();
}
StringBuilder sb = new StringBuilder();
Object[] joinString = arg.getSet().toArray();
for (int i = 0; i < joinString.length - 1; i++) {
sb.append(joinString[i]);
sb.append(string);
}
sb.append(joinString[joinString.length - 1]);
return sb.toString();
}
@Specialization(order = 8)
public String join(Object self, Object arg) {
throw new RuntimeException("invalid arguments type for join(): self " + self + ", arg " + arg);
}
}
// str.upper()
@Builtin(name = "upper", fixedNumOfArguments = 1, hasFixedNumOfArguments = true)
public abstract static class UpperNode extends PythonBuiltinNode {
@Specialization
public String upper(String self) {
return self.toUpperCase();
}
}
// static str.maketrans()
@Builtin(name = "maketrans", fixedNumOfArguments = 2, hasFixedNumOfArguments = true)
public abstract static class MakeTransNode extends PythonBuiltinNode {
@Specialization
public PDict maketrans(String from, String to) {
if (from.length() != to.length()) {
throw new RuntimeException("maketrans arguments must have same length");
}
PDict translation = new PDict();
for (int i = 0; i < from.length(); i++) {
int key = from.charAt(i);
int value = to.charAt(i);
translation.setItem(key, value);
}
return translation;
}
}
// str.translate()
@Builtin(name = "translate", fixedNumOfArguments = 2, hasFixedNumOfArguments = true)
public abstract static class TranslateNode extends PythonBuiltinNode {
@Specialization(order = 0)
public String translate(String self, PDict table) {
char[] translatedChars = new char[self.length()];
for (int i = 0; i < self.length(); i++) {
char original = self.charAt(i);
Object translated = table.getItem((int) original);
int ord = translated == null ? original : (int) translated;
translatedChars[i] = (char) ord;
}
return new String(translatedChars);
}
}
// str.lower()
@Builtin(name = "lower", fixedNumOfArguments = 1, hasFixedNumOfArguments = true)
public abstract static class LowerNode extends PythonBuiltinNode {
@Specialization(order = 0)
public String lower(String self) {
return self.toLowerCase();
}
}
}
|
str.split()
|
graal/edu.uci.python.builtins/src/edu/uci/python/builtins/type/StringBuiltins.java
|
str.split()
|
|
Java
|
bsd-3-clause
|
fbf7e1e6de1e34eebcadc62e45fb8a6d1a373bc9
| 0
|
groupon/monsoon,groupon/monsoon,groupon/monsoon,groupon/monsoon
|
package com.groupon.lex.metrics.history.xdr;
import com.groupon.lex.metrics.history.TSData;
import com.groupon.lex.metrics.history.TSDataVersionDispatch.Releaseable;
import com.groupon.lex.metrics.history.v2.Compression;
import com.groupon.lex.metrics.history.v2.tables.ReadonlyTableFile;
import com.groupon.lex.metrics.history.v2.tables.ToXdrTables;
import com.groupon.lex.metrics.history.xdr.support.FileUtil;
import com.groupon.lex.metrics.lib.GCCloseable;
import java.io.IOException;
import java.lang.ref.Reference;
import java.lang.ref.WeakReference;
import java.nio.channels.FileChannel;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Collection;
import java.util.Comparator;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.stream.Collectors;
import lombok.Getter;
import lombok.NonNull;
import lombok.RequiredArgsConstructor;
import org.acplt.oncrpc.OncRpcException;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
/**
* A task that optimizes a set of files into a single table file.
*
* @author ariane
*/
public class TSDataOptimizerTask {
private static final Logger LOG = Logger.getLogger(TSDataOptimizerTask.class.getName());
/**
* The task pool handles the creation of temporary files containing all
* data. The task is highly CPU bound (especially the gathering of data
* stage in ToXdrTables). Limiting it to 1 thread ensures the tasks don't
* overwhelm the ForkJoinPool and the limit of 1 thread means multiple
* compression actions will be queued one-after-the-other.
*
* The task itself mainly uses the ForkJoinPool, so this thread spends most
* of the time waiting for work to complete (or new work to come in).
*/
private static final ExecutorService TASK_POOL = Executors.newFixedThreadPool(1);
/**
* The install pool handles the file installation part of creating a new
* tables file. It is IO bound, simply copying from a temporary file to the
* final file.
*
* Since it's IO bound, it won't interfere with ForkJoinPool tasks. It is a
* separate thread from the task pool, to allow the task pool to pick up a
* new file while the old file is being written out.
*/
private static final ExecutorService INSTALL_POOL = Executors.newFixedThreadPool(1);
/**
* List of outstanding futures. The list is used during program termination
* to cancel all incomplete futures and thus shut down any dependant tasks
* properly.
*
* Access is {@code synchronized(OUTSTANDING)}.
*/
private static final List<CompletableFuture<NewFile>> OUTSTANDING = new LinkedList<>();
/**
* Destination directory in which to write files. Also used as a location
* for temporary files.
*/
private final Path destDir;
/**
* List of files to add to the generated tables file.
*/
private Map<Path, Reference<TSData>> files = new HashMap<>();
static {
// Create shutdown hook that cancels all outstanding futures and tears
// down the threads.
Runtime.getRuntime().addShutdownHook(new Thread(() -> {
try {
synchronized (OUTSTANDING) {
OUTSTANDING.forEach(fut -> fut.cancel(false));
}
INSTALL_POOL.shutdown();
TASK_POOL.shutdown();
if (!INSTALL_POOL.awaitTermination(30, TimeUnit.SECONDS))
LOG.log(Level.WARNING, "Install pool did not shut down after 30 seconds.");
} catch (InterruptedException ex) {
LOG.log(Level.SEVERE, "Interrupted while waiting for clean shutdown of install pool.", ex);
}
}));
}
/**
* Create a new optimizer task and fill it with the given files.
*
* @param destDir the destination directory in which the new file will be
* placed.
* @param files zero or more files to add; note that the value of the map is
* allowed to be null.
*/
public TSDataOptimizerTask(@NonNull Path destDir, @NonNull Map<Path, TSData> files) {
this(destDir);
files.forEach(this::add);
}
/**
* Create a new optimizer task and fill it with the given files.
*
* @param destDir the destination directory in which the new file will be
* placed.
* @param files a list of filenames to add.
*/
public TSDataOptimizerTask(@NonNull Path destDir, @NonNull Collection<Path> files) {
this(destDir);
files.forEach(this::add);
}
/**
* Create a new optimizer task and fill it with the given files.
*
* @param destDir the destination directory in which the new file will be
* placed.
*/
public TSDataOptimizerTask(@NonNull Path destDir) {
this.destDir = destDir;
if (!Files.isDirectory(destDir))
throw new IllegalArgumentException(destDir + " is not a directory");
}
/**
* Add a file to the collection of files that will make up the final
* optimized file.
*
* @param file The name of the file to be added.
* @param tsdata The TSData obtained when opening this file; may be null.
* @return this TSDataOptimizerTask.
*/
public TSDataOptimizerTask add(@NonNull Path file, TSData tsdata) {
files.put(file, new WeakReference<>(tsdata));
return this;
}
/**
* Add a file to the collection of files that will make up the final
* optimized file.
*
* @param file The name of the file to be added.
* @return this TSDataOptimizerTask.
*/
public TSDataOptimizerTask add(@NonNull Path file) {
add(file, null);
return this;
}
/**
* Start creating the optimized file. This operation resets the state of the
* optimizer task, so it can be re-used for subsequent invocations.
*
* @return A completeable future that yields the newly created file.
*/
public CompletableFuture<NewFile> run() {
LOG.log(Level.FINE, "starting optimized file creation for {0} files", files.size());
CompletableFuture<NewFile> fileCreation = new CompletableFuture<>();
final Map<Path, Reference<TSData>> jfpFiles = this.files; // We clear out files below, which makes fjpCreateTmpFile see an empty map if we don't use a separate variable.
TASK_POOL.execute(() -> createTmpFile(fileCreation, destDir, jfpFiles));
synchronized (OUTSTANDING) {
OUTSTANDING.add(fileCreation);
}
this.files = new HashMap<>(); // Do not use clear! This instance is now shared with the createTmpFile task.
return fileCreation;
}
/**
* The fork-join task that creates a new file. This function creates a
* temporary file with the result contents, then passes it on to the install
* thread which will put the final file in place.
*
* @param fileCreation the future that is to be completed after the
* operation.
* @param destDir the destination directory for the result; also used for
* temporary file creation.
* @param files the list of files that make up the resulting file.
*/
private static void createTmpFile(CompletableFuture<NewFile> fileCreation, Path destDir, Map<Path, Reference<TSData>> filesMap) {
LOG.log(Level.FINE, "starting temporary file creation...");
try {
final List<TSData> files;
try {
files = filesMap.entrySet().parallelStream()
.map(fileEntry -> {
try {
TSData tsdata = fileEntry.getValue().get();
if (tsdata == null)
tsdata = TSData.readonly(fileEntry.getKey());
return tsdata;
} catch (IOException ex) {
throw new RuntimeIOException(ex);
}
})
.sorted(Comparator.comparing(TSData::getBegin))
.collect(Collectors.toList());
filesMap = null; // Release resources.
} catch (RuntimeIOException ex) {
throw ex.getCause();
}
final FileChannel fd = FileUtil.createTempFile(destDir, "monsoon-", ".optimize-tmp");
try {
final DateTime begin;
try (ToXdrTables output = new ToXdrTables(fd, Compression.DEFAULT_OPTIMIZED)) {
while (!files.isEmpty()) {
TSData tsdata = files.remove(0);
if (fileCreation.isCancelled())
throw new IOException("aborted due to canceled execution");
output.addAll(tsdata);
}
begin = new DateTime(output.getHdrBegin(), DateTimeZone.UTC);
if (fileCreation.isCancelled())
throw new IOException("aborted due to canceled execution");
} // Closing output takes a lot of time.
if (fileCreation.isCancelled()) // Recheck after closing output.
throw new IOException("aborted due to canceled execution");
// Forward the temporary file to the installation, which will complete the operation.
INSTALL_POOL.execute(() -> install(fileCreation, destDir, fd, begin));
} catch (Error | RuntimeException | IOException ex) {
try {
fd.close();
} catch (Error | RuntimeException | IOException ex1) {
ex.addSuppressed(ex1);
}
throw ex;
}
} catch (Error | RuntimeException | IOException ex) {
LOG.log(Level.WARNING, "temporary file for optimization failure", ex);
synchronized (OUTSTANDING) {
OUTSTANDING.remove(fileCreation);
}
fileCreation.completeExceptionally(ex); // Propagate exceptions.
}
}
/**
* Installs the newly created file. This function runs on the install thread
* and essentially performs a copy-operation from the temporary file to the
* final file.
*
* @param fileCreation the completeable future that receives the newly
* created file.
* @param destDir the destination directory in which to install the result
* file.
* @param tmpFile the temporary file used to create the data; will be closed
* by this function.
* @param begin a timestamp indicating where this file begins; used to
* generate a pretty file name.
*/
private static void install(CompletableFuture<NewFile> fileCreation, Path destDir, FileChannel tmpFile, DateTime begin) {
try {
try {
synchronized (OUTSTANDING) {
OUTSTANDING.remove(fileCreation);
}
if (fileCreation.isCancelled())
throw new IOException("Installation aborted, due to cancellation.");
final FileUtil.NamedFileChannel newFile = FileUtil.createNewFile(destDir, prefixForTimestamp(begin), ".optimized");
try (Releaseable<FileChannel> out = new Releaseable<>(newFile.getFileChannel())) {
final long fileSize = tmpFile.size();
LOG.log(Level.INFO, "installing {0} ({1} MB)", new Object[]{newFile.getFileName(), fileSize / 1024.0 / 1024.0});
// Copy tmpFile to out.
long offset = 0;
while (offset < fileSize)
offset += tmpFile.transferTo(offset, fileSize - offset, out.get());
out.get().force(true); // Ensure new file is safely written to permanent storage.
// Complete future with newly created file.
fileCreation.complete(new NewFile(newFile.getFileName(), new ReadonlyTableFile(new GCCloseable<>(out.release()))));
} catch (Error | RuntimeException | IOException | OncRpcException ex) {
// Ensure new file gets destroyed if an error occurs during copying.
try {
Files.delete(newFile.getFileName());
} catch (Error | RuntimeException | IOException ex1) {
ex.addSuppressed(ex1);
}
throw ex;
}
} finally {
// Close tmp file that we got from fjpCreateTmpFile.
tmpFile.close();
}
} catch (Error | RuntimeException | IOException | OncRpcException ex) {
LOG.log(Level.WARNING, "unable to install new file", ex);
fileCreation.completeExceptionally(ex); // Propagate error to future.
}
}
/**
* Compute a prefix for a to-be-installed file.
*
* @param timestamp the timestamp on which to base the prefix.
* @return a file prefix that represents the timestamp in its name.
*/
private static String prefixForTimestamp(DateTime timestamp) {
return String.format("monsoon-%04d%02d%02d-%02d%02d", timestamp.getYear(), timestamp.getMonthOfYear(), timestamp.getDayOfMonth(), timestamp.getHourOfDay(), timestamp.getMinuteOfHour());
}
/**
* The result of installing a new optimized file.
*/
@RequiredArgsConstructor
@Getter
public static class NewFile {
/**
* The name of the newly installed file.
*/
@NonNull
private final Path name;
/**
* The contents of the newly installed file.
*/
@NonNull
private final TSData data;
}
private static class RuntimeIOException extends RuntimeException {
public RuntimeIOException(IOException ex) {
super(ex);
}
@Override
public IOException getCause() {
return (IOException) super.getCause();
}
}
}
|
history/src/main/java/com/groupon/lex/metrics/history/xdr/TSDataOptimizerTask.java
|
package com.groupon.lex.metrics.history.xdr;
import com.groupon.lex.metrics.history.TSData;
import com.groupon.lex.metrics.history.TSDataVersionDispatch.Releaseable;
import com.groupon.lex.metrics.history.v2.Compression;
import com.groupon.lex.metrics.history.v2.tables.ReadonlyTableFile;
import com.groupon.lex.metrics.history.v2.tables.ToXdrTables;
import com.groupon.lex.metrics.history.xdr.support.FileUtil;
import com.groupon.lex.metrics.lib.GCCloseable;
import java.io.IOException;
import java.lang.ref.Reference;
import java.lang.ref.WeakReference;
import java.nio.channels.FileChannel;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Collection;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.logging.Level;
import java.util.logging.Logger;
import lombok.Getter;
import lombok.NonNull;
import lombok.RequiredArgsConstructor;
import org.acplt.oncrpc.OncRpcException;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
/**
* A task that optimizes a set of files into a single table file.
*
* @author ariane
*/
public class TSDataOptimizerTask {
private static final Logger LOG = Logger.getLogger(TSDataOptimizerTask.class.getName());
/**
* The task pool handles the creation of temporary files containing all
* data. The task is highly CPU bound (especially the gathering of data
* stage in ToXdrTables). Limiting it to 1 thread ensures the tasks don't
* overwhelm the ForkJoinPool and the limit of 1 thread means multiple
* compression actions will be queued one-after-the-other.
*
* The task itself mainly uses the ForkJoinPool, so this thread spends most
* of the time waiting for work to complete (or new work to come in).
*/
private static final ExecutorService TASK_POOL = Executors.newFixedThreadPool(1);
/**
* The install pool handles the file installation part of creating a new
* tables file. It is IO bound, simply copying from a temporary file to the
* final file.
*
* Since it's IO bound, it won't interfere with ForkJoinPool tasks. It is a
* separate thread from the task pool, to allow the task pool to pick up a
* new file while the old file is being written out.
*/
private static final ExecutorService INSTALL_POOL = Executors.newFixedThreadPool(1);
/**
* List of outstanding futures. The list is used during program termination
* to cancel all incomplete futures and thus shut down any dependant tasks
* properly.
*
* Access is {@code synchronized(OUTSTANDING)}.
*/
private static final List<CompletableFuture<NewFile>> OUTSTANDING = new LinkedList<>();
/**
* Destination directory in which to write files. Also used as a location
* for temporary files.
*/
private final Path destDir;
/**
* List of files to add to the generated tables file.
*/
private Map<Path, Reference<TSData>> files = new HashMap<>();
static {
// Create shutdown hook that cancels all outstanding futures and tears
// down the threads.
Runtime.getRuntime().addShutdownHook(new Thread(() -> {
try {
synchronized (OUTSTANDING) {
OUTSTANDING.forEach(fut -> fut.cancel(false));
}
INSTALL_POOL.shutdown();
TASK_POOL.shutdown();
if (!INSTALL_POOL.awaitTermination(30, TimeUnit.SECONDS))
LOG.log(Level.WARNING, "Install pool did not shut down after 30 seconds.");
} catch (InterruptedException ex) {
LOG.log(Level.SEVERE, "Interrupted while waiting for clean shutdown of install pool.", ex);
}
}));
}
/**
* Create a new optimizer task and fill it with the given files.
*
* @param destDir the destination directory in which the new file will be
* placed.
* @param files zero or more files to add; note that the value of the map is
* allowed to be null.
*/
public TSDataOptimizerTask(@NonNull Path destDir, @NonNull Map<Path, TSData> files) {
this(destDir);
files.forEach(this::add);
}
/**
* Create a new optimizer task and fill it with the given files.
*
* @param destDir the destination directory in which the new file will be
* placed.
* @param files a list of filenames to add.
*/
public TSDataOptimizerTask(@NonNull Path destDir, @NonNull Collection<Path> files) {
this(destDir);
files.forEach(this::add);
}
/**
* Create a new optimizer task and fill it with the given files.
*
* @param destDir the destination directory in which the new file will be
* placed.
*/
public TSDataOptimizerTask(@NonNull Path destDir) {
this.destDir = destDir;
if (!Files.isDirectory(destDir))
throw new IllegalArgumentException(destDir + " is not a directory");
}
/**
* Add a file to the collection of files that will make up the final
* optimized file.
*
* @param file The name of the file to be added.
* @param tsdata The TSData obtained when opening this file; may be null.
* @return this TSDataOptimizerTask.
*/
public TSDataOptimizerTask add(@NonNull Path file, TSData tsdata) {
files.put(file, new WeakReference<>(tsdata));
return this;
}
/**
* Add a file to the collection of files that will make up the final
* optimized file.
*
* @param file The name of the file to be added.
* @return this TSDataOptimizerTask.
*/
public TSDataOptimizerTask add(@NonNull Path file) {
add(file, null);
return this;
}
/**
* Start creating the optimized file. This operation resets the state of the
* optimizer task, so it can be re-used for subsequent invocations.
*
* @return A completeable future that yields the newly created file.
*/
public CompletableFuture<NewFile> run() {
LOG.log(Level.FINE, "starting optimized file creation for {0} files", files.size());
CompletableFuture<NewFile> fileCreation = new CompletableFuture<>();
final Map<Path, Reference<TSData>> jfpFiles = this.files; // We clear out files below, which makes fjpCreateTmpFile see an empty map if we don't use a separate variable.
TASK_POOL.execute(() -> createTmpFile(fileCreation, destDir, jfpFiles));
synchronized (OUTSTANDING) {
OUTSTANDING.add(fileCreation);
}
this.files = new HashMap<>(); // Do not use clear! This instance is now shared with the createTmpFile task.
return fileCreation;
}
/**
* The fork-join task that creates a new file. This function creates a
* temporary file with the result contents, then passes it on to the install
* thread which will put the final file in place.
*
* @param fileCreation the future that is to be completed after the
* operation.
* @param destDir the destination directory for the result; also used for
* temporary file creation.
* @param files the list of files that make up the resulting file.
*/
private static void createTmpFile(CompletableFuture<NewFile> fileCreation, Path destDir, Map<Path, Reference<TSData>> files) {
LOG.log(Level.FINE, "starting temporary file creation...");
try {
final FileChannel fd = FileUtil.createTempFile(destDir, "monsoon-", ".optimize-tmp");
try {
final DateTime begin;
try (ToXdrTables output = new ToXdrTables(fd, Compression.DEFAULT_OPTIMIZED)) {
for (Map.Entry<Path, Reference<TSData>> entry : files.entrySet()) {
if (fileCreation.isCancelled())
throw new IOException("aborted due to canceled execution");
TSData tsdata = entry.getValue().get();
if (tsdata == null)
tsdata = TSData.readonly(entry.getKey());
output.addAll(tsdata);
}
begin = new DateTime(output.getHdrBegin(), DateTimeZone.UTC);
if (fileCreation.isCancelled())
throw new IOException("aborted due to canceled execution");
} // Closing output takes a lot of time.
if (fileCreation.isCancelled()) // Recheck after closing output.
throw new IOException("aborted due to canceled execution");
// Forward the temporary file to the installation, which will complete the operation.
INSTALL_POOL.execute(() -> install(fileCreation, destDir, fd, begin));
} catch (Error | RuntimeException | IOException ex) {
try {
fd.close();
} catch (Error | RuntimeException | IOException ex1) {
ex.addSuppressed(ex1);
}
throw ex;
}
} catch (Error | RuntimeException | IOException ex) {
LOG.log(Level.WARNING, "temporary file for optimization failure", ex);
synchronized (OUTSTANDING) {
OUTSTANDING.remove(fileCreation);
}
fileCreation.completeExceptionally(ex); // Propagate exceptions.
}
}
/**
* Installs the newly created file. This function runs on the install thread
* and essentially performs a copy-operation from the temporary file to the
* final file.
*
* @param fileCreation the completeable future that receives the newly
* created file.
* @param destDir the destination directory in which to install the result
* file.
* @param tmpFile the temporary file used to create the data; will be closed
* by this function.
* @param begin a timestamp indicating where this file begins; used to
* generate a pretty file name.
*/
private static void install(CompletableFuture<NewFile> fileCreation, Path destDir, FileChannel tmpFile, DateTime begin) {
try {
try {
synchronized (OUTSTANDING) {
OUTSTANDING.remove(fileCreation);
}
if (fileCreation.isCancelled())
throw new IOException("Installation aborted, due to cancellation.");
final FileUtil.NamedFileChannel newFile = FileUtil.createNewFile(destDir, prefixForTimestamp(begin), ".optimized");
try (Releaseable<FileChannel> out = new Releaseable<>(newFile.getFileChannel())) {
final long fileSize = tmpFile.size();
LOG.log(Level.INFO, "installing {0} ({1} MB)", new Object[]{newFile.getFileName(), fileSize / 1024.0 / 1024.0});
// Copy tmpFile to out.
long offset = 0;
while (offset < fileSize)
offset += tmpFile.transferTo(offset, fileSize - offset, out.get());
out.get().force(true); // Ensure new file is safely written to permanent storage.
// Complete future with newly created file.
fileCreation.complete(new NewFile(newFile.getFileName(), new ReadonlyTableFile(new GCCloseable<>(out.release()))));
} catch (Error | RuntimeException | IOException | OncRpcException ex) {
// Ensure new file gets destroyed if an error occurs during copying.
try {
Files.delete(newFile.getFileName());
} catch (Error | RuntimeException | IOException ex1) {
ex.addSuppressed(ex1);
}
throw ex;
}
} finally {
// Close tmp file that we got from fjpCreateTmpFile.
tmpFile.close();
}
} catch (Error | RuntimeException | IOException | OncRpcException ex) {
LOG.log(Level.WARNING, "unable to install new file", ex);
fileCreation.completeExceptionally(ex); // Propagate error to future.
}
}
/**
* Compute a prefix for a to-be-installed file.
*
* @param timestamp the timestamp on which to base the prefix.
* @return a file prefix that represents the timestamp in its name.
*/
private static String prefixForTimestamp(DateTime timestamp) {
return String.format("monsoon-%04d%02d%02d-%02d%02d", timestamp.getYear(), timestamp.getMonthOfYear(), timestamp.getDayOfMonth(), timestamp.getHourOfDay(), timestamp.getMinuteOfHour());
}
/**
* The result of installing a new optimized file.
*/
@RequiredArgsConstructor
@Getter
public static class NewFile {
/**
* The name of the newly installed file.
*/
@NonNull
private final Path name;
/**
* The contents of the newly installed file.
*/
@NonNull
private final TSData data;
}
}
|
When optimizing multiple files, handle them in order (reduces number of blocks in tables file).
|
history/src/main/java/com/groupon/lex/metrics/history/xdr/TSDataOptimizerTask.java
|
When optimizing multiple files, handle them in order (reduces number of blocks in tables file).
|
|
Java
|
bsd-3-clause
|
adb66d660fd9434f964f5c89c0a78157878b6213
| 0
|
dejlek/jlib
|
package com.areen.jlib.gui;
import java.awt.Dimension;
import java.util.Vector;
import javax.swing.ComboBoxModel;
import javax.swing.JComboBox;
/**
* This class implements a JComboBox which takes into account size of its components and make the pop-up
* window wide enough.
*
* The code has been borrowed from http://www.jroller.com/santhosh/entry/make_jcombobox_popup_wide_enough
*
* Santhosh took the code from the http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=4618607 and improved
* it further.
*/
public class WideComboBox extends JComboBox {
private boolean layingOut = false;
private int ww;
private int hh;
public WideComboBox() {
super();
}
public WideComboBox(final Object[] items) {
super(items);
}
public WideComboBox(Vector items) {
super(items);
}
public WideComboBox(ComboBoxModel aModel) {
super(aModel);
}
/**
* @{@inheritDoc }
*/
@Override
public void doLayout() {
try {
layingOut = true;
super.doLayout();
} finally {
layingOut = false;
} // finally
} // doLayout() method
/**
* @{@inheritDoc }
*
* @return Dimension object containing a much better dimension than the one from the JComboBox.
*/
@Override
public Dimension getSize() {
Dimension curDim = super.getSize();
if (!layingOut) {
ww = Math.max(ww, curDim.width);
ww = Math.max(ww, getPreferredSize().width);
hh = Math.max(hh, curDim.height);
hh = Math.max(hh, getPreferredSize().height);
} // if
return new Dimension(ww, hh);
} // getSize() method
} // WideComboBox class
// $Id$
|
src/main/java/com/areen/jlib/gui/WideComboBox.java
|
package com.areen.jlib.gui;
import java.awt.Dimension;
import java.util.Vector;
import javax.swing.ComboBoxModel;
import javax.swing.JComboBox;
/**
* This class implements a JComboBox which takes into account size of its components and make the pop-up
* window wide enough.
*
* The code has been borrowed from http://www.jroller.com/santhosh/entry/make_jcombobox_popup_wide_enough
*
* Santhosh took the code from the http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=4618607 and improved
* it further.
*/
public class WideComboBox extends JComboBox {
private boolean layingOut = false;
private Dimension dim = new Dimension(0, 0);
public WideComboBox() {
super();
}
public WideComboBox(final Object[] items) {
super(items);
}
public WideComboBox(Vector items) {
super(items);
}
public WideComboBox(ComboBoxModel aModel) {
super(aModel);
}
/**
* @{@inheritDoc }
*/
@Override
public void doLayout() {
try {
layingOut = true;
super.doLayout();
} finally {
layingOut = false;
} // finally
} // doLayout() method
/**
* @{@inheritDoc }
*
* @return Dimension object containing a much better dimension than the one from the JComboBox.
*/
@Override
public Dimension getSize() {
Dimension curDim = super.getSize();
if (!layingOut) {
dim.width = Math.max(dim.width, curDim.width);
dim.width = Math.max(dim.width, getPreferredSize().width);
dim.height = Math.max(dim.height, curDim.height);
dim.height = Math.max(dim.height, getPreferredSize().height);
}
return dim;
} // getSize() method
} // WideComboBox class
// $Id$
|
This commit hopefully fixes the resizing problems.
Related issue(s): APC-613
git-svn-id: a6571597954494654385eb5e6693f686f629ea20@178 bee1aa47-a06f-436c-a2f2-6a1efea3c100
|
src/main/java/com/areen/jlib/gui/WideComboBox.java
|
This commit hopefully fixes the resizing problems. Related issue(s): APC-613
|
|
Java
|
bsd-3-clause
|
abad5ad4b9bafeb0616f28a3dad49da7162678fc
| 0
|
jthrun/sdl_android,anildahiya/sdl_android,jthrun/sdl_android,smartdevicelink/sdl_android
|
package com.smartdevicelink;
import android.content.Context;
import android.net.Uri;
import android.support.test.InstrumentationRegistry;
import junit.framework.TestCase;
public class AndroidTestCase2 extends TestCase {
public Context mContext;
public AndroidTestCase2(){
mContext = InstrumentationRegistry.getTargetContext();
}
public Context getContext(){
return mContext;
}
public void setContext(Context context){}
public void assertActivityRequiresPermission(String packageName, String className, String permission){}
public void assertReadingContentUriRequiresPermission(Uri uri, String permission){}
public void assertWritingContentUriRequiresPermission(Uri uri, String permission){}
protected void scrubClass(Class<?> testCaseClass){}
protected void setUp() throws Exception{}
protected void tearDown() throws Exception{}
}
|
sdl_android/src/androidTest/java/com/smartdevicelink/AndroidTestCase2.java
|
package com.smartdevicelink;
import android.content.Context;
import android.net.Uri;
import android.support.test.InstrumentationRegistry;
import junit.framework.TestCase;
public class AndroidTestCase2 extends TestCase {
public Context mContext;
public AndroidTestCase2(){
mContext = InstrumentationRegistry.getTargetContext();
try {
setUp();
} catch (Exception e) {
e.printStackTrace();
}
}
public Context getContext(){
return mContext;
}
public void setContext(Context context){}
public void assertActivityRequiresPermission(String packageName, String className, String permission){}
public void assertReadingContentUriRequiresPermission(Uri uri, String permission){}
public void assertWritingContentUriRequiresPermission(Uri uri, String permission){}
protected void scrubClass(Class<?> testCaseClass){}
protected void setUp() throws Exception{}
protected void tearDown() throws Exception{}
}
|
Remove unnecessary call to setUp in AndroidTestCase2 constructor
|
sdl_android/src/androidTest/java/com/smartdevicelink/AndroidTestCase2.java
|
Remove unnecessary call to setUp in AndroidTestCase2 constructor
|
|
Java
|
isc
|
09a3bb8e46da52b7e5917c73b001b1d11d977ad7
| 0
|
simmel/SecurerString,simmel/SecurerString
|
import org.junit.After;
import java.lang.management.*;
import java.io.*;
public class SearchInHeap {
@After public void searchInHeap() throws IOException, InterruptedException {
String pid = ManagementFactory.getRuntimeMXBean().getName().replaceAll("@.*", "");
System.out.println(pid);
String tmp = System.getProperty("temporaryDir");
String hprof = tmp + "/" + pid + ".hprof";
System.out.println(hprof);
String jhatPort = Integer.toString(7000 + Integer.parseInt(System.getProperty("org.gradle.test.worker")));
System.out.println(jhatPort);
Runtime.getRuntime().exec(new String[] {"jmap", String.format("-dump:format=b,file=%s", hprof), pid}).waitFor();
new File(hprof).delete();
}
}
|
src/test/java/SearchInHeap.java
|
import org.junit.After;
import java.lang.management.*;
public class SearchInHeap {
@After public void searchInHeap() {
String pid = ManagementFactory.getRuntimeMXBean().getName().replaceAll("@.*", "");
System.out.println(pid);
String tmp = System.getProperty("temporaryDir");
String hprof = tmp + "/" + pid + ".hprof";
System.out.println(hprof);
String jhatPort = Integer.toString(7000 + Integer.parseInt(System.getProperty("org.gradle.test.worker")));
System.out.println(jhatPort);
}
}
|
Dump the heap with jmap
|
src/test/java/SearchInHeap.java
|
Dump the heap with jmap
|
|
Java
|
mit
|
c6432c9922157c9c48397aa06ffae3ffde31c9bd
| 0
|
Coryoka/Visziektes,Coryoka/Visziektes
|
package Main;
import Domain.Gebruiker;
import datasource.AquariumDAO;
import javafx.event.ActionEvent;
import javafx.event.EventHandler;
import javafx.fxml.FXML;
import javafx.fxml.FXMLLoader;
import javafx.fxml.Initializable;
import javafx.scene.Parent;
import javafx.scene.Scene;
import javafx.scene.control.Button;
import javafx.scene.control.TextField;
import javafx.stage.Stage;
import java.io.IOException;
import java.net.URL;
import java.sql.SQLException;
import java.util.ResourceBundle;
public class AquariumToevoegenController implements Initializable {
private Gebruiker gebruiker;
private AquariumDAO aquariumDAO;
@FXML TextField VolumeInLiters;
@FXML TextField Tempratuur;
@FXML TextField AqWatertype;
@FXML TextField AantalverversingenPerWeek;
@FXML TextField ProcentwaterPerVerversing;
@FXML TextField AqLengteInCm;
@FXML TextField AqBreedteinCm;
@FXML TextField AqHoogteInCm;
@FXML Button AqAdd;
@FXML Button AqAnnuleren;
public AquariumToevoegenController(Gebruiker gebruiker){this.gebruiker=gebruiker;}
@Override
public void initialize(URL location, ResourceBundle resources) {
try {
aquariumDAO = new AquariumDAO();
AqAnnuleren.setOnAction(new EventHandler<ActionEvent>() {
@Override
public void handle(ActionEvent event) {
FXMLLoader loader = new FXMLLoader(getClass().getClassLoader().getResource("aquariums.fxml"));
AquariumsController controller = new AquariumsController(gebruiker);
loader.setController(controller);
Stage stage = (Stage)AqAnnuleren.getScene().getWindow();
Parent root = null;
try {
root = loader.load();
} catch (Exception e) {
e.printStackTrace();
}
Scene scene = new Scene(root, 1280, 720);
stage.setScene(scene);
stage.show();
}
});
AqAdd.setOnAction(new EventHandler<ActionEvent>() {
@Override
public void handle(ActionEvent event) {
try {
aquariumDAO.InsertAquarium(gebruiker.getNaam(),Integer.parseInt(VolumeInLiters.getText()),Integer.parseInt(Tempratuur.getText()),AqWatertype.getText(),Integer.parseInt(AantalverversingenPerWeek.getText()),
Integer.parseInt(ProcentwaterPerVerversing.getText()),Integer.parseInt(AqLengteInCm.getText()),Integer.parseInt(AqBreedteinCm.getText()),Integer.parseInt(AqHoogteInCm.getText()));
FXMLLoader loader = new FXMLLoader(getClass().getClassLoader().getResource("aquariums.fxml"));
AquariumsController controller = new AquariumsController(gebruiker);
loader.setController(controller);
Stage stage = (Stage)AqAdd.getScene().getWindow();
Parent root = null;
try {
root = loader.load();
} catch (Exception e) {
e.printStackTrace();
}
Scene scene = new Scene(root, 1280, 720);
stage.setScene(scene);
stage.show();
} catch (SQLException e) {
e.printStackTrace();
}
}
});
} catch (IOException e) {
e.printStackTrace();
} catch (ClassNotFoundException e) {
e.printStackTrace();
}
}
}
|
src/main/java/Main/AquariumToevoegenController.java
|
package Main;
import Domain.Gebruiker;
import datasource.AquariumDAO;
import javafx.event.ActionEvent;
import javafx.event.EventHandler;
import javafx.fxml.FXML;
import javafx.fxml.FXMLLoader;
import javafx.fxml.Initializable;
import javafx.scene.Parent;
import javafx.scene.Scene;
import javafx.scene.control.Button;
import javafx.scene.control.TextField;
import javafx.stage.Stage;
import java.io.IOException;
import java.net.URL;
import java.sql.SQLException;
import java.util.ResourceBundle;
public class AquariumToevoegenController implements Initializable {
private Gebruiker gebruiker;
private AquariumDAO aquariumDAO;
@FXML TextField VolumeInLiters;
@FXML TextField Tempratuur;
@FXML TextField AqWatertype;
@FXML TextField AantalverversingenPerWeek;
@FXML TextField ProcentwaterPerVerversing;
@FXML TextField AqLengteInCm;
@FXML TextField AqBreedteinCm;
@FXML TextField AqHoogteInCm;
@FXML Button AqAdd;
@FXML Button AqAnnuleren;
public AquariumToevoegenController(Gebruiker gebruiker){this.gebruiker=gebruiker;}
@Override
public void initialize(URL location, ResourceBundle resources) {
try {
aquariumDAO = new AquariumDAO();
AqAnnuleren.setOnAction(new EventHandler<ActionEvent>() {
@Override
public void handle(ActionEvent event) {
FXMLLoader loader = new FXMLLoader(getClass().getClassLoader().getResource("aquariums.fxml"));
AquariumsController controller = new AquariumsController(gebruiker);
loader.setController(controller);
Stage stage = (Stage)AqAnnuleren.getScene().getWindow();
Parent root = null;
try {
root = loader.load();
} catch (Exception e) {
e.printStackTrace();
}
Scene scene = new Scene(root, 1280, 720);
stage.setScene(scene);
stage.show();
}
});
AqAdd.setOnAction(new EventHandler<ActionEvent>() {
@Override
public void handle(ActionEvent event) {
try {
aquariumDAO.InsertAquarium(gebruiker.getNaam(),Integer.parseInt(VolumeInLiters.getText()),Integer.parseInt(Tempratuur.getText()),AqWatertype.getText(),Integer.parseInt(AantalverversingenPerWeek.getText()),
Integer.parseInt(ProcentwaterPerVerversing.getText()),Integer.parseInt(AqLengteInCm.getText()),Integer.parseInt(AqBreedteinCm.getText()),Integer.parseInt(AqHoogteInCm.getText()));
FXMLLoader loader = new FXMLLoader(getClass().getClassLoader().getResource("aquariums.fxml"));
AquariumsController controller = new AquariumsController(gebruiker);
loader.setController(controller);
Stage stage = (Stage)AqAdd.getScene().getWindow();
Parent root = null;
try {
root = loader.load();
} catch (Exception e) {
e.printStackTrace();
}
Scene scene = new Scene(root, 1280, 720);
stage.setScene(scene);
stage.show();
} catch (SQLException e) {
e.printStackTrace();
}}
});
} catch (IOException e) {
e.printStackTrace();
} catch (ClassNotFoundException e) {
e.printStackTrace();
}
}
}
|
Nieuw aquarium toevoegen
|
src/main/java/Main/AquariumToevoegenController.java
|
Nieuw aquarium toevoegen
|
|
Java
|
mit
|
4906e9102bac087d262ad3a3d11cbe2dcb18882d
| 0
|
pearlqueen/java-simple-mvc
|
/**
* Copyright (c) 2010 Daniel Murphy
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
/**
* Created at 2:47:16 PM, Apr 5, 2010
*/
package com.dmurph.mvc.util;
import java.beans.PropertyChangeListener;
import java.beans.PropertyChangeSupport;
import java.util.ArrayList;
import com.dmurph.mvc.ICloneable;
import com.dmurph.mvc.IDirtyable;
import com.dmurph.mvc.IModel;
import com.dmurph.mvc.IRevertable;
/**
* A full mvc implementation of an {@link ArrayList}. Supports all operations in {@link ICloneable}, {@link IDirtyable},
* and {@link IRevertable}. Also fires property change events for the size of the array ({@link #SIZE}) and the dirty value
* ({@link #DIRTY}), and if an element in the array changed ({@link #ELEMENT}).
*
* @author Daniel Murphy
*/
public class MVCArrayList<E extends Object> extends ArrayList<E> implements IModel, ICloneable, IDirtyable, IRevertable {
private static final long serialVersionUID = 4890270966369581329L;
/**
* Dirty property name for listening to property change events
* @see #addPropertyChangeListener(PropertyChangeListener)
*/
public static final String DIRTY = "ARRAY_LIST_DIRTY";
/**
* Array size property name for listening to property change events
* @see #addPropertyChangeListener(PropertyChangeListener)
*/
public static final String SIZE = "ARRAY_LIST_SIZE";
/**
* A value in the array was changed.
* @see #addPropertyChangeListener(PropertyChangeListener)
*/
public static final String ELEMENT = "ARRAY_LIST_ELEMENT_CHANGED";
private boolean dirty = false;
private final ArrayList<E> saved = new ArrayList<E>();
private final PropertyChangeSupport propertyChangeSupport;
public MVCArrayList(){
propertyChangeSupport = new PropertyChangeSupport(this);
}
@Override
public boolean add(E e) {
boolean ret = super.add(e);
firePropertyChange(SIZE, size() - 1, size());
boolean old = dirty;
dirty = true;
firePropertyChange(DIRTY, old, dirty);
return ret;
}
@Override
public void clear() {
if(size() > 0){
int oldSize = size();
super.clear();
firePropertyChange(SIZE, oldSize, 0);
boolean old = dirty;
dirty = true;
firePropertyChange(DIRTY, old, dirty);
}
}
@Override
public E remove(int index) {
E ret = super.remove(index);
firePropertyChange(SIZE, size() - 1, size());
boolean old = dirty;
dirty = true;
firePropertyChange(DIRTY, old, dirty);
return ret;
}
@Override
public boolean remove(Object o) {
boolean ret = super.remove(o);
firePropertyChange(SIZE, size() - 1, size());
boolean old = dirty;
dirty = true;
firePropertyChange(DIRTY, old, dirty);
return ret;
}
@Override
public E set(int index, E element) {
E ret = super.set(index, element);
firePropertyChange(ELEMENT, ret, element);
boolean old = dirty;
dirty = true;
firePropertyChange(DIRTY, old, dirty);
return ret;
}
/**
* Clones from another {@link ArrayList}, if the values are {@link ICloneable}, then
* they will be cloned to this one. Otherwise it's a shallow copy (just sets the same values).
* @param argOther an {@link ArrayList}
* @see com.dmurph.mvc.ICloneable#cloneFrom(com.dmurph.mvc.ICloneable)
*/
@SuppressWarnings("unchecked")
@Override
public void cloneFrom( ICloneable argOther) {
MVCArrayList<E> other = (MVCArrayList<E>) argOther;
clear();
for(E e : other){
if(e instanceof ICloneable){
add((E) ((ICloneable) e).clone());
}else{
add(e);
}
}
saved.clear();
for(E e : other.saved){
if(e instanceof ICloneable){
saved.add((E) ((ICloneable) e).clone());
}else{
saved.add(e);
}
}
this.dirty = other.dirty;
}
// do shallow clone, need to keep object references
private void setFromSaved(){
clear();
for(E e: saved){
add(e);
}
}
// do shallow clone, need to keep object references
private void setToSaved(){
saved.clear();
for(E e: this){
saved.add(e);
}
}
/**
* @see com.dmurph.mvc.IModel#addPropertyChangeListener(java.beans.PropertyChangeListener)
*/
public void addPropertyChangeListener(PropertyChangeListener argListener) {
propertyChangeSupport.addPropertyChangeListener(argListener);
}
/**
* @see com.dmurph.mvc.IModel#removePropertyChangeListener(java.beans.PropertyChangeListener)
*/
public void removePropertyChangeListener(PropertyChangeListener argListener) {
propertyChangeSupport.removePropertyChangeListener(argListener);
}
/**
* Fires a property change event. If the argOldValue == argNewValue
* or argOldValue.equals( argNewValue) then no event is thrown.
* @param argPropertyName property name, should match the get and set methods for property name
* @param argOldValue
* @param argNewValue
*/
private void firePropertyChange(String argPropertyName, Object argOldValue, Object argNewValue) {
propertyChangeSupport.firePropertyChange(argPropertyName, argOldValue, argNewValue);
}
/**
* Clones this object to another {@link MVCArrayList}. If the array values
* are also {@link ICloneable}, then they will be cloned as well. If not, the values
* are just set (shallow copy).
* @see java.util.ArrayList#clone()
*/
@Override
public ICloneable clone(){
MVCArrayList<E> other = new MVCArrayList<E>();
other.cloneFrom(this);
return other;
}
/**
* Also chacks to see if elements in this
* array are dirty, if any are {@link IDirtyable}.
* @see com.dmurph.mvc.IDirtyable#isDirty()
*/
@Override
public boolean isDirty() {
if(dirty){
return true;
}
for(E e : this){
if(e instanceof IDirtyable){
if(((IDirtyable) e).isDirty()){
return true;
}
}
}
return false;
}
/**
* Sets the dirty variable and, if argDirty is false,
* then will call {@link IDirtyable#setDirty(boolean)} on
* all {@link IDirtyable} objects in this array.
* @see com.dmurph.mvc.IDirtyable#setDirty(boolean)
*/
@Override
public boolean setDirty( boolean argDirty) {
boolean oldDirty = dirty;
dirty = argDirty;
if(!dirty){
for(E e: this){
if(e instanceof IDirtyable){
((IDirtyable) e).setDirty(dirty);
}
}
}
firePropertyChange(DIRTY, oldDirty, dirty);
return oldDirty;
}
/**
* Also calls {@link IRevertable#revertChanges()} on all
* objects in the reverted array that are {@link IRevertable}.
* @see com.dmurph.mvc.IRevertable#revertChanges()
*/
@Override
public boolean revertChanges() {
if(!isDirty()){
return false;
}
setFromSaved();
for(E e: this){
if(e instanceof IRevertable){
((IRevertable) e).revertChanges();
}
}
return true;
}
/**
* Also calls {@link IRevertable#saveChanges()()} on all
* objects in the reverted array that are {@link IRevertable}.
* @see com.dmurph.mvc.IRevertable#saveChanges()
*/
@Override
public boolean saveChanges() {
if(!isDirty()){
return false;
}
setToSaved();
for(E e: this){
if(e instanceof IRevertable){
((IRevertable) e).saveChanges();
}
}
return true;
}
}
|
src/main/java/com/dmurph/mvc/util/MVCArrayList.java
|
/**
* Copyright (c) 2010 Daniel Murphy
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
/**
* Created at 2:47:16 PM, Apr 5, 2010
*/
package com.dmurph.mvc.util;
import java.beans.PropertyChangeListener;
import java.beans.PropertyChangeSupport;
import java.util.ArrayList;
import com.dmurph.mvc.ICloneable;
import com.dmurph.mvc.IDirtyable;
import com.dmurph.mvc.IModel;
import com.dmurph.mvc.IRevertable;
/**
* A full mvc implementation of an arraylist. Supports all operations in {@link ICloneable}, {@link IDirtyable},
* and {@link IRevertable}. Also fires property change events when the dirty variable is changed.
*
* @author Daniel Murphy
*/
public class MVCArrayList<E extends Object> extends ArrayList<E> implements IModel, ICloneable, IDirtyable, IRevertable {
private static final long serialVersionUID = 4890270966369581329L;
/**
* Dirty property name.
*/
public static final String DIRTY = "ARRAY_LIST_DIRTY";
private boolean dirty = false;
private final ArrayList<E> orig = new ArrayList<E>();
private final PropertyChangeSupport propertyChangeSupport;
public MVCArrayList(){
propertyChangeSupport = new PropertyChangeSupport(this);
}
@Override
public boolean add(E e) {
boolean ret = super.add(e);
boolean old = dirty;
dirty = true;
firePropertyChange(DIRTY, old, dirty);
return ret;
}
@Override
public void clear() {
if(size() > 0){
boolean old = dirty;
dirty = true;
firePropertyChange(DIRTY, old, dirty);
super.clear();
}
}
@Override
public E remove(int index) {
E ret = super.remove(index);
boolean old = dirty;
dirty = true;
firePropertyChange(DIRTY, old, dirty);
return ret;
}
@Override
public boolean remove(Object o) {
boolean ret = super.remove(o);
boolean old = dirty;
dirty = true;
firePropertyChange(DIRTY, old, dirty);
return ret;
}
@Override
public E set(int index, E element) {
E ret = super.set(index, element);
boolean old = dirty;
dirty = true;
firePropertyChange(DIRTY, old, dirty);
return ret;
}
/**
* Clones from another {@link ArrayList}, if the values are {@link ICloneable}, then
* they will be cloned to this one. Otherwise it's a shallow copy (just sets the same values).
* @param argOther an {@link ArrayList}
* @see com.dmurph.mvc.ICloneable#cloneFrom(com.dmurph.mvc.ICloneable)
*/
@SuppressWarnings("unchecked")
@Override
public void cloneFrom( ICloneable argOther) {
MVCArrayList<E> other = (MVCArrayList<E>) argOther;
clear();
for(E e : other){
if(e instanceof ICloneable){
add((E) ((ICloneable) e).clone());
}else{
add(e);
}
}
orig.clear();
for(E e : other.orig){
if(e instanceof ICloneable){
orig.add((E) ((ICloneable) e).clone());
}else{
orig.add(e);
}
}
}
// do shallow clone, need to keep object references
private void cloneFromOrig(){
clear();
for(E e: orig){
add(e);
}
}
// do shallow clone, need to keep object references
private void cloneToOrig(){
orig.clear();
for(E e: this){
orig.add(e);
}
}
/**
* @see com.dmurph.mvc.IModel#addPropertyChangeListener(java.beans.PropertyChangeListener)
*/
public void addPropertyChangeListener(PropertyChangeListener argListener) {
propertyChangeSupport.addPropertyChangeListener(argListener);
}
/**
* @see com.dmurph.mvc.IModel#removePropertyChangeListener(java.beans.PropertyChangeListener)
*/
public void removePropertyChangeListener(PropertyChangeListener argListener) {
propertyChangeSupport.removePropertyChangeListener(argListener);
}
/**
* Fires a property change event. If the argOldValue == argNewValue
* or argOldValue.equals( argNewValue) then no event is thrown.
* @param argPropertyName property name, should match the get and set methods for property name
* @param argOldValue
* @param argNewValue
*/
private void firePropertyChange(String argPropertyName, Object argOldValue, Object argNewValue) {
propertyChangeSupport.firePropertyChange(argPropertyName, argOldValue, argNewValue);
}
/**
* Clones this object to another {@link MVCArrayList}. If the array values
* are also {@link ICloneable}, then they will be cloned as well. If not, the values
* are just set (shallow copy).
* @see java.util.ArrayList#clone()
*/
@Override
public ICloneable clone(){
MVCArrayList<E> other = new MVCArrayList<E>();
other.cloneFrom(this);
return other;
}
/**
* @see com.dmurph.mvc.IDirtyable#isDirty()
*/
@Override
public boolean isDirty() {
if(dirty){
return true;
}
for(E e : this){
if(e instanceof IDirtyable){
if(((IDirtyable) e).isDirty()){
return true;
}
}
}
return false;
}
/**
* Just sets the dirty variable
* @see com.dmurph.mvc.IDirtyable#setDirty(boolean)
*/
@Override
public boolean setDirty( boolean argDirty) {
boolean oldDirty = dirty;
dirty = argDirty;
firePropertyChange(DIRTY, oldDirty, dirty);
return oldDirty;
}
/**
* @see com.dmurph.mvc.IRevertable#revertChanges()
*/
@Override
public boolean revertChanges() {
if(!isDirty()){
return false;
}
cloneFromOrig();
for(E e: this){
if(e instanceof IRevertable){
((IRevertable) e).revertChanges();
}
}
return true;
}
/**
* @see com.dmurph.mvc.IRevertable#saveChanges()
*/
@Override
public boolean saveChanges() {
if(!isDirty()){
return false;
}
cloneToOrig();
for(E e: this){
if(e instanceof IRevertable){
((IRevertable) e).saveChanges();
}
}
return true;
}
}
|
updated MVCArrayList to send size changed and element changed properties
|
src/main/java/com/dmurph/mvc/util/MVCArrayList.java
|
updated MVCArrayList to send size changed and element changed properties
|
|
Java
|
mit
|
0a130a395ce65ef6cba53ccdbd68485446379018
| 0
|
mopsalarm/Pr0,mopsalarm/Pr0,mopsalarm/Pr0
|
package com.pr0gramm.app.api.pr0gramm;
import android.content.SharedPreferences;
import com.google.common.base.Charsets;
import com.google.common.base.Optional;
import com.google.common.base.Strings;
import com.google.gson.Gson;
import com.google.gson.annotations.SerializedName;
import com.pr0gramm.app.BuildConfig;
import com.pr0gramm.app.Debug;
import com.pr0gramm.app.util.AndroidUtility;
import org.joda.time.DateTime;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Collections;
import java.util.List;
import javax.inject.Inject;
import javax.inject.Singleton;
import okhttp3.CookieJar;
import okhttp3.HttpUrl;
import proguard.annotation.Keep;
import proguard.annotation.KeepClassMembers;
import static com.google.common.base.Objects.equal;
import static java.util.Arrays.asList;
/**
*/
@Singleton
public class LoginCookieHandler implements CookieJar {
private static final Logger logger = LoggerFactory.getLogger("LoginCookieHandler");
private static final String PREF_LOGIN_COOKIE = "LoginCookieHandler.cookieValue";
private final Object lock = new Object();
private final SharedPreferences preferences;
private final Gson gson = new Gson();
private okhttp3.Cookie httpCookie;
private OnCookieChangedListener onCookieChangedListener;
private Optional<Cookie> parsedCookie = Optional.absent();
@Inject
public LoginCookieHandler(SharedPreferences preferences) {
this.preferences = preferences;
String restored = preferences.getString(PREF_LOGIN_COOKIE, null);
if (restored != null && !"null".equals(restored)) {
// logger.info("restoring cookie value from prefs: " + restored);
setLoginCookie(restored);
}
}
@Override
public List<okhttp3.Cookie> loadForRequest(HttpUrl url) {
if (isNoApiRequest(url))
return Collections.emptyList();
if (httpCookie == null || httpCookie.value() == null)
return Collections.emptyList();
return Collections.singletonList(httpCookie);
}
@Override
public void saveFromResponse(HttpUrl url, List<okhttp3.Cookie> cookies) {
if (isNoApiRequest(url))
return;
for (okhttp3.Cookie cookie : cookies) {
if (isLoginCookie(cookie)) {
setLoginCookie(cookie);
}
}
}
private boolean isNoApiRequest(HttpUrl uri) {
return !uri.host().equalsIgnoreCase("pr0gramm.com") && !uri.host().contains(Debug.MOCK_API_HOST);
}
private boolean isLoginCookie(okhttp3.Cookie cookie) {
return "me".equals(cookie.name()) && !Strings.isNullOrEmpty(cookie.value());
}
private void setLoginCookie(String value) {
// convert to a http cookie
setLoginCookie(new okhttp3.Cookie.Builder()
.name("me")
.value(value)
.domain("pr0gramm.com")
.path("/")
.expiresAt(DateTime.now().plusYears(10).getMillis())
.build());
}
private void setLoginCookie(okhttp3.Cookie cookie) {
if (BuildConfig.DEBUG) {
logger.info("Set login cookie: {}", cookie);
}
synchronized (lock) {
boolean notChanged = httpCookie != null && equal(cookie.value(), httpCookie.value());
if (notChanged)
return;
Optional<Cookie> parsedCookie = parseCookie(cookie);
boolean valid = parsedCookie.transform(c -> c.id != null && c.n != null).or(false);
if (valid) {
this.httpCookie = cookie;
this.parsedCookie = parsedCookie;
// store cookie for next time
preferences.edit()
.putString(PREF_LOGIN_COOKIE, cookie.value())
.apply();
} else {
// couldn't parse the cookie or it is not valid
clearLoginCookie(true);
}
}
if (onCookieChangedListener != null)
onCookieChangedListener.onCookieChanged();
}
public void clearLoginCookie(boolean informListener) {
synchronized (lock) {
httpCookie = null;
parsedCookie = Optional.absent();
preferences.edit().remove(PREF_LOGIN_COOKIE).apply();
}
if (informListener && onCookieChangedListener != null)
onCookieChangedListener.onCookieChanged();
}
/**
* Tries to parse the cookie into a {@link LoginCookieHandler.Cookie} instance.
*/
private Optional<Cookie> parseCookie(okhttp3.Cookie cookie) {
if (cookie == null || cookie.value() == null)
return Optional.absent();
try {
String value = AndroidUtility.urlDecode(cookie.value(), Charsets.UTF_8);
return Optional.of(gson.fromJson(value, Cookie.class));
} catch (Exception err) {
logger.warn("Could not parse login cookie!", err);
AndroidUtility.logToCrashlytics(err);
return Optional.absent();
}
}
/**
* Gets the value of the login cookie, if any.
*/
public Optional<String> getLoginCookie() {
return Optional.fromNullable(httpCookie != null ? httpCookie.value() : null);
}
public void setOnCookieChangedListener(OnCookieChangedListener onCookieChangedListener) {
this.onCookieChangedListener = onCookieChangedListener;
}
public Optional<Cookie> getCookie() {
return parsedCookie;
}
/**
* Gets the nonce. There must be a cookie to perform this action.
* You will receive a {@link LoginRequiredException} if there is
* no cookie to get the nonce from.
*/
public Api.Nonce getNonce() throws LoginRequiredException {
Optional<Cookie> cookie = getCookie();
if (!cookie.transform(c -> c.id != null).or(false)) {
if (cookie.isPresent())
clearLoginCookie(true);
throw new LoginRequiredException();
}
return cookie.transform(c -> new Api.Nonce(c.id)).get();
}
/**
* Returns true, if the user has pr0mium status.
*/
public boolean isPaid() {
Object result = getCookie().transform(cookie -> cookie.paid).or(false);
if (result instanceof Boolean)
return (boolean) result;
if (result instanceof Number)
return ((Number) result).intValue() != 0;
return asList("true", "1").contains(result.toString().toLowerCase());
}
public boolean hasCookie() {
return httpCookie != null && parsedCookie.transform(c -> c.id != null).or(false);
}
@Keep
@KeepClassMembers
public static class Cookie {
public String n;
public String id;
public Object paid;
@SerializedName("a")
public Object admin;
}
public interface OnCookieChangedListener {
/**
* Called if the cookie has changed.
*/
void onCookieChanged();
}
/**
*/
public static class LoginRequiredException extends IllegalStateException {
private LoginRequiredException() {
}
}
}
|
app/src/main/java/com/pr0gramm/app/api/pr0gramm/LoginCookieHandler.java
|
package com.pr0gramm.app.api.pr0gramm;
import android.content.SharedPreferences;
import com.google.common.base.Charsets;
import com.google.common.base.Optional;
import com.google.common.base.Strings;
import com.google.gson.Gson;
import com.google.gson.annotations.SerializedName;
import com.pr0gramm.app.BuildConfig;
import com.pr0gramm.app.Debug;
import com.pr0gramm.app.util.AndroidUtility;
import org.joda.time.DateTime;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Collections;
import java.util.List;
import javax.inject.Inject;
import javax.inject.Singleton;
import okhttp3.CookieJar;
import okhttp3.HttpUrl;
import proguard.annotation.Keep;
import proguard.annotation.KeepClassMembers;
import static com.google.common.base.Objects.equal;
import static java.util.Arrays.asList;
/**
*/
@Singleton
public class LoginCookieHandler implements CookieJar {
private static final Logger logger = LoggerFactory.getLogger("LoginCookieHandler");
private static final String PREF_LOGIN_COOKIE = "LoginCookieHandler.cookieValue";
private final Object lock = new Object();
private final SharedPreferences preferences;
private final Gson gson = new Gson();
private okhttp3.Cookie httpCookie;
private OnCookieChangedListener onCookieChangedListener;
private Optional<Cookie> parsedCookie = Optional.absent();
@Inject
public LoginCookieHandler(SharedPreferences preferences) {
this.preferences = preferences;
String restored = preferences.getString(PREF_LOGIN_COOKIE, null);
if (restored != null && !"null".equals(restored)) {
// logger.info("restoring cookie value from prefs: " + restored);
setLoginCookie(restored);
}
}
@Override
public List<okhttp3.Cookie> loadForRequest(HttpUrl url) {
if (isNoApiRequest(url))
return Collections.emptyList();
if (httpCookie == null || httpCookie.value() == null)
return Collections.emptyList();
return Collections.singletonList(httpCookie);
}
@Override
public void saveFromResponse(HttpUrl url, List<okhttp3.Cookie> cookies) {
if (isNoApiRequest(url))
return;
for (okhttp3.Cookie cookie : cookies) {
if (isLoginCookie(cookie)) {
setLoginCookie(cookie);
}
}
}
private boolean isNoApiRequest(HttpUrl uri) {
return !uri.host().equalsIgnoreCase("pr0gramm.com") && !uri.host().contains(Debug.MOCK_API_HOST);
}
private boolean isLoginCookie(okhttp3.Cookie cookie) {
return "me".equals(cookie.name()) && !Strings.isNullOrEmpty(cookie.value());
}
private void setLoginCookie(String value) {
// convert to a http cookie
setLoginCookie(new okhttp3.Cookie.Builder()
.name("me")
.value(value)
.domain("pr0gramm.com")
.path("/")
.expiresAt(DateTime.now().plusYears(10).getMillis())
.build());
}
private void setLoginCookie(okhttp3.Cookie cookie) {
if (BuildConfig.DEBUG) {
logger.info("Set login cookie: {}", cookie);
}
synchronized (lock) {
boolean notChanged = httpCookie != null && equal(cookie.value(), httpCookie.value());
if (notChanged)
return;
Optional<Cookie> parsedCookie = parseCookie(cookie);
boolean valid = parsedCookie.transform(c -> c.id != null && c.n != null).or(false);
if (valid) {
this.httpCookie = cookie;
this.parsedCookie = parsedCookie;
// store cookie for next time
preferences.edit()
.putString(PREF_LOGIN_COOKIE, cookie.value())
.apply();
} else {
// couldn't parse the cookie or it is not valid
clearLoginCookie(true);
}
}
if (onCookieChangedListener != null)
onCookieChangedListener.onCookieChanged();
}
public void clearLoginCookie(boolean informListener) {
synchronized (lock) {
httpCookie = null;
parsedCookie = Optional.absent();
preferences.edit().remove(PREF_LOGIN_COOKIE).apply();
}
if (informListener && onCookieChangedListener != null)
onCookieChangedListener.onCookieChanged();
}
/**
* Tries to parse the cookie into a {@link LoginCookieHandler.Cookie} instance.
*/
private Optional<Cookie> parseCookie(okhttp3.Cookie cookie) {
if (cookie == null || cookie.value() == null)
return Optional.absent();
try {
String value = AndroidUtility.urlDecode(cookie.value(), Charsets.UTF_8);
return Optional.of(gson.fromJson(value, Cookie.class));
} catch (Exception err) {
logger.warn("Could not parse login cookie!", err);
AndroidUtility.logToCrashlytics(err);
return Optional.absent();
}
}
/**
 * Gets the value of the login cookie, if any.
 */
public Optional<String> getLoginCookie() {
    okhttp3.Cookie cookie = httpCookie;
    if (cookie == null) {
        return Optional.absent();
    }
    return Optional.fromNullable(cookie.value());
}
/**
 * Returns the listener that is informed about cookie changes, or null
 * if none was registered.
 */
public OnCookieChangedListener getOnCookieChangedListener() {
    return onCookieChangedListener;
}
/**
 * Registers the listener that is informed whenever the login cookie
 * changes or is cleared. Pass null to unregister.
 */
public void setOnCookieChangedListener(OnCookieChangedListener onCookieChangedListener) {
    this.onCookieChangedListener = onCookieChangedListener;
}
/**
 * Returns the parsed form of the current login cookie, or absent if there
 * is no valid cookie.
 */
public Optional<Cookie> getCookie() {
    return parsedCookie;
}
/**
 * Gets the nonce. There must be a cookie to perform this action.
 * You will receive a {@link LoginRequiredException} if there is
 * no cookie to get the nonce from.
 */
public Api.Nonce getNonce() throws LoginRequiredException {
    Optional<Cookie> cookie = getCookie();
    boolean hasId = cookie.transform(c -> c.id != null).or(false);

    if (!hasId) {
        // a cookie without an id is useless, drop it before failing
        if (cookie.isPresent()) {
            clearLoginCookie(true);
        }
        throw new LoginRequiredException();
    }

    return new Api.Nonce(cookie.get().id);
}
/**
 * Returns true, if the user has pr0mium status.
 *
 * <p>The "paid" field is untyped json: it may be absent, null, a boolean,
 * a number, or a string such as "true"/"1" - all of those are handled.
 */
public boolean isPaid() {
    // NOTE: guava's Optional.transform throws a NullPointerException when the
    // mapper returns null, so the previous getCookie().transform(c -> c.paid)
    // crashed for a present cookie whose "paid" field was null. Read the
    // field directly instead and treat null as "not paid".
    Optional<Cookie> cookie = getCookie();
    Object paid = cookie.isPresent() ? cookie.get().paid : null;
    if (paid == null)
        return false;

    if (paid instanceof Boolean)
        return (boolean) paid;

    if (paid instanceof Number)
        return ((Number) paid).intValue() != 0;

    return asList("true", "1").contains(paid.toString().toLowerCase());
}
/**
 * Returns true if a login cookie is installed and it parsed into a
 * cookie carrying a user id.
 */
public boolean hasCookie() {
    if (httpCookie == null) {
        return false;
    }
    return parsedCookie.transform(c -> c.id != null).or(false);
}
@Keep
@KeepClassMembers
public static class Cookie {
    // presumably the user name - this handler only checks it for presence
    public String n;
    // session id; used to build the request nonce in getNonce()
    public String id;
    // pr0mium flag; untyped in the json (may be boolean, number or string)
    public Object paid;
    // admin flag (serialized as json field "a"); untyped like "paid"
    @SerializedName("a")
    public Object admin;
}
/**
 * Callback interface for observers of the login cookie.
 */
public interface OnCookieChangedListener {
    /**
     * Called if the cookie has changed.
     */
    void onCookieChanged();
}
/**
 * Thrown when an operation requires a valid login cookie but none is
 * available (see {@link #getNonce()}). Instantiated only by this handler.
 */
public static class LoginRequiredException extends IllegalStateException {
    private LoginRequiredException() {
    }
}
}
|
Cleanup
|
app/src/main/java/com/pr0gramm/app/api/pr0gramm/LoginCookieHandler.java
|
Cleanup
|
|
Java
|
mit
|
017c9d1341dbcd468a45e82065a2895601e51825
| 0
|
jaydeepw/audio-wife,lanxiaoha/audio-wife,antoniotari/audio-wife,HKMOpen/audio-wife,deshion/audio-wife
|
/***
* The MIT License (MIT)
* Copyright (c) 2014 Jaydeep
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package nl.changer.audiowife;
import java.io.IOException;
import java.util.concurrent.TimeUnit;
import android.app.Activity;
import android.content.Context;
import android.media.AudioManager;
import android.media.MediaPlayer;
import android.net.Uri;
import android.os.Handler;
import android.util.Log;
import android.view.View;
import android.widget.SeekBar;
import android.widget.TextView;
/***
* A simple audio player wrapper for Android
***/
/***
 * A simple audio player wrapper for Android.
 ***/
public class AudioWife {

    private static final String TAG = AudioWife.class.getSimpleName();

    /***
     * Keep a single copy of this in memory
     ****/
    private static AudioWife mAudioWife;

    /****
     * Playback progress update time in milliseconds
     ****/
    private static final int AUDIO_PROGRESS_UPDATE_TIME = 100;

    private Handler mHandler;
    private MediaPlayer mMediaPlayer;
    private SeekBar mSeekBar;
    private TextView mPlaybackTime;
    private View mPlayButton;
    private View mPauseButton;

    /***
     * Audio URI
     ****/
    private static Uri mUri;

    public static AudioWife getInstance() {
        if (mAudioWife == null)
            mAudioWife = new AudioWife();

        return mAudioWife;
    }

    /**
     * Mirrors the current playback position into the seek bar and the time
     * label and reschedules itself every {@link #AUDIO_PROGRESS_UPDATE_TIME}
     * ms for as long as the player keeps playing.
     */
    private Runnable mUpdateProgress = new Runnable() {

        public void run() {
            if (mHandler != null && mMediaPlayer.isPlaying()) {
                int currentPosition = mMediaPlayer.getCurrentPosition();
                mSeekBar.setProgress(currentPosition);
                updatePlaytime(currentPosition);
                // repeat the process
                mHandler.postDelayed(this, AUDIO_PROGRESS_UPDATE_TIME);
            }
            // DO NOT update the UI while the player is paused
        }
    };

    /***
     * Start playing the audio. Calling this method on already playing audio
     * has no effect.
     *
     * @throws IllegalStateException if {@link #init} was not called first
     ****/
    public void play() {
        if (mUri == null)
            throw new IllegalStateException(
                    "Uri cannot be null. Call init() before calling play()");

        if (mMediaPlayer == null)
            throw new IllegalStateException("Call init() before calling play()");

        if (mMediaPlayer.isPlaying())
            return;

        mHandler.postDelayed(mUpdateProgress, AUDIO_PROGRESS_UPDATE_TIME);
        mMediaPlayer.start();
        setPausable();
    }

    /***
     * Pause the audio being played. Calling this method has no effect if the
     * audio is already paused or the player was never initialized.
     */
    public void pause() {
        // guard against calls before init() or after release()
        if (mMediaPlayer == null)
            return;

        if (mMediaPlayer.isPlaying()) {
            mMediaPlayer.pause();
            setPlayable();
        }
    }

    /**
     * Renders "mm:ss/mm:ss" (elapsed/total) into the playback time label.
     *
     * @param currentTime current playback position in milliseconds
     */
    private void updatePlaytime(int currentTime) {
        long totalDuration = 0;

        if (mMediaPlayer != null) {
            try {
                totalDuration = mMediaPlayer.getDuration();
            } catch (Exception e) {
                // getDuration() may throw IllegalStateException in an invalid
                // player state; keep the fallback duration of 0 in that case
                e.printStackTrace();
            }
        }

        StringBuilder playbackStr = new StringBuilder();

        // set the current time; it is ok to show 00:00 in the UI
        playbackStr.append(String.format(
                "%02d:%02d",
                TimeUnit.MILLISECONDS.toMinutes((long) currentTime),
                TimeUnit.MILLISECONDS.toSeconds((long) currentTime)
                        - TimeUnit.MINUTES.toSeconds(TimeUnit.MILLISECONDS
                                .toMinutes((long) currentTime))));

        playbackStr.append("/");

        // set total time as the audio is being played
        if (totalDuration != 0) {
            playbackStr.append(String.format(
                    "%02d:%02d",
                    TimeUnit.MILLISECONDS.toMinutes(totalDuration),
                    TimeUnit.MILLISECONDS.toSeconds(totalDuration)
                            - TimeUnit.MINUTES.toSeconds(TimeUnit.MILLISECONDS
                                    .toMinutes(totalDuration))));
        } else {
            Log.w(TAG, "Something strange: this audio track duration is zero");
        }

        mPlaybackTime.setText(playbackStr);
    }

    /** Shows the play button and hides the pause button. */
    private void setPlayable() {
        if (mPlayButton != null)
            mPlayButton.setVisibility(View.VISIBLE);

        if (mPauseButton != null)
            mPauseButton.setVisibility(View.GONE);
    }

    /** Shows the pause button and hides the play button. */
    private void setPausable() {
        if (mPlayButton != null)
            mPlayButton.setVisibility(View.GONE);

        if (mPauseButton != null)
            mPauseButton.setVisibility(View.VISIBLE);
    }

    /***
     * Initialize the audio player. This method should be the first one to be
     * called before starting to play audio using {@link AudioWife}
     *
     * @param ctx
     *            {@link Activity} Context
     * @param uri
     *            Uri of the audio to be played.
     * @param playBtn
     *            view toggled visible while the audio is paused; may be null
     * @param pauseBtn
     *            view toggled visible while the audio plays; may be null
     * @param playTime
     *            label receiving the "mm:ss/mm:ss" playback time
     ****/
    public AudioWife init(Context ctx, Uri uri, View playBtn, View pauseBtn,
            TextView playTime) {

        if (uri == null)
            throw new IllegalArgumentException("Uri cannot be null");

        if (mAudioWife == null)
            mAudioWife = new AudioWife();

        mUri = uri;
        mPlayButton = playBtn;
        mPauseButton = pauseBtn;
        mPlaybackTime = playTime;
        mHandler = new Handler();

        initPlayer(ctx);

        return this;
    }

    /**
     * Attaches a seek bar that tracks and controls the playback position.
     * Must be called after {@link #init}.
     */
    public AudioWife setSeekBar(SeekBar seekbar) {
        // fail fast with a descriptive error instead of an NPE from
        // initMediaSeekBar() when init() was never called
        if (mMediaPlayer == null)
            throw new IllegalStateException("Call init() before calling setSeekBar()");

        mSeekBar = seekbar;
        initMediaSeekBar();
        return this;
    }

    /****
     * Initialize and prepare the audio player
     ****/
    private void initPlayer(Context ctx) {

        mMediaPlayer = new MediaPlayer();
        mMediaPlayer.setAudioStreamType(AudioManager.STREAM_MUSIC);

        try {
            mMediaPlayer.setDataSource(ctx, mUri);
            // only attempt prepare() if the data source was set successfully;
            // previously prepare() ran (and failed) even after setDataSource
            // had already thrown
            mMediaPlayer.prepare();
        } catch (Exception e) {
            // keep the original behaviour of logging and carrying on; the
            // individual catch branches all did exactly this
            e.printStackTrace();
        }

        mMediaPlayer
                .setOnCompletionListener(new MediaPlayer.OnCompletionListener() {

                    @Override
                    public void onCompletion(MediaPlayer mp) {
                        // reset the UI when the audio finished playing
                        int currentPlayTime = 0;
                        mSeekBar.setProgress(currentPlayTime);
                        updatePlaytime(currentPlayTime);
                        setPlayable();
                    }
                });
    }

    /** Sizes the seek bar to the track duration and wires user seeking. */
    private void initMediaSeekBar() {
        // the seek bar ranges over the full track duration in milliseconds
        long finalTime = mMediaPlayer.getDuration();
        mSeekBar.setMax((int) finalTime);

        mSeekBar.setOnSeekBarChangeListener(new SeekBar.OnSeekBarChangeListener() {

            @Override
            public void onStopTrackingTouch(SeekBar seekBar) {
                // jump to the position the user dragged to
                mMediaPlayer.seekTo(seekBar.getProgress());
            }

            @Override
            public void onStartTrackingTouch(SeekBar seekBar) {
            }

            @Override
            public void onProgressChanged(SeekBar seekBar, int progress,
                    boolean fromUser) {
            }
        });
    }

    /***
     * Releases the allocated resources.
     *
     * <p>
     * Call {@link #init(Context, Uri, View, View, TextView)} again before
     * calling {@link #play()}
     * </p>
     * */
    public void release() {
        if (mMediaPlayer != null) {
            mMediaPlayer.stop();
            mMediaPlayer.release();
            mMediaPlayer = null;
            mHandler = null;
        }
    }
}
|
LibAudioWife/src/nl/changer/audiowife/AudioWife.java
|
/***
* The MIT License (MIT)
* Copyright (c) 2014 Jaydeep
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package nl.changer.audiowife;
import java.io.IOException;
import java.util.concurrent.TimeUnit;
import android.app.Activity;
import android.content.Context;
import android.media.AudioManager;
import android.media.MediaPlayer;
import android.net.Uri;
import android.os.Handler;
import android.util.Log;
import android.view.View;
import android.widget.SeekBar;
import android.widget.TextView;
/***
* A simple audio player wrapper for Android
***/
/***
 * A simple audio player wrapper for Android.
 ***/
public class AudioWife {

    private static final String TAG = AudioWife.class.getSimpleName();

    /***
     * Keep a single copy of this in memory
     ****/
    private static AudioWife mAudioWife;

    /****
     * Playback progress update time in milliseconds
     ****/
    private static final int AUDIO_PROGRESS_UPDATE_TIME = 100;

    private Handler mHandler;
    private MediaPlayer mMediaPlayer;
    private SeekBar mSeekBar;
    private TextView mPlaybackTime;
    private View mPlayButton;
    private View mPauseButton;

    /***
     * Audio URI
     ****/
    private static Uri mUri;

    public static AudioWife getInstance() {
        if (mAudioWife == null)
            mAudioWife = new AudioWife();

        return mAudioWife;
    }

    /**
     * Mirrors the current playback position into the seek bar and the time
     * label and reschedules itself every {@link #AUDIO_PROGRESS_UPDATE_TIME}
     * ms for as long as the player keeps playing.
     */
    private Runnable mUpdateProgress = new Runnable() {

        public void run() {
            if (mHandler != null && mMediaPlayer.isPlaying()) {
                int currentPosition = mMediaPlayer.getCurrentPosition();
                mSeekBar.setProgress(currentPosition);
                updatePlaytime(currentPosition);
                // repeat the process
                mHandler.postDelayed(this, AUDIO_PROGRESS_UPDATE_TIME);
            }
            // DO NOT update the UI while the player is paused
        }
    };

    /***
     * Start playing the audio. Calling this method on already playing audio
     * has no effect.
     *
     * @throws IllegalStateException if {@link #init} was not called first
     ****/
    public void play() {
        if (mUri == null)
            throw new IllegalStateException(
                    "Uri cannot be null. Call init() before calling play()");

        if (mMediaPlayer == null)
            throw new IllegalStateException("Call init() before calling play()");

        if (mMediaPlayer.isPlaying())
            return;

        mHandler.postDelayed(mUpdateProgress, AUDIO_PROGRESS_UPDATE_TIME);
        mMediaPlayer.start();
        setPausable();
    }

    /***
     * Pause the audio being played. Calling this method has no effect if the
     * audio is already paused or the player was never initialized.
     */
    public void pause() {
        // guard against calls before init() or after release()
        if (mMediaPlayer == null)
            return;

        if (mMediaPlayer.isPlaying()) {
            mMediaPlayer.pause();
            setPlayable();
        }
    }

    /**
     * Renders "mm:ss/mm:ss" (elapsed/total) into the playback time label.
     *
     * @param currentTime current playback position in milliseconds
     */
    private void updatePlaytime(int currentTime) {
        long totalDuration = 0;

        if (mMediaPlayer != null) {
            try {
                totalDuration = mMediaPlayer.getDuration();
            } catch (Exception e) {
                // getDuration() may throw IllegalStateException in an invalid
                // player state; keep the fallback duration of 0 in that case
                e.printStackTrace();
            }
        }

        StringBuilder playbackStr = new StringBuilder();

        // set the current time; it is ok to show 00:00 in the UI
        playbackStr.append(String.format(
                "%02d:%02d",
                TimeUnit.MILLISECONDS.toMinutes((long) currentTime),
                TimeUnit.MILLISECONDS.toSeconds((long) currentTime)
                        - TimeUnit.MINUTES.toSeconds(TimeUnit.MILLISECONDS
                                .toMinutes((long) currentTime))));

        playbackStr.append("/");

        // set total time as the audio is being played
        if (totalDuration != 0) {
            playbackStr.append(String.format(
                    "%02d:%02d",
                    TimeUnit.MILLISECONDS.toMinutes(totalDuration),
                    TimeUnit.MILLISECONDS.toSeconds(totalDuration)
                            - TimeUnit.MINUTES.toSeconds(TimeUnit.MILLISECONDS
                                    .toMinutes(totalDuration))));
        } else {
            Log.w(TAG, "Something strange: this audio track duration is zero");
        }

        mPlaybackTime.setText(playbackStr);
    }

    /** Shows the play button and hides the pause button. */
    private void setPlayable() {
        if (mPlayButton != null)
            mPlayButton.setVisibility(View.VISIBLE);

        if (mPauseButton != null)
            mPauseButton.setVisibility(View.GONE);
    }

    /** Shows the pause button and hides the play button. */
    private void setPausable() {
        if (mPlayButton != null)
            mPlayButton.setVisibility(View.GONE);

        if (mPauseButton != null)
            mPauseButton.setVisibility(View.VISIBLE);
    }

    /***
     * Initialize the audio player. This method should be the first one to be
     * called before starting to play audio using {@link AudioWife}
     *
     * @param ctx
     *            {@link Activity} Context
     * @param uri
     *            Uri of the audio to be played.
     * @param seekBar
     *            seek bar tracking and controlling the playback position
     * @param playBtn
     *            view toggled visible while the audio is paused; may be null
     * @param pauseBtn
     *            view toggled visible while the audio plays; may be null
     * @param playTime
     *            label receiving the "mm:ss/mm:ss" playback time
     ****/
    public AudioWife init(Context ctx, Uri uri, SeekBar seekBar,
            View playBtn, View pauseBtn, TextView playTime) {

        if (uri == null)
            throw new IllegalArgumentException("Uri cannot be null");

        if (mAudioWife == null)
            mAudioWife = new AudioWife();

        mUri = uri;
        mSeekBar = seekBar;
        mPlayButton = playBtn;
        mPauseButton = pauseBtn;
        mPlaybackTime = playTime;
        mHandler = new Handler();

        mAudioWife.initPlayer(ctx);
        mAudioWife.initMediaSeekBar();

        return mAudioWife;
    }

    /****
     * Initialize and prepare the audio player
     ****/
    private void initPlayer(Context ctx) {

        mMediaPlayer = new MediaPlayer();
        mMediaPlayer.setAudioStreamType(AudioManager.STREAM_MUSIC);

        try {
            mMediaPlayer.setDataSource(ctx, mUri);
            // only attempt prepare() if the data source was set successfully;
            // previously prepare() ran (and failed) even after setDataSource
            // had already thrown
            mMediaPlayer.prepare();
        } catch (Exception e) {
            // keep the original behaviour of logging and carrying on; the
            // individual catch branches all did exactly this
            e.printStackTrace();
        }

        mMediaPlayer
                .setOnCompletionListener(new MediaPlayer.OnCompletionListener() {

                    @Override
                    public void onCompletion(MediaPlayer mp) {
                        // reset the UI when the audio finished playing
                        int currentPlayTime = 0;
                        mSeekBar.setProgress(currentPlayTime);
                        updatePlaytime(currentPlayTime);
                        setPlayable();
                    }
                });
    }

    /** Sizes the seek bar to the track duration and wires user seeking. */
    private void initMediaSeekBar() {
        // the seek bar ranges over the full track duration in milliseconds
        long finalTime = mMediaPlayer.getDuration();
        mSeekBar.setMax((int) finalTime);

        mSeekBar.setOnSeekBarChangeListener(new SeekBar.OnSeekBarChangeListener() {

            @Override
            public void onStopTrackingTouch(SeekBar seekBar) {
                // jump to the position the user dragged to
                mMediaPlayer.seekTo(seekBar.getProgress());
            }

            @Override
            public void onStartTrackingTouch(SeekBar seekBar) {
            }

            @Override
            public void onProgressChanged(SeekBar seekBar, int progress,
                    boolean fromUser) {
            }
        });
    }

    /***
     * Releases the allocated resources.
     *
     * <p>
     * Call {@link #init(Context, Uri, SeekBar, View, View, TextView)} before
     * calling {@link #play()}
     * </p>
     * */
    public void release() {
        if (mMediaPlayer != null) {
            mMediaPlayer.stop();
            mMediaPlayer.release();
            mMediaPlayer = null;
            mHandler = null;
        }
    }
}
|
add SeekBar setter method to the singleton
|
LibAudioWife/src/nl/changer/audiowife/AudioWife.java
|
add SeekBar setter method to the singleton
|
|
Java
|
mit
|
65222e1ed0242cecb6346ea84a6e5090e6010175
| 0
|
DDoS/Bomberman
|
package ecse321.fall2014.group3.bomberman.physics;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import com.flowpowered.math.vector.Vector2f;
import ecse321.fall2014.group3.bomberman.Direction;
import ecse321.fall2014.group3.bomberman.Game;
import ecse321.fall2014.group3.bomberman.database.Leaderboard.Leader;
import ecse321.fall2014.group3.bomberman.input.Key;
import ecse321.fall2014.group3.bomberman.input.KeyboardState;
import ecse321.fall2014.group3.bomberman.nterface.Interface;
import ecse321.fall2014.group3.bomberman.physics.entity.Entity;
import ecse321.fall2014.group3.bomberman.physics.entity.mob.Player;
import ecse321.fall2014.group3.bomberman.physics.entity.ui.ButtonEntity;
import ecse321.fall2014.group3.bomberman.physics.entity.ui.SliderEntity;
import ecse321.fall2014.group3.bomberman.physics.entity.ui.TextBoxEntity;
import ecse321.fall2014.group3.bomberman.physics.entity.ui.UIBoxEntity;
import ecse321.fall2014.group3.bomberman.ticking.TickingElement;
import ecse321.fall2014.group3.bomberman.world.Level;
import ecse321.fall2014.group3.bomberman.world.Map;
import ecse321.fall2014.group3.bomberman.world.tile.Air;
import ecse321.fall2014.group3.bomberman.world.tile.Tile;
/**
*
*/
/**
 * The physics tick: drives menu navigation when a menu level is active and
 * collision detection plus player movement during gameplay.
 */
public class Physics extends TickingElement {
    // max overlap on the perpendicular axis before a touch counts as a collision
    private static final float PERPENDICULAR_CONTACT_THRESHOLD = 0.05f;
    // overlap ratio below which the player is nudged towards an adjacent free tile
    private static final float SLIDING_CONTACT_THRESHOLD = 0.9f;
    private final Game game;
    private final SweepAndPruneAlgorithm collisionDetection = new SweepAndPruneAlgorithm();
    private final Set<Tile> collidableTiles = new HashSet<>();
    private final Set<Entity> entities = Collections.synchronizedSet(new HashSet<Entity>());
    private final Player player = new Player(Vector2f.ZERO);
    private final List<ButtonEntity> buttonOrder = Collections.synchronizedList(new ArrayList<ButtonEntity>());
    private volatile int selectedButtonIndex;
    private Level currentLevel;
    private long mapVersion = 0;

    public Physics(Game game) {
        super("Physics", 60);
        this.game = game;
    }

    @Override
    public void onStart() {
    }

    @Override
    public void onTick(long dt) {
        // rebuild the entity sets whenever the level changed since last tick
        final Level level = game.getWorld().getLevel();
        if (currentLevel != level) {
            currentLevel = level;
            clearEntities();
            if (currentLevel.isMenu()) {
                setupMenu();
            } else {
                setupGame();
            }
        }
        if (currentLevel.isMenu()) {
            doMenuTick(dt);
        } else {
            doGameTick(dt);
        }
        // discard any key state the tick did not consume
        game.getInput().getKeyboardState().clearAll();
    }

    /** Resets collision detection and UI state for a level change. */
    private void clearEntities() {
        // Clear collision detection
        entities.clear();
        collisionDetection.clear();
        collidableTiles.clear();
        // Clear UI
        buttonOrder.clear();
    }

    /** Builds the UI entities (and leaderboard rows) for a menu level. */
    private void setupMenu() {
        // Add UI entities
        final List<UIBoxEntity> uiEntities = game.getWorld().getLevel().buildUI(game.getSession().getLevel());
        entities.addAll(uiEntities);
        for (UIBoxEntity uiEntity : uiEntities) {
            if (uiEntity instanceof ButtonEntity) {
                buttonOrder.add((ButtonEntity) uiEntity);
            }
        }
        selectedButtonIndex = 0;
        // Add extra entities for leaderboard menu
        if (currentLevel == Level.LEADER_BOARD) {
            final Leader[] top = game.getLeaderboard().getTop(10);
            for (int i = 0; i < top.length && top[i] != null; i++) {
                entities.add(new TextBoxEntity(new Vector2f(4, Interface.VIEW_HEIGHT_TILE - (6 + i)), Vector2f.ONE, top[i].getFormatted()));
            }
        }
    }

    /** Moves the button selection from keyboard input and updates sliders. */
    private void doMenuTick(long dt) {
        final KeyboardState keyboardState = game.getInput().getKeyboardState();
        final int selectedShift = keyboardState.getAndClearPressCount(Key.DOWN) - keyboardState.getAndClearPressCount(Key.UP);
        final int buttonCount = buttonOrder.size();
        // Guard the wrap-around arithmetic: with zero buttons the modulo below
        // would throw an ArithmeticException (division by zero).
        if (buttonCount > 0) {
            final int oldSelected = selectedButtonIndex;
            // double-modulo keeps the index positive for negative shifts
            final int newSelected = ((oldSelected + selectedShift) % buttonCount + buttonCount) % buttonCount;
            buttonOrder.get(oldSelected).setSelected(false);
            buttonOrder.get(newSelected).setSelected(true);
            selectedButtonIndex = newSelected;
        }
        final ButtonEntity selectedButton = getSelectedButton();
        if (selectedButton instanceof SliderEntity) {
            final int sliderShift = keyboardState.getAndClearPressCount(Key.RIGHT) - keyboardState.getAndClearPressCount(Key.LEFT);
            ((SliderEntity) selectedButton).add(sliderShift);
        }
    }

    /** Places the player and the in-game HUD for a gameplay level. */
    private void setupGame() {
        // Add player
        entities.add(player);
        player.setPosition(Vector2f.ONE);
        collisionDetection.add(player);
        // Add UI
        final String levelString = currentLevel.isBonus() ? "Bonus level " + -currentLevel.getNumber() : "Level " + currentLevel.getNumber();
        entities.add(new TextBoxEntity(new Vector2f(Map.WIDTH / 4f, Map.HEIGHT - 1.25f), new Vector2f(2, 2), levelString));
        // Add enemies
    }

    /**
     * One gameplay step: refreshes collidable tiles when the map changed,
     * runs collision detection and resolves player movement against it.
     */
    private void doGameTick(long dt) {
        final Map map = game.getWorld().getMap();
        final long newVersion = map.getVersion();
        // re-scan collidable tiles only when the map actually changed
        if (mapVersion < newVersion) {
            for (Tile tile : collidableTiles) {
                collisionDetection.remove(tile);
            }
            collidableTiles.clear();
            for (int y = 0; y < Map.HEIGHT; y++) {
                for (int x = 0; x < Map.WIDTH; x++) {
                    final Tile tile = map.getTile(x, y);
                    if (tile.isCollisionEnabled()) {
                        collidableTiles.add(tile);
                        collisionDetection.add(tile);
                    }
                }
            }
            mapVersion = newVersion;
        }
        collisionDetection.update();
        final float timeSeconds = dt / 1e9f;
        // Process player input
        final Vector2f inputVector = getInputVector().mul(player.getSpeed() * timeSeconds);
        // Compute the motion for the tick
        Vector2f movement = inputVector;
        for (Collidable collidable : player.getCollisionList()) {
            // ghost collidables only report collisions, but don't actually collide
            if (collidable.isGhost()) {
                continue;
            }
            // Find the intersection of the collision (a box) and the direction
            final Intersection intersection = getIntersection(player, collidable);
            final Direction direction = getCollisionDirection(intersection, collidable);
            // Allow for a small amount of contact on the sides to prevent the player from getting stuck in adjacent tiles
            if (intersection.size.dot(direction.getPerpendicularUnit()) < PERPENDICULAR_CONTACT_THRESHOLD) {
                continue;
            }
            // Block the movement in the direction if sufficient contact
            movement = blockDirection(movement, direction.getUnit());
            // Attempt to shift the player to the nearest free tile when close to one to ease motion in tight spaces
            if (collidable instanceof Tile) {
                // Check if the percentage of collision is lower than a threshold, signifying that the player is colliding by a minimum amount
                if (intersection.size.dot(direction.getPerpendicularUnit()) / player.getCollisionBox().getSize().dot(direction.getPerpendicularUnit()) < SLIDING_CONTACT_THRESHOLD) {
                    // Get the direction in which to attempt to shift as a unit
                    final Vector2f offset = intersection.center.sub(collidable.getPosition());
                    final Vector2f shiftDirection = direction.getPerpendicularUnit().mul(offset).normalize();
                    // Check if we can shift, by looking for a path around the tile in the shift direction
                    final Vector2f adjacentPosition = collidable.getPosition().add(shiftDirection);
                    if (map.isTile(adjacentPosition, Air.class) && map.isTile(adjacentPosition.sub(direction.getUnit()), Air.class)) {
                        // Redirect the blocked motion towards the free path
                        movement = movement.add(shiftDirection.mul(inputVector.dot(direction.getUnit())));
                    }
                }
            }
        }
        // Update player movement
        player.setPosition(player.getPosition().add(movement));
        player.setVelocity(movement.div(timeSeconds));
    }

    /** Converts held direction keys into a unit (or zero) movement vector. */
    private Vector2f getInputVector() {
        final KeyboardState keyboardState = game.getInput().getKeyboardState();
        Vector2f input = Vector2f.ZERO;
        for (Direction direction : Direction.values()) {
            final Key key = direction.getKey();
            input = input.add(direction.getUnit().mul(keyboardState.getAndClearPressTime(key) / 1e9f));
        }
        // Make sure we're not trying to normalize the zero vector
        if (input.lengthSquared() > 0) {
            input = input.normalize();
        }
        return input;
    }

    @Override
    public void onStop() {
        clearEntities();
        mapVersion = 0;
    }

    public Player getPlayer() {
        return player;
    }

    public Set<Entity> getEntities() {
        return entities;
    }

    /** Returns the currently highlighted menu button, or null if none. */
    public ButtonEntity getSelectedButton() {
        if (buttonOrder.size() <= selectedButtonIndex) {
            return null;
        }
        return buttonOrder.get(selectedButtonIndex);
    }

    /**
     * Blocks the movement in the desired direction, which is represented as a unit vector.
     *
     * @param movement The movement as a vector
     * @param unitDirection The unit direction to block. Must be a unit to function correctly!
     * @return The new movement but with all motion in the given direction removed
     */
    public static Vector2f blockDirection(Vector2f movement, Vector2f unitDirection) {
        // Check if we have movement in the direction
        if (movement.dot(unitDirection) > 0) {
            // Get motion in the direction and subtracted from total movement
            return movement.sub(movement.mul(unitDirection.abs()));
        }
        // If we don't have any motion, don't change anything
        return movement;
    }

    /**
     * Gets the direction of a collision. This uses the intersection between the two collided objects, which can be obtained with {@link #getIntersection(Collidable, Collidable)}, the object that was
     * collided. The direction found points towards the collided object.
     *
     * @param intersection The intersection from the collision
     * @param other The object that was collided
     * @return The direction of the collision
     */
    public static Direction getCollisionDirection(Intersection intersection, Collidable other) {
        final Vector2f offset = other.getPosition().sub(intersection.center);
        return Direction.fromUnit(offset);
    }

    /**
     * Gets the collision intersection information for two object that are colliding. If the object aren't colliding, the resulting information is undefined.
     *
     * @param object The first object of the collision
     * @param other The second object of the collision
     * @return An intersection object containing the collision information
     */
    public static Intersection getIntersection(Collidable object, Collidable other) {
        final Vector2f intersectMax = object.getBoxMaxPoint().min(other.getBoxMaxPoint());
        final Vector2f intersectMin = object.getBoxMinPoint().max(other.getBoxMinPoint());
        return new Intersection(intersectMax, intersectMin);
    }

    /**
     * Represents an intersection between two colliding objects.
     */
    public static class Intersection {
        /**
         * The max point of the intersection box.
         */
        public final Vector2f max;
        /**
         * The min point of the intersection box.
         */
        public final Vector2f min;
        /**
         * The size of the intersection box as the diagonal vector.
         */
        public final Vector2f size;
        /**
         * The center of the intersection box (halfway up the diagonal).
         */
        public final Vector2f center;

        private Intersection(Vector2f max, Vector2f min) {
            this.max = max;
            this.min = min;
            size = max.sub(min);
            center = min.add(size.div(2));
        }
    }
}
|
src/main/java/ecse321/fall2014/group3/bomberman/physics/Physics.java
|
package ecse321.fall2014.group3.bomberman.physics;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import com.flowpowered.math.vector.Vector2f;
import ecse321.fall2014.group3.bomberman.Direction;
import ecse321.fall2014.group3.bomberman.Game;
import ecse321.fall2014.group3.bomberman.database.Leaderboard.Leader;
import ecse321.fall2014.group3.bomberman.input.Key;
import ecse321.fall2014.group3.bomberman.input.KeyboardState;
import ecse321.fall2014.group3.bomberman.nterface.Interface;
import ecse321.fall2014.group3.bomberman.physics.entity.Entity;
import ecse321.fall2014.group3.bomberman.physics.entity.mob.Player;
import ecse321.fall2014.group3.bomberman.physics.entity.ui.ButtonEntity;
import ecse321.fall2014.group3.bomberman.physics.entity.ui.SliderEntity;
import ecse321.fall2014.group3.bomberman.physics.entity.ui.TextBoxEntity;
import ecse321.fall2014.group3.bomberman.physics.entity.ui.UIBoxEntity;
import ecse321.fall2014.group3.bomberman.ticking.TickingElement;
import ecse321.fall2014.group3.bomberman.world.Level;
import ecse321.fall2014.group3.bomberman.world.Map;
import ecse321.fall2014.group3.bomberman.world.tile.Air;
import ecse321.fall2014.group3.bomberman.world.tile.Tile;
/**
*
*/
public class Physics extends TickingElement {
private static final float PERPENDICULAR_CONTACT_THRESHOLD = 0.05f;
private static final float SLIDING_CONTACT_THRESHOLD = 0.9f;
private final Game game;
private final SweepAndPruneAlgorithm collisionDetection = new SweepAndPruneAlgorithm();
private final Set<Tile> collidableTiles = new HashSet<>();
private final Set<Entity> entities = Collections.synchronizedSet(new HashSet<Entity>());
private final Player player = new Player(Vector2f.ZERO);
private final List<ButtonEntity> buttonOrder = Collections.synchronizedList(new ArrayList<ButtonEntity>());
private volatile int selectedButtonIndex;
private Level currentLevel;
private long mapVersion = 0;
public Physics(Game game) {
super("Physics", 60);
this.game = game;
}
@Override
public void onStart() {
}
@Override
public void onTick(long dt) {
final Level level = game.getWorld().getLevel();
if (currentLevel != level) {
currentLevel = level;
clearEntities();
if (currentLevel.isMenu()) {
setupMenu();
} else {
setupGame();
}
}
if (currentLevel.isMenu()) {
doMenuTick(dt);
} else {
doGameTick(dt);
}
}
private void clearEntities() {
// Clear collision detection
entities.clear();
collisionDetection.clear();
collidableTiles.clear();
// Clear UI
buttonOrder.clear();
}
private void setupMenu() {
// Add UI entities
final List<UIBoxEntity> uiEntities = game.getWorld().getLevel().buildUI();
entities.addAll(uiEntities);
for (UIBoxEntity uiEntity : uiEntities) {
if (uiEntity instanceof ButtonEntity) {
buttonOrder.add((ButtonEntity) uiEntity);
}
}
selectedButtonIndex = 0;
// Add extra entities for leaderboard menu
if (currentLevel == Level.LEADER_BOARD) {
final Leader[] top = game.getLeaderboard().getTop(10);
for (int i = 0; i < top.length && top[i] != null; i++) {
entities.add(new TextBoxEntity(new Vector2f(4, Interface.VIEW_HEIGHT_TILE - (6 + i)), Vector2f.ONE, top[i].getFormatted()));
}
}
}
private void doMenuTick(long dt) {
final KeyboardState keyboardState = game.getInput().getKeyboardState();
final int selectedShift = keyboardState.getAndClearPressCount(Key.DOWN) - keyboardState.getAndClearPressCount(Key.UP);
final int sliderShift = keyboardState.getAndClearPressCount(Key.RIGHT) - keyboardState.getAndClearPressCount(Key.LEFT);
final int buttonCount = buttonOrder.size();
final int oldSelected = selectedButtonIndex;
final int newSelected = ((oldSelected + selectedShift) % buttonCount + buttonCount) % buttonCount;
if (buttonCount > 0) {
buttonOrder.get(oldSelected).setSelected(false);
buttonOrder.get(newSelected).setSelected(true);
}
selectedButtonIndex = newSelected;
final ButtonEntity selectedButton = getSelectedButton();
if (selectedButton instanceof SliderEntity) {
((SliderEntity) selectedButton).add(sliderShift);
}
}
private void setupGame() {
// Add player
entities.add(player);
player.setPosition(Vector2f.ONE);
collisionDetection.add(player);
// Add enemies
}
private void doGameTick(long dt) {
    // Per-tick world update: sync collidable tiles with the map, then move
    // the player according to input while resolving tile collisions.
    final Map map = game.getWorld().getMap();
    final long newVersion = map.getVersion();
    // Rebuild the collidable tile set only when the map has changed since
    // the last tick (version counter comparison avoids a full rescan).
    if (mapVersion < newVersion) {
        for (Tile tile : collidableTiles) {
            collisionDetection.remove(tile);
        }
        collidableTiles.clear();
        for (int y = 0; y < Map.HEIGHT; y++) {
            for (int x = 0; x < Map.WIDTH; x++) {
                final Tile tile = map.getTile(x, y);
                if (tile.isCollisionEnabled()) {
                    collidableTiles.add(tile);
                    collisionDetection.add(tile);
                }
            }
        }
        mapVersion = newVersion;
    }
    collisionDetection.update();
    // dt is in nanoseconds; convert to seconds for the velocity math below.
    final float timeSeconds = dt / 1e9f;
    // Process player input
    final Vector2f inputVector = getInputVector().mul(player.getSpeed() * timeSeconds);
    // Compute the motion for the tick
    Vector2f movement = inputVector;
    for (Collidable collidable : player.getCollisionList()) {
        // ghost collidables only report collisions, but don't actually collide
        if (collidable.isGhost()) {
            continue;
        }
        // Find the intersection of the collision (a box) and the direction
        final Intersection intersection = getIntersection(player, collidable);
        final Direction direction = getCollisionDirection(intersection, collidable);
        // Allow for a small amount of contact on the sides to prevent the player from getting stuck in adjacent tiles
        if (intersection.size.dot(direction.getPerpendicularUnit()) < PERPENDICULAR_CONTACT_THRESHOLD) {
            continue;
        }
        // Block the movement in the direction if sufficient contact
        movement = blockDirection(movement, direction.getUnit());
        // Attempt to shift the player to the nearest free tile when close to one to ease motion in tight spaces
        if (collidable instanceof Tile) {
            // Check if the percentage of collision is lower than a threshold, signifying that the player is colliding by a minimum amount
            if (intersection.size.dot(direction.getPerpendicularUnit()) / player.getCollisionBox().getSize().dot(direction.getPerpendicularUnit()) < SLIDING_CONTACT_THRESHOLD) {
                // Get the direction in which to attempt to shift as a unit
                final Vector2f offset = intersection.center.sub(collidable.getPosition());
                final Vector2f shiftDirection = direction.getPerpendicularUnit().mul(offset).normalize();
                // Check if we can shift, by looking for a path around the tile in the shift direction
                final Vector2f adjacentPosition = collidable.getPosition().add(shiftDirection);
                if (map.isTile(adjacentPosition, Air.class) && map.isTile(adjacentPosition.sub(direction.getUnit()), Air.class)) {
                    // Redirect the blocked motion towards the free path
                    movement = movement.add(shiftDirection.mul(inputVector.dot(direction.getUnit())));
                }
            }
        }
    }
    // Update player movement
    player.setPosition(player.getPosition().add(movement));
    player.setVelocity(movement.div(timeSeconds));
}
private Vector2f getInputVector() {
    // Accumulate each directional key's press time (converted from
    // nanoseconds to seconds) as a weighted sum of direction units.
    final KeyboardState keys = game.getInput().getKeyboardState();
    Vector2f sum = Vector2f.ZERO;
    for (Direction dir : Direction.values()) {
        final float seconds = keys.getAndClearPressTime(dir.getKey()) / 1e9f;
        sum = sum.add(dir.getUnit().mul(seconds));
    }
    // Normalizing the zero vector is undefined, so only normalize when
    // there was actual input.
    return sum.lengthSquared() > 0 ? sum.normalize() : sum;
}
@Override
public void onStop() {
    // Reset level state: forget the map version so the collidable tiles are
    // rebuilt on the next start, and drop all tracked entities.
    mapVersion = 0;
    clearEntities();
}
/**
 * Returns the player entity managed by this physics instance.
 *
 * @return The player
 */
public Player getPlayer() {
    return player;
}
/**
 * Returns the set of entities currently tracked. Note: this is the live
 * internal set, not a defensive copy.
 *
 * @return The entity set
 */
public Set<Entity> getEntities() {
    return entities;
}
/**
 * Returns the currently selected menu button, or null when the selection
 * index points past the end of the button list (e.g. no menu is active).
 */
public ButtonEntity getSelectedButton() {
    return selectedButtonIndex < buttonOrder.size() ? buttonOrder.get(selectedButtonIndex) : null;
}
/**
 * Removes from a movement vector any component along the given blocked
 * direction.
 *
 * @param movement The movement as a vector
 * @param unitDirection The unit direction to block. Must be a unit vector to function correctly!
 * @return The movement with all motion in the blocked direction removed
 */
public static Vector2f blockDirection(Vector2f movement, Vector2f unitDirection) {
    // No motion towards the blocked direction: nothing to remove.
    if (movement.dot(unitDirection) <= 0) {
        return movement;
    }
    // Zero out the component along the (axis-aligned) blocked direction.
    return movement.sub(movement.mul(unitDirection.abs()));
}
/**
 * Gets the direction of a collision from the intersection between the two
 * collided objects (see {@link #getIntersection(Collidable, Collidable)}).
 * The returned direction points towards the collided object.
 *
 * @param intersection The intersection from the collision
 * @param other The object that was collided
 * @return The direction of the collision
 */
public static Direction getCollisionDirection(Intersection intersection, Collidable other) {
    // The vector from the intersection center to the other object's
    // position points towards that object.
    return Direction.fromUnit(other.getPosition().sub(intersection.center));
}
/**
 * Computes the intersection box of two colliding objects. If the objects
 * are not actually colliding the result is undefined.
 *
 * @param object The first object of the collision
 * @param other The second object of the collision
 * @return An intersection object containing the collision information
 */
public static Intersection getIntersection(Collidable object, Collidable other) {
    // The overlap box is bounded by the smaller of the two max corners and
    // the larger of the two min corners.
    final Vector2f overlapMax = object.getBoxMaxPoint().min(other.getBoxMaxPoint());
    final Vector2f overlapMin = object.getBoxMinPoint().max(other.getBoxMinPoint());
    return new Intersection(overlapMax, overlapMin);
}
/**
 * Represents an intersection between two colliding objects.
 */
public static class Intersection {
    /**
     * The max point of the intersection box.
     */
    public final Vector2f max;
    /**
     * The min point of the intersection box.
     */
    public final Vector2f min;
    /**
     * The size of the intersection box as the diagonal vector.
     */
    public final Vector2f size;
    /**
     * The center of the intersection box (halfway up the diagonal).
     */
    public final Vector2f center;
    // Private: instances are only created via Physics.getIntersection().
    // Derived fields (size, center) are computed once here so callers can
    // read them without recomputation.
    private Intersection(Vector2f max, Vector2f min) {
        this.max = max;
        this.min = min;
        size = max.sub(min);
        center = min.add(size.div(2));
    }
}
}
|
Add level number to in-game level
|
src/main/java/ecse321/fall2014/group3/bomberman/physics/Physics.java
|
Add level number to in game level
|
|
Java
|
mit
|
b9714f1097739f388b95dc197aa982388daf948a
| 0
|
SquidDev-CC/CC-Tweaks,SquidDev-CC/CCTweaks
|
package org.squiddev.cctweaks.core.network;
import mcmultipart.multipart.IMultipart;
import net.minecraft.tileentity.TileEntity;
import net.minecraft.util.BlockPos;
import net.minecraft.util.EnumFacing;
import net.minecraft.world.IBlockAccess;
import net.minecraft.world.World;
import net.minecraftforge.fml.common.Optional;
import org.squiddev.cctweaks.api.IWorldPosition;
import org.squiddev.cctweaks.api.network.*;
import org.squiddev.cctweaks.core.McEvents;
import org.squiddev.cctweaks.core.network.controller.NetworkController;
import org.squiddev.cctweaks.integration.multipart.MultipartIntegration;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
/**
* Helper methods on networks
*/
/**
 * Helper methods on networks: adjacency discovery, joining/creating
 * network controllers, and deferred (scheduled) join/connect operations.
 */
public final class NetworkHelpers implements INetworkHelpers {
    // True when the block adjacent to position in the given direction
    // exposes a network node that accepts connections from the opposite side.
    @Override
    public boolean canConnect(IBlockAccess world, BlockPos position, EnumFacing direction) {
        IWorldNetworkNode node = NetworkAPI.registry().getNode(world, position.offset(direction));
        return node != null && node.canConnect(direction.getOpposite());
    }
    @Override
    public boolean canConnect(IWorldPosition pos, EnumFacing direction) {
        return canConnect(pos.getBlockAccess(), pos.getPosition(), direction);
    }
    @Override
    public Set<INetworkNode> getAdjacentNodes(IWorldNetworkNode node) {
        return getAdjacentNodes(node, true);
    }
    // Collects the nodes in the six neighbouring positions that can form a
    // mutual connection with this node. When checkExists is true and we have
    // a real World, unloaded chunks are skipped to avoid forcing chunk loads.
    @Override
    public Set<INetworkNode> getAdjacentNodes(IWorldNetworkNode node, boolean checkExists) {
        IWorldPosition position = node.getPosition();
        IBlockAccess access = position.getBlockAccess();
        // It might happen
        if (access == null) return Collections.emptySet();
        Set<INetworkNode> nodes = new HashSet<INetworkNode>();
        World world = checkExists && access instanceof World ? (World) access : null;
        BlockPos blockPos = position.getPosition();
        for (EnumFacing direction : EnumFacing.VALUES) {
            if (node.canConnect(direction)) {
                BlockPos pos = blockPos.offset(direction);
                if (world == null || world.isBlockLoaded(pos)) {
                    IWorldNetworkNode neighbour = NetworkAPI.registry().getNode(access, pos);
                    // Connections must be accepted on both sides.
                    if (neighbour != null && neighbour.canConnect(direction.getOpposite())) {
                        nodes.add(neighbour);
                    }
                }
            }
        }
        return nodes;
    }
    @Override
    public void joinOrCreateNetwork(IWorldNetworkNode node) {
        joinOrCreateNetwork(node, getAdjacentNodes(node));
    }
    // First merge into any neighbour's existing network(s); only if no
    // neighbour had one, start a fresh network and connect outwards from it.
    @Override
    public void joinOrCreateNetwork(INetworkNode node, Set<? extends INetworkNode> connections) {
        for (INetworkNode neighbour : connections) {
            if (neighbour.getAttachedNetwork() != null) {
                INetworkController network = neighbour.getAttachedNetwork();
                network.formConnection(neighbour, node);
            }
        }
        if (node.getAttachedNetwork() == null) {
            joinNewNetwork(node);
            for (INetworkNode neighbour : connections) {
                node.getAttachedNetwork().formConnection(node, neighbour);
            }
        }
    }
    // Detaches the node from its current network (if any) and places it in a
    // brand-new single-node network. NetworkController attaches itself in
    // its constructor, hence the bare "new".
    @Override
    public void joinNewNetwork(INetworkNode node) {
        if (node.getAttachedNetwork() != null) {
            node.getAttachedNetwork().removeNode(node);
        }
        new NetworkController(node);
    }
    // Defers the join to the next tick via McEvents, so the world is in a
    // consistent state when adjacency is scanned.
    @Override
    public void scheduleJoin(final IWorldNetworkNode node) {
        if (node == null) throw new IllegalArgumentException("node cannot be null");
        McEvents.schedule(new Runnable() {
            @Override
            public void run() {
                joinOrCreateNetwork(node);
            }
        });
    }
    // As above, but only joins if the tile entity is still present in the
    // world when the scheduled task runs (it may have been removed/replaced).
    @Override
    public void scheduleJoin(final IWorldNetworkNode node, final TileEntity tile) {
        if (node == null) throw new IllegalArgumentException("node cannot be null");
        McEvents.schedule(new Runnable() {
            @Override
            public void run() {
                World world = tile.getWorld();
                if (world != null && world.getTileEntity(tile.getPos()) == tile) {
                    joinOrCreateNetwork(node);
                }
            }
        });
    }
    public static void scheduleConnect(final AbstractWorldNode node) {
        if (node == null) throw new IllegalArgumentException("node cannot be null");
        McEvents.schedule(new Runnable() {
            @Override
            public void run() {
                node.connect();
            }
        });
    }
    // Guarded variant: only connect if the tile entity still exists in the world.
    public static void scheduleConnect(final AbstractWorldNode node, final TileEntity tile) {
        if (node == null) throw new IllegalArgumentException("node cannot be null");
        McEvents.schedule(new Runnable() {
            @Override
            public void run() {
                World world = tile.getWorld();
                if (world != null && world.getTileEntity(tile.getPos()) == tile) {
                    node.connect();
                }
            }
        });
    }
    // Only loaded when the multipart mod is present (see @Optional.Method).
    @Optional.Method(modid = MultipartIntegration.MOD_NAME)
    public static void scheduleConnect(final AbstractWorldNode node, final IMultipart part) {
        if (node == null) throw new IllegalArgumentException("node cannot be null");
        McEvents.schedule(new Runnable() {
            @Override
            public void run() {
                if (part.getWorld() != null) {
                    node.connect();
                }
            }
        });
    }
}
|
src/main/java/org/squiddev/cctweaks/core/network/NetworkHelpers.java
|
package org.squiddev.cctweaks.core.network;
import mcmultipart.multipart.IMultipart;
import net.minecraft.tileentity.TileEntity;
import net.minecraft.util.BlockPos;
import net.minecraft.util.EnumFacing;
import net.minecraft.world.IBlockAccess;
import net.minecraft.world.World;
import net.minecraftforge.fml.common.Optional;
import org.squiddev.cctweaks.api.IWorldPosition;
import org.squiddev.cctweaks.api.network.*;
import org.squiddev.cctweaks.core.McEvents;
import org.squiddev.cctweaks.core.network.controller.NetworkController;
import org.squiddev.cctweaks.integration.multipart.MultipartIntegration;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
/**
* Helper methods on networks
*/
/**
 * Helper methods on networks: adjacency discovery, joining/creating
 * network controllers, and deferred (scheduled) join/connect operations.
 */
public final class NetworkHelpers implements INetworkHelpers {
    // True when the block adjacent to position in the given direction
    // exposes a network node that accepts connections from the opposite side.
    @Override
    public boolean canConnect(IBlockAccess world, BlockPos position, EnumFacing direction) {
        IWorldNetworkNode node = NetworkAPI.registry().getNode(world, position.offset(direction));
        return node != null && node.canConnect(direction.getOpposite());
    }
    @Override
    public boolean canConnect(IWorldPosition pos, EnumFacing direction) {
        return canConnect(pos.getBlockAccess(), pos.getPosition(), direction);
    }
    @Override
    public Set<INetworkNode> getAdjacentNodes(IWorldNetworkNode node) {
        return getAdjacentNodes(node, true);
    }
    // Collects the nodes in the six neighbouring positions that can form a
    // mutual connection with this node. When checkExists is true and we have
    // a real World, unloaded chunks are skipped to avoid forcing chunk loads.
    @Override
    public Set<INetworkNode> getAdjacentNodes(IWorldNetworkNode node, boolean checkExists) {
        IWorldPosition position = node.getPosition();
        IBlockAccess access = position.getBlockAccess();
        // It might happen
        if (access == null) return Collections.emptySet();
        Set<INetworkNode> nodes = new HashSet<INetworkNode>();
        World world = checkExists && access instanceof World ? (World) access : null;
        BlockPos blockPos = position.getPosition();
        for (EnumFacing direction : EnumFacing.VALUES) {
            if (node.canConnect(direction)) {
                BlockPos pos = blockPos.offset(direction);
                if (world == null || world.isBlockLoaded(pos)) {
                    IWorldNetworkNode neighbour = NetworkAPI.registry().getNode(access, pos);
                    // Connections must be accepted on both sides.
                    if (neighbour != null && neighbour.canConnect(direction.getOpposite())) {
                        nodes.add(neighbour);
                    }
                }
            }
        }
        return nodes;
    }
    @Override
    public void joinOrCreateNetwork(IWorldNetworkNode node) {
        joinOrCreateNetwork(node, getAdjacentNodes(node));
    }
    // First merge into any neighbour's existing network(s); only if no
    // neighbour had one, start a fresh network and connect outwards from it.
    @Override
    public void joinOrCreateNetwork(INetworkNode node, Set<? extends INetworkNode> connections) {
        for (INetworkNode neighbour : connections) {
            if (neighbour.getAttachedNetwork() != null) {
                INetworkController network = neighbour.getAttachedNetwork();
                network.formConnection(neighbour, node);
            }
        }
        if (node.getAttachedNetwork() == null) {
            joinNewNetwork(node);
            for (INetworkNode neighbour : connections) {
                node.getAttachedNetwork().formConnection(node, neighbour);
            }
        }
    }
    // Detaches the node from its current network (if any) and places it in a
    // brand-new single-node network. NetworkController attaches itself in
    // its constructor, hence the bare "new".
    @Override
    public void joinNewNetwork(INetworkNode node) {
        if (node.getAttachedNetwork() != null) {
            node.getAttachedNetwork().removeNode(node);
        }
        new NetworkController(node);
    }
    // Defers the join to the next tick via McEvents, so the world is in a
    // consistent state when adjacency is scanned.
    @Override
    public void scheduleJoin(final IWorldNetworkNode node) {
        if (node == null) throw new IllegalArgumentException("node cannot be null");
        McEvents.schedule(new Runnable() {
            @Override
            public void run() {
                joinOrCreateNetwork(node);
            }
        });
    }
    // As above, but only joins if the tile entity is still present in the
    // world when the scheduled task runs (it may have been removed/replaced).
    @Override
    public void scheduleJoin(final IWorldNetworkNode node, final TileEntity tile) {
        if (node == null) throw new IllegalArgumentException("node cannot be null");
        McEvents.schedule(new Runnable() {
            @Override
            public void run() {
                World world = tile.getWorld();
                if (world != null && world.getTileEntity(tile.getPos()) == tile) {
                    // BUGFIX: was joinNewNetwork(node), which forced a freshly
                    // placed node into an isolated single-node network instead
                    // of merging with adjacent networks ("nodes not binding
                    // when being placed"). Must match scheduleJoin(node) above.
                    joinOrCreateNetwork(node);
                }
            }
        });
    }
    public static void scheduleConnect(final AbstractWorldNode node) {
        if (node == null) throw new IllegalArgumentException("node cannot be null");
        McEvents.schedule(new Runnable() {
            @Override
            public void run() {
                node.connect();
            }
        });
    }
    // Guarded variant: only connect if the tile entity still exists in the world.
    public static void scheduleConnect(final AbstractWorldNode node, final TileEntity tile) {
        if (node == null) throw new IllegalArgumentException("node cannot be null");
        McEvents.schedule(new Runnable() {
            @Override
            public void run() {
                World world = tile.getWorld();
                if (world != null && world.getTileEntity(tile.getPos()) == tile) {
                    node.connect();
                }
            }
        });
    }
    // Only loaded when the multipart mod is present (see @Optional.Method).
    @Optional.Method(modid = MultipartIntegration.MOD_NAME)
    public static void scheduleConnect(final AbstractWorldNode node, final IMultipart part) {
        if (node == null) throw new IllegalArgumentException("node cannot be null");
        McEvents.schedule(new Runnable() {
            @Override
            public void run() {
                if (part.getWorld() != null) {
                    node.connect();
                }
            }
        });
    }
}
|
Fix nodes not binding when being placed
This is so embarrassing
|
src/main/java/org/squiddev/cctweaks/core/network/NetworkHelpers.java
|
Fix nodes not binding when being placed
|
|
Java
|
lgpl-2.1
|
4a3c844c3734021917cfe3b35222599ad013e731
| 0
|
vaibhav345/lenskit,amaliujia/lenskit,kluver/lenskit,linjunleo/lenskit,vijayvani/Lenskit,tajinder-txstate/lenskit,blankazucenalg/lenskit,kluver/lenskit,kluver/lenskit,chrysalag/lenskit,martinlaz/lenskit,tajinder-txstate/lenskit,vaibhav345/lenskit,chrysalag/lenskit,martinlaz/lenskit,tajinder-txstate/lenskit,aglne/lenskit,kluver/lenskit,vaibhav345/lenskit,aglne/lenskit,binweiwu/lenskit,kluver/lenskit,amaliujia/lenskit,blankazucenalg/lenskit,vijayvani/Lenskit,vaibhav345/lenskit,binweiwu/lenskit,tajinder-txstate/lenskit,linjunleo/lenskit
|
/*
* LensKit, an open source recommender systems toolkit.
* Copyright 2010-2013 Regents of the University of Minnesota and contributors
* Work on LensKit has been funded by the National Science Foundation under
* grants IIS 05-34939, 08-08692, 08-12148, and 10-17697.
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
* details.
*
* You should have received a copy of the GNU General Public License along with
* this program; if not, write to the Free Software Foundation, Inc., 51
* Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
package org.grouplens.lenskit.knn.item;
import org.grouplens.lenskit.ItemScorer;
import org.grouplens.lenskit.basic.AbstractItemScorer;
import org.grouplens.lenskit.data.Event;
import org.grouplens.lenskit.data.UserHistory;
import org.grouplens.lenskit.data.dao.DataAccessObject;
import org.grouplens.lenskit.data.history.UserHistorySummarizer;
import org.grouplens.lenskit.knn.item.model.ItemItemModel;
import org.grouplens.lenskit.symbols.Symbol;
import org.grouplens.lenskit.transform.normalize.UserVectorNormalizer;
import org.grouplens.lenskit.transform.normalize.VectorTransformation;
import org.grouplens.lenskit.vectors.MutableSparseVector;
import org.grouplens.lenskit.vectors.SparseVector;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.annotation.Nonnull;
import javax.inject.Inject;
/**
* Score items using an item-item CF model. User ratings are <b>not</b> supplied
* as default preferences.
*
* @author <a href="http://www.grouplens.org">GroupLens Research</a>
*/
public class ItemItemScorer extends AbstractItemScorer implements ItemScorer {
    private static final Logger logger = LoggerFactory.getLogger(ItemItemScorer.class);
    // Channel symbol under which the neighborhood size used for each score
    // may be recorded.
    public static final Symbol NEIGHBORHOOD_SIZE_SYMBOL =
            Symbol.of("org.grouplens.lenskit.knn.item.neighborhoodSize");
    // The item-item similarity model used to find neighborhoods.
    protected final ItemItemModel model;
    @Nonnull
    protected final UserVectorNormalizer normalizer;
    // Summarizes a user's event history into a sparse rating-like vector.
    protected final UserHistorySummarizer summarizer;
    @Nonnull
    protected final NeighborhoodScorer scorer;
    @Nonnull
    protected final ItemScoreAlgorithm algorithm;
    /**
     * Construct a new item-item scorer.
     *
     * @param dao    The DAO.
     * @param m      The model
     * @param sum    The history summarizer.
     * @param scorer The neighborhood scorer.
     * @param algo   The item scoring algorithm. It converts neighborhoods to scores.
     * @param norm   The user vector normalizer applied before scoring and
     *               un-applied afterwards.
     */
    @Inject
    public ItemItemScorer(DataAccessObject dao, ItemItemModel m,
                          UserHistorySummarizer sum,
                          NeighborhoodScorer scorer,
                          ItemScoreAlgorithm algo,
                          UserVectorNormalizer norm) {
        super(dao);
        model = m;
        summarizer = sum;
        this.scorer = scorer;
        algorithm = algo;
        normalizer = norm;
        logger.info("building item-item scorer with scorer {}", scorer);
    }
    @Nonnull
    public UserVectorNormalizer getNormalizer() {
        return normalizer;
    }
    /**
     * Score items by computing predicted ratings.
     *
     * @see ItemScoreAlgorithm#scoreItems(ItemItemModel, SparseVector, MutableSparseVector, NeighborhoodScorer)
     */
    @Override
    public void score(@Nonnull UserHistory<? extends Event> history,
                      @Nonnull MutableSparseVector scores) {
        final long uid = history.getUserId();
        // Summarize the history, then score in the normalized domain and
        // transform the results back afterwards.
        SparseVector summary = summarizer.summarize(history);
        VectorTransformation transform = normalizer.makeTransformation(uid, summary);
        MutableSparseVector normed = summary.mutableCopy();
        transform.apply(normed);
        scores.clear();
        algorithm.scoreItems(model, normed, scores, scorer);
        // untransform the scores
        transform.unapply(scores);
    }
}
|
lenskit-knn/src/main/java/org/grouplens/lenskit/knn/item/ItemItemScorer.java
|
/*
* LensKit, an open source recommender systems toolkit.
* Copyright 2010-2013 Regents of the University of Minnesota and contributors
* Work on LensKit has been funded by the National Science Foundation under
* grants IIS 05-34939, 08-08692, 08-12148, and 10-17697.
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
* details.
*
* You should have received a copy of the GNU General Public License along with
* this program; if not, write to the Free Software Foundation, Inc., 51
* Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
package org.grouplens.lenskit.knn.item;
import org.grouplens.lenskit.ItemScorer;
import org.grouplens.lenskit.baseline.BaselinePredictor;
import org.grouplens.lenskit.basic.AbstractItemScorer;
import org.grouplens.lenskit.data.Event;
import org.grouplens.lenskit.data.UserHistory;
import org.grouplens.lenskit.data.dao.DataAccessObject;
import org.grouplens.lenskit.data.history.UserHistorySummarizer;
import org.grouplens.lenskit.knn.item.model.ItemItemModel;
import org.grouplens.lenskit.symbols.Symbol;
import org.grouplens.lenskit.transform.normalize.UserVectorNormalizer;
import org.grouplens.lenskit.transform.normalize.VectorTransformation;
import org.grouplens.lenskit.vectors.MutableSparseVector;
import org.grouplens.lenskit.vectors.SparseVector;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import javax.inject.Inject;
import java.util.Collection;
/**
* Score items using an item-item CF model. User ratings are <b>not</b> supplied
* as default preferences.
*
* @author <a href="http://www.grouplens.org">GroupLens Research</a>
* @see ItemItemRatingPredictor
*/
public class ItemItemScorer extends AbstractItemScorer implements ItemScorer {
    private static final Logger logger = LoggerFactory.getLogger(ItemItemScorer.class);
    // Channel symbol under which the neighborhood size used for each score
    // may be recorded.
    public static final Symbol NEIGHBORHOOD_SIZE_SYMBOL =
            Symbol.of("org.grouplens.lenskit.knn.item.neighborhoodSize");
    // The item-item similarity model used to find neighborhoods.
    protected final ItemItemModel model;
    @Nonnull
    protected final UserVectorNormalizer normalizer;
    // Summarizes a user's event history into a sparse rating-like vector.
    protected final UserHistorySummarizer summarizer;
    @Nonnull
    protected final NeighborhoodScorer scorer;
    @Nonnull
    protected final ItemScoreAlgorithm algorithm;
    // Optional fallback: supplies scores for items the CF algorithm could not score.
    @Nullable
    private final BaselinePredictor baseline;
    /**
     * Construct a new item-item scorer.
     *
     * @param dao    The DAO.
     * @param m      The model
     * @param sum    The history summarizer.
     * @param scorer The neighborhood scorer.
     * @param algo   The item scoring algorithm. It converts neighborhoods to scores.
     * @param norm   The user vector normalizer applied before scoring and
     *               un-applied afterwards.
     * @param bl     The baseline scorer. If present, it will be used to supply scores that the
     *               item-item CF algorithm cannot.
     */
    @Inject
    public ItemItemScorer(DataAccessObject dao, ItemItemModel m,
                          UserHistorySummarizer sum,
                          NeighborhoodScorer scorer,
                          ItemScoreAlgorithm algo,
                          UserVectorNormalizer norm,
                          @Nullable BaselinePredictor bl) {
        super(dao);
        model = m;
        summarizer = sum;
        this.scorer = scorer;
        algorithm = algo;
        normalizer = norm;
        baseline = bl;
        logger.info("building item-item scorer with scorer {}", scorer);
    }
    @Nonnull
    public UserVectorNormalizer getNormalizer() {
        return normalizer;
    }
    /**
     * Score items by computing predicted ratings.
     *
     * @see ItemScoreAlgorithm#scoreItems(ItemItemModel, SparseVector, MutableSparseVector, NeighborhoodScorer)
     */
    @Override
    public void score(@Nonnull UserHistory<? extends Event> history,
                      @Nonnull MutableSparseVector scores) {
        final long uid = history.getUserId();
        // Summarize the history, then score in the normalized domain and
        // transform the results back afterwards.
        SparseVector summary = summarizer.summarize(history);
        VectorTransformation transform = normalizer.makeTransformation(uid, summary);
        MutableSparseVector normed = summary.mutableCopy();
        transform.apply(normed);
        scores.clear();
        algorithm.scoreItems(model, normed, scores, scorer);
        // untransform the scores
        transform.unapply(scores);
        // Fill in items the CF algorithm left unscored with baseline predictions
        // (note: applied to the un-normalized summary).
        if (baseline != null) {
            baseline.predict(uid, summary, scores, false);
        }
    }
}
|
Remove baseline from item-item scorer (refs #311)
|
lenskit-knn/src/main/java/org/grouplens/lenskit/knn/item/ItemItemScorer.java
|
Remove baseline from item-item scorer (refs #311)
|
|
Java
|
lgpl-2.1
|
dcfb75ab05136ece99f653a34fdc14d562d1adcf
| 0
|
kihira/PlayerRugs
|
package uk.kihira.playerrugs.client;
import com.mojang.authlib.GameProfile;
import net.minecraft.client.entity.AbstractClientPlayer;
import net.minecraft.client.renderer.OpenGlHelper;
import net.minecraft.client.renderer.RenderHelper;
import net.minecraft.client.renderer.Tessellator;
import net.minecraft.client.renderer.tileentity.TileEntitySpecialRenderer;
import net.minecraft.tileentity.TileEntity;
import net.minecraft.util.ResourceLocation;
import org.lwjgl.opengl.GL11;
import org.lwjgl.opengl.GL12;
import uk.kihira.playerrugs.common.tileentities.PlayerRugTE;
/**
 * Tile entity special renderer for player rugs: draws the player's head
 * using a skull model and the body/arms/legs as flat textured quads mapped
 * from the player's skin texture.
 */
public class PlayerRugTESR extends TileEntitySpecialRenderer {
    // Skull model textured from a 64x32 player skin.
    public EnhancedSkullModel headModel = new EnhancedSkullModel(0, 0, 64, 32);
    @Override
    public void renderTileEntityAt(TileEntity tileEntity, double xPos, double yPos, double zPos, float partialTicks) {
        GameProfile profile = ((PlayerRugTE) tileEntity).playerProfile;
        // Fall back to the default Steve skin when no profile is set.
        ResourceLocation playerSkin = AbstractClientPlayer.locationStevePng;
        // Block metadata encodes both rotation (multiples of 90 degrees) and
        // whether the rug stands upright (metadata >= 4).
        float angle = tileEntity.getBlockMetadata()*-90f;
        boolean standing = tileEntity.getBlockMetadata() >= 4;
        if (profile != null) {
            /* SkinManager skinManager = Minecraft.getMinecraft().func_152342_ad();
            Map skinMap = skinManager.func_152788_a(profile);
            // Attempt to load players skin if it is loaded or exists
            if (skinMap.containsKey(MinecraftProfileTexture.Type.SKIN)) {
                playerSkin = skinManager.func_152792_a((MinecraftProfileTexture) skinMap.get(MinecraftProfileTexture.Type.SKIN), MinecraftProfileTexture.Type.SKIN);
            }*/
            // Resolve and (if necessary) start downloading the named player's skin.
            playerSkin = AbstractClientPlayer.getLocationSkin(profile.getName());
            AbstractClientPlayer.getDownloadImageSkin(playerSkin, profile.getName());
        }
        bindTexture(playerSkin);
        // Render head
        GL11.glPushMatrix();
        GL11.glTranslated(xPos + 0.5f, yPos + (standing ? 0.4999f: 0f), zPos + 0.5f);
        GL11.glRotatef(angle, 0f, 1f, 0f);
        GL11.glTranslated(0, 0, standing ? 8f/16f: -9f/16f);
        GL11.glEnable(GL12.GL_RESCALE_NORMAL);
        GL11.glScalef(-1.0F, -1.0F, 1.0F);
        GL11.glEnable(GL11.GL_ALPHA_TEST);
        GL11.glDisable(GL11.GL_LIGHTING);
        if (tileEntity.getWorldObj() != null) {
            // Apply the block's combined sky/block light to the lightmap
            // (packed value split into two 16-bit halves).
            int i = tileEntity.getWorldObj().getLightBrightnessForSkyBlocks(tileEntity.xCoord, tileEntity.yCoord, tileEntity.zCoord, 0);
            int j = i % 65536;
            int k = i / 65536;
            OpenGlHelper.setLightmapTextureCoords(OpenGlHelper.lightmapTexUnit, (float)j / 1.0F * 0.95f, (float)k / 1.0F * 0.95f);
        }
        headModel.render(null, 0.0F, 0.0F, 0.0F, 0f, 0.0F, 0.0625f);
        GL11.glPopMatrix();
        // Render the flat body parts; slight y offset avoids z-fighting with the floor.
        GL11.glPushMatrix();
        GL11.glTranslated(xPos+0.5f, yPos+0.001d, zPos+0.5f);
        GL11.glRotatef(angle, 0, 1, 0);
        float texHeight = 32;
        float texWidth = 64;
        Tessellator tess = Tessellator.instance;
        RenderHelper.disableStandardItemLighting();
        if (standing) {
            // Rotate the whole body upright against a wall.
            GL11.glRotatef(90f, 1f, 0f, 0f);
            GL11.glTranslatef(0f, 7f/16f, -1f/16f);
        }
        // Offsets/sizes are in 1/16 block units; UV coordinates below pick the
        // corresponding limb regions out of the 64x32 skin texture.
        float xOffset = 4f/16f-0.5f;
        float zOffset = 5f/16f-0.5f;
        float thickness = 1f/16f;
        float yOffset = 1f/16f;
        tess.startDrawingQuads();
        // Left Arm
        if (standing) {
            xOffset = -0.5f;
            zOffset = 1f/16f-0.5f;
            buildBodyPart(xOffset, yOffset, zOffset, 4f/16f, thickness, 12f/16f, 44f/texWidth, 20f/texHeight, 48f/ texWidth, 32f/texHeight, texWidth, texHeight);
        }
        else {
            buildBodyPart(xOffset, yOffset, zOffset, -12f/16f, thickness, -4f/16f, 52f/ texWidth, 20f/texHeight, 56f/texWidth, 32f/texHeight, texWidth, texHeight);
        }
        // Right Arm
        xOffset = 12f/16f-0.5f;
        zOffset = 1f/16f-0.5f;
        if (standing) {
            buildBodyPart(xOffset, yOffset, zOffset, 4f/16f, thickness, 12f/16f, 48f/texWidth, 20f/texHeight, 44f/texWidth, 32f/texHeight, texWidth, texHeight);
        }
        else {
            buildBodyPart(xOffset, yOffset, zOffset, 12f/16f, thickness, 4f/16f, 56f/texWidth, 20f/texHeight, 52f/texWidth, 32f/texHeight, texWidth, texHeight);
        }
        // Body
        xOffset = 0.25f-0.5f;
        zOffset = 1f/16f-0.5f;
        buildBodyPart(xOffset, yOffset, zOffset, 8f/16f, thickness, 12f/16f, (standing ? 20f : 32f)/texWidth, 20f/texHeight, (standing ? 28f : 40f)/texWidth, 32f/texHeight, texWidth, texHeight);
        // Left Leg
        xOffset = 0.25f-0.5f;
        zOffset = 13f/16f-0.5f;
        buildBodyPart(xOffset, yOffset, zOffset, 4f/16f, thickness, 12f/16f, (standing ? 8f : 16f)/texWidth, 20f/texHeight, (standing ? 4f : 12f)/texWidth, 32f/texHeight, texWidth, texHeight);
        // Right Leg
        xOffset = 0.5f-0.5f;
        zOffset = 13f/16f-0.5f;
        buildBodyPart(xOffset, yOffset, zOffset, 4f/16f, thickness, 12f/16f, (standing ? 4f : 12f)/texWidth, 20f/texHeight, (standing ? 8f : 16f)/texWidth, 32f/texHeight, texWidth, texHeight);
        tess.draw();
        RenderHelper.enableStandardItemLighting();
        GL11.glEnable(GL11.GL_LIGHTING);
        GL11.glPopMatrix();
    }
    // Emits the quads for one flat body part (top face, bottom face, four
    // side faces) into the tessellator. Positions/sizes are in block units;
    // UV bounds are normalized texture coordinates into the skin.
    // NOTE(review): the two branches appear to differ only in UV orientation
    // (texture rotated 90 degrees when the part is wider than long, e.g. lying
    // arms) — confirm against in-game rendering before changing anything here.
    private void buildBodyPart(float xPos, float yPos, float zPos, float width, float depth, float length, float minU, float minV, float maxU, float maxV, float texWidth, float texHeight) {
        Tessellator tess = Tessellator.instance;
        // Part thickness expressed in texels (texture units are 1/16 block).
        float texDepth = depth*16f;
        // This if is used if texture should be rotated as width would be longer then length (used for arms)
        if (Math.abs(width) > Math.abs(length)) {
            // Draws base texture
            tess.addVertexWithUV(xPos, yPos, zPos, minU, minV);
            tess.addVertexWithUV(xPos, yPos, zPos+length, maxU, minV);
            tess.addVertexWithUV(xPos+width, yPos, zPos+length, maxU, maxV);
            tess.addVertexWithUV(xPos+width, yPos, zPos, minU, maxV);
            tess.addVertexWithUV(xPos+width, yPos-depth, zPos, minU, maxV);
            tess.addVertexWithUV(xPos+width, yPos-depth, zPos+length, maxU, maxV);
            tess.addVertexWithUV(xPos, yPos-depth, zPos+length, maxU, minV);
            tess.addVertexWithUV(xPos, yPos-depth, zPos, minU, minV);
            // Draws sides
            tess.addVertexWithUV(xPos, yPos-depth, zPos, minU, minV+(texDepth/texHeight));
            tess.addVertexWithUV(xPos, yPos-depth, zPos+length, maxU, minV+(texDepth/texHeight));
            tess.addVertexWithUV(xPos, yPos, zPos+length, maxU, minV);
            tess.addVertexWithUV(xPos, yPos, zPos, minU, minV);
            float uUpper = maxU + ((minU > maxU) ? (texDepth/texWidth) : -(texDepth/texWidth));
            tess.addVertexWithUV(xPos, yPos-depth, zPos+length, uUpper, minV);
            tess.addVertexWithUV(xPos+width, yPos-depth, zPos+length, uUpper, maxV);
            tess.addVertexWithUV(xPos+width, yPos, zPos+length, maxU, maxV);
            tess.addVertexWithUV(xPos, yPos, zPos+length, maxU, minV);
            tess.addVertexWithUV(xPos+width, yPos-depth, zPos+length, maxU, maxV-(texDepth/texHeight));
            tess.addVertexWithUV(xPos+width, yPos-depth, zPos, minU, maxV-(texDepth/texHeight));
            tess.addVertexWithUV(xPos+width, yPos, zPos, minU, maxV);
            tess.addVertexWithUV(xPos+width, yPos, zPos+length, maxU, maxV);
            uUpper = minU + ((minU < maxU) ? (texDepth/texWidth) : -(texDepth/texWidth));
            tess.addVertexWithUV(xPos+width, yPos-depth, zPos, minU, maxV);
            tess.addVertexWithUV(xPos, yPos-depth, zPos, minU, minV);
            tess.addVertexWithUV(xPos, yPos, zPos, uUpper, minV);
            tess.addVertexWithUV(xPos+width, yPos, zPos, uUpper, maxV);
        }
        else {
            // Draws base texture
            tess.addVertexWithUV(xPos, yPos, zPos, minU, minV);
            tess.addVertexWithUV(xPos, yPos, zPos+length, minU, maxV);
            tess.addVertexWithUV(xPos+width, yPos, zPos+length, maxU, maxV);
            tess.addVertexWithUV(xPos+width, yPos, zPos, maxU, minV);
            tess.addVertexWithUV(xPos+width, yPos-depth, zPos, maxU, minV);
            tess.addVertexWithUV(xPos+width, yPos-depth, zPos+length, maxU, maxV);
            tess.addVertexWithUV(xPos, yPos-depth, zPos+length, minU, maxV);
            tess.addVertexWithUV(xPos, yPos-depth, zPos, minU, minV);
            // Draws sides
            float uUpper = minU + ((minU < maxU) ? (texDepth/texWidth) : -(texDepth/texWidth));
            tess.addVertexWithUV(xPos, yPos-depth, zPos, minU, minV);
            tess.addVertexWithUV(xPos, yPos-depth, zPos+length, minU, maxV);
            tess.addVertexWithUV(xPos, yPos, zPos+length, uUpper, maxV);
            tess.addVertexWithUV(xPos, yPos, zPos, uUpper, minV);
            tess.addVertexWithUV(xPos, yPos-depth, zPos+length, minU, maxV);
            tess.addVertexWithUV(xPos+width, yPos-depth, zPos+length, maxU, maxV);
            tess.addVertexWithUV(xPos+width, yPos, zPos+length, maxU, maxV-(texDepth/texHeight));
            tess.addVertexWithUV(xPos, yPos, zPos+length, minU, maxV-(texDepth/texHeight));
            tess.addVertexWithUV(xPos+width, yPos-depth, zPos+length, maxU, maxV);
            tess.addVertexWithUV(xPos+width, yPos-depth, zPos, maxU, minV);
            tess.addVertexWithUV(xPos+width, yPos, zPos, maxU-(texDepth/texWidth), minV);
            tess.addVertexWithUV(xPos+width, yPos, zPos+length, maxU-(texDepth/texWidth), maxV);
            tess.addVertexWithUV(xPos+width, yPos-depth, zPos, minU, minV+(texDepth/texHeight));
            tess.addVertexWithUV(xPos, yPos-depth, zPos, maxU, minV+(texDepth/texHeight));
            tess.addVertexWithUV(xPos, yPos, zPos, maxU, minV);
            tess.addVertexWithUV(xPos+width, yPos, zPos, minU, minV);
        }
    }
}
|
src/main/java/uk/kihira/playerrugs/client/PlayerRugTESR.java
|
package uk.kihira.playerrugs.client;
import com.mojang.authlib.GameProfile;
import net.minecraft.client.entity.AbstractClientPlayer;
import net.minecraft.client.renderer.OpenGlHelper;
import net.minecraft.client.renderer.RenderHelper;
import net.minecraft.client.renderer.Tessellator;
import net.minecraft.client.renderer.tileentity.TileEntitySpecialRenderer;
import net.minecraft.tileentity.TileEntity;
import net.minecraft.util.ResourceLocation;
import org.lwjgl.opengl.GL11;
import org.lwjgl.opengl.GL12;
import uk.kihira.playerrugs.common.tileentities.PlayerRugTE;
/**
 * Tile-entity special renderer for a "player rug" block: draws the owner's
 * head using {@link EnhancedSkullModel} and then tessellates the arms, body
 * and legs as flat textured slabs taken from the player's 64x32 skin.
 *
 * NOTE(review): the block metadata appears to encode both facing (angle =
 * metadata * -90) and, for values >= 4, a "standing" variant — confirm
 * against the block implementation.
 */
public class PlayerRugTESR extends TileEntitySpecialRenderer {

    // Skull model reused for every rug; texture offset (0,0) on a 64x32 sheet.
    public EnhancedSkullModel headModel = new EnhancedSkullModel(0, 0, 64, 32);

    /**
     * Renders the rug at the given world-relative coordinates.
     *
     * @param tileEntity   the {@code PlayerRugTE} being rendered
     * @param xPos/yPos/zPos render-space position of the block
     * @param partialTicks frame interpolation factor (unused here)
     */
    @Override
    public void renderTileEntityAt(TileEntity tileEntity, double xPos, double yPos, double zPos, float partialTicks) {
        GameProfile profile = ((PlayerRugTE) tileEntity).playerProfile;
        // Fall back to the default Steve skin when no profile is attached.
        ResourceLocation playerSkin = AbstractClientPlayer.locationStevePng;
        float angle = tileEntity.getBlockMetadata()*-90f;
        boolean standing = tileEntity.getBlockMetadata() >= 4;

        if (profile != null) {
            /* SkinManager skinManager = Minecraft.getMinecraft().func_152342_ad();
            Map skinMap = skinManager.func_152788_a(profile);
            // Attempt to load players skin if it is loaded or exists
            if (skinMap.containsKey(MinecraftProfileTexture.Type.SKIN)) {
                playerSkin = skinManager.func_152792_a((MinecraftProfileTexture) skinMap.get(MinecraftProfileTexture.Type.SKIN), MinecraftProfileTexture.Type.SKIN);
            }*/
            // Resolve the player's skin and kick off the async download if needed.
            playerSkin = AbstractClientPlayer.getLocationSkin(profile.getName());
            AbstractClientPlayer.getDownloadImageSkin(playerSkin, profile.getName());
        }
        bindTexture(playerSkin);

        // Render head
        GL11.glPushMatrix();
        GL11.glTranslated(xPos + 0.5f, yPos + (standing ? 0.4999f: 0f), zPos + 0.5f);
        GL11.glRotatef(angle, 0f, 1f, 0f);
        GL11.glTranslated(0, 0, standing ? 8f/16f: -9f/16f);
        GL11.glEnable(GL12.GL_RESCALE_NORMAL);
        GL11.glScalef(-1.0F, -1.0F, 1.0F);
        GL11.glEnable(GL11.GL_ALPHA_TEST);
        GL11.glDisable(GL11.GL_LIGHTING);
        if (tileEntity.getWorldObj() != null) {
            // Apply the combined sky/block light of the rug's position to the head.
            int i = tileEntity.getWorldObj().getLightBrightnessForSkyBlocks(tileEntity.xCoord, tileEntity.yCoord, tileEntity.zCoord, 0);
            int j = i % 65536;
            int k = i / 65536;
            OpenGlHelper.setLightmapTextureCoords(OpenGlHelper.lightmapTexUnit, (float)j / 1.0F * 0.95f, (float)k / 1.0F * 0.95f);
        }
        headModel.render(null, 0.0F, 0.0F, 0.0F, 0f, 0.0F, 0.0625f);
        GL11.glPopMatrix();

        // Render the flat body parts; slightly above the floor to avoid z-fighting.
        GL11.glPushMatrix();
        GL11.glTranslated(xPos+0.5f, yPos+0.001d, zPos+0.5f);
        GL11.glRotatef(angle, 0, 1, 0);
        float texHeight = 32;
        float texWidth = 64;
        Tessellator tess = Tessellator.instance;
        RenderHelper.disableStandardItemLighting();
        if (standing) {
            // Tilt the body upright against the wall/face behind the head.
            GL11.glRotatef(90f, 1f, 0f, 0f);
            GL11.glTranslatef(0f, 7f/16f, -1f/16f);
        }

        // Left Arm
        float xOffset = 4f/16f-0.5f;
        float zOffset = 5f/16f-0.5f;
        float thickness = 1f/16f;
        float yOffset = 1f/16f;
        tess.startDrawingQuads();
        if (standing) {
            xOffset = -0.5f;
            zOffset = 1f/16f-0.5f;
            buildBodyPart(xOffset, yOffset, zOffset, 4f/16f, thickness, 12f/16f, 44f/texWidth, 20f/texHeight, 48f/ texWidth, 32f/texHeight, texWidth, texHeight);
        }
        else {
            buildBodyPart(xOffset, yOffset, zOffset, -12f/16f, thickness, -4f/16f, 52f/ texWidth, 20f/texHeight, 56f/texWidth, 32f/texHeight, texWidth, texHeight);
        }

        // Right Arm
        xOffset = 12f/16f-0.5f;
        zOffset = 1f/16f-0.5f;
        if (standing) {
            buildBodyPart(xOffset, yOffset, zOffset, 4f/16f, thickness, 12f/16f, 48f/texWidth, 20f/texHeight, 52f/texWidth, 32f/texHeight, texWidth, texHeight);
        }
        else {
            buildBodyPart(xOffset, yOffset, zOffset, 12f/16f, thickness, 4f/16f, 56f/texWidth, 20f/texHeight, 52f/texWidth, 32f/texHeight, texWidth, texHeight);
        }

        // Body
        xOffset = 0.25f-0.5f;
        zOffset = 1f/16f-0.5f;
        buildBodyPart(xOffset, yOffset, zOffset, 8f/16f, thickness, 12f/16f, (standing ? 20f : 32f)/texWidth, 20f/texHeight, (standing ? 28f : 40f)/texWidth, 32f/texHeight, texWidth, texHeight);

        // Left Leg
        xOffset = 0.25f-0.5f;
        zOffset = 13f/16f-0.5f;
        buildBodyPart(xOffset, yOffset, zOffset, 4f/16f, thickness, 12f/16f, (standing ? 8f : 16f)/texWidth, 20f/texHeight, (standing ? 4f : 12f)/texWidth, 32f/texHeight, texWidth, texHeight);

        // Right Leg
        xOffset = 0.5f-0.5f;
        zOffset = 13f/16f-0.5f;
        buildBodyPart(xOffset, yOffset, zOffset, 4f/16f, thickness, 12f/16f, (standing ? 4f : 12f)/texWidth, 20f/texHeight, (standing ? 8f : 16f)/texWidth, 32f/texHeight, texWidth, texHeight);
        tess.draw();

        RenderHelper.enableStandardItemLighting();
        GL11.glEnable(GL11.GL_LIGHTING);
        GL11.glPopMatrix();
    }

    /**
     * Emits the quads for one flat body-part slab into the active tessellator
     * batch (caller must have called {@code startDrawingQuads()}): a top face,
     * a bottom face, and four side strips whose UVs are shrunk by the slab's
     * texel depth so the sides sample the edge of the skin region.
     *
     * @param xPos/yPos/zPos local origin of the slab
     * @param width  slab size along X (may be negative to mirror)
     * @param depth  slab thickness along -Y; {@code depth*16} is its texel depth
     * @param length slab size along Z (may be negative to mirror)
     * @param minU/minV/maxU/maxV normalized skin-texture region for the part
     * @param texWidth/texHeight  skin sheet dimensions (64x32 here)
     */
    private void buildBodyPart(float xPos, float yPos, float zPos, float width, float depth, float length, float minU, float minV, float maxU, float maxV, float texWidth, float texHeight) {
        Tessellator tess = Tessellator.instance;
        float texDepth = depth*16f;

        // This if is used if texture should be rotated as width would be longer then length (used for arms)
        if (Math.abs(width) > Math.abs(length)) {
            // Draws base texture
            tess.addVertexWithUV(xPos, yPos, zPos, minU, minV);
            tess.addVertexWithUV(xPos, yPos, zPos+length, maxU, minV);
            tess.addVertexWithUV(xPos+width, yPos, zPos+length, maxU, maxV);
            tess.addVertexWithUV(xPos+width, yPos, zPos, minU, maxV);
            tess.addVertexWithUV(xPos+width, yPos-depth, zPos, minU, maxV);
            tess.addVertexWithUV(xPos+width, yPos-depth, zPos+length, maxU, maxV);
            tess.addVertexWithUV(xPos, yPos-depth, zPos+length, maxU, minV);
            tess.addVertexWithUV(xPos, yPos-depth, zPos, minU, minV);
            // Draws sides
            tess.addVertexWithUV(xPos, yPos-depth, zPos, minU, minV+(texDepth/texHeight));
            tess.addVertexWithUV(xPos, yPos-depth, zPos+length, maxU, minV+(texDepth/texHeight));
            tess.addVertexWithUV(xPos, yPos, zPos+length, maxU, minV);
            tess.addVertexWithUV(xPos, yPos, zPos, minU, minV);
            // uUpper moves toward the interior of the region regardless of U direction.
            float uUpper = maxU + ((minU > maxU) ? (texDepth/texWidth) : -(texDepth/texWidth));
            tess.addVertexWithUV(xPos, yPos-depth, zPos+length, uUpper, minV);
            tess.addVertexWithUV(xPos+width, yPos-depth, zPos+length, uUpper, maxV);
            tess.addVertexWithUV(xPos+width, yPos, zPos+length, maxU, maxV);
            tess.addVertexWithUV(xPos, yPos, zPos+length, maxU, minV);
            tess.addVertexWithUV(xPos+width, yPos-depth, zPos+length, maxU, maxV-(texDepth/texHeight));
            tess.addVertexWithUV(xPos+width, yPos-depth, zPos, minU, maxV-(texDepth/texHeight));
            tess.addVertexWithUV(xPos+width, yPos, zPos, minU, maxV);
            tess.addVertexWithUV(xPos+width, yPos, zPos+length, maxU, maxV);
            uUpper = minU + ((minU < maxU) ? (texDepth/texWidth) : -(texDepth/texWidth));
            tess.addVertexWithUV(xPos+width, yPos-depth, zPos, minU, maxV);
            tess.addVertexWithUV(xPos, yPos-depth, zPos, minU, minV);
            tess.addVertexWithUV(xPos, yPos, zPos, uUpper, minV);
            tess.addVertexWithUV(xPos+width, yPos, zPos, uUpper, maxV);
        }
        else {
            // Draws base texture
            tess.addVertexWithUV(xPos, yPos, zPos, minU, minV);
            tess.addVertexWithUV(xPos, yPos, zPos+length, minU, maxV);
            tess.addVertexWithUV(xPos+width, yPos, zPos+length, maxU, maxV);
            tess.addVertexWithUV(xPos+width, yPos, zPos, maxU, minV);
            tess.addVertexWithUV(xPos+width, yPos-depth, zPos, maxU, minV);
            tess.addVertexWithUV(xPos+width, yPos-depth, zPos+length, maxU, maxV);
            tess.addVertexWithUV(xPos, yPos-depth, zPos+length, minU, maxV);
            tess.addVertexWithUV(xPos, yPos-depth, zPos, minU, minV);
            // Draws sides
            float uUpper = minU + ((minU < maxU) ? (texDepth/texWidth) : -(texDepth/texWidth));
            tess.addVertexWithUV(xPos, yPos-depth, zPos, minU, minV);
            tess.addVertexWithUV(xPos, yPos-depth, zPos+length, minU, maxV);
            tess.addVertexWithUV(xPos, yPos, zPos+length, uUpper, maxV);
            tess.addVertexWithUV(xPos, yPos, zPos, uUpper, minV);
            tess.addVertexWithUV(xPos, yPos-depth, zPos+length, minU, maxV);
            tess.addVertexWithUV(xPos+width, yPos-depth, zPos+length, maxU, maxV);
            tess.addVertexWithUV(xPos+width, yPos, zPos+length, maxU, maxV-(texDepth/texHeight));
            tess.addVertexWithUV(xPos, yPos, zPos+length, minU, maxV-(texDepth/texHeight));
            tess.addVertexWithUV(xPos+width, yPos-depth, zPos+length, maxU, maxV);
            tess.addVertexWithUV(xPos+width, yPos-depth, zPos, maxU, minV);
            tess.addVertexWithUV(xPos+width, yPos, zPos, maxU-(texDepth/texWidth), minV);
            tess.addVertexWithUV(xPos+width, yPos, zPos+length, maxU-(texDepth/texWidth), maxV);
            tess.addVertexWithUV(xPos+width, yPos-depth, zPos, minU, minV+(texDepth/texHeight));
            tess.addVertexWithUV(xPos, yPos-depth, zPos, maxU, minV+(texDepth/texHeight));
            tess.addVertexWithUV(xPos, yPos, zPos, maxU, minV);
            tess.addVertexWithUV(xPos+width, yPos, zPos, minU, minV);
        }
    }
}
|
Fix right arm standing texture
|
src/main/java/uk/kihira/playerrugs/client/PlayerRugTESR.java
|
Fix right arm standing texture
|
|
Java
|
apache-2.0
|
a9e9f9bbc1a40aff74293c7327d8208d13adca63
| 0
|
mpi2/PhenotypeData,mpi2/PhenotypeData,mpi2/PhenotypeData,mpi2/PhenotypeData,mpi2/PhenotypeData,mpi2/PhenotypeData
|
package org.mousephenotype.cda.neo4jLoad.graph;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.impl.ConcurrentUpdateSolrClient;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.neo4j.ogm.session.SessionFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
import org.springframework.context.annotation.PropertySource;
import org.springframework.data.neo4j.repository.config.EnableNeo4jRepositories;
import org.springframework.data.neo4j.transaction.Neo4jTransactionManager;
import javax.validation.constraints.NotNull;
/**
* Created by jmason on 18/03/2017.
*/
@Configuration
@EnableNeo4jRepositories(basePackages = "org.mousephenotype.cda.neo4j.repository")
@PropertySource("file:${user.home}/configfiles/${profile:dev}/application.properties")
public class Neo4jConfig {

    /** Filesystem location of the embedded Neo4j database (from application.properties). */
    @NotNull
    @Value("${neo4jDbPath2}")
    private String neo4jDbPath;

    /** Directory containing the ontology OWL files (from application.properties). */
    @NotNull
    @Value("${owlpath}")
    protected String owlpath;

    /** Base URL of the Solr installation hosting the statistical-result core. */
    @NotNull
    @Value("${solr.host}")
    private String solrBaseUrl;

    /** Solr client bound to the statistical-result core. */
    @Bean(name = "statisticalResultCore")
    HttpSolrClient getExperimentCore() {
        return new HttpSolrClient(solrBaseUrl + "/statistical-result");
    }

    /**
     * Builds the Neo4j-OGM configuration for the embedded driver, pointing
     * it at the database directory configured via {@code neo4jDbPath2}.
     */
    @Bean
    public org.neo4j.ogm.config.Configuration getConfiguration() {
        org.neo4j.ogm.config.Configuration ogmConfig = new org.neo4j.ogm.config.Configuration();
        String embeddedDbUri = "file://" + neo4jDbPath;
        ogmConfig
                .driverConfiguration()
                .setDriverClassName("org.neo4j.ogm.drivers.embedded.driver.EmbeddedDriver")
                .setURI(embeddedDbUri);
        System.out.println(ogmConfig);
        return ogmConfig;
    }

    /** Session factory scanning the package holding both entities and repositories. */
    @Bean
    public SessionFactory sessionFactory() {
        return new SessionFactory(getConfiguration(), "org.mousephenotype.cda.neo4j");
    }

    /** Transaction manager driven by the session factory above. */
    @Bean
    public Neo4jTransactionManager transactionManager() {
        return new Neo4jTransactionManager(sessionFactory());
    }
}
|
neo4j-load/src/main/java/org/mousephenotype/cda/neo4jLoad/graph/Neo4jConfig.java
|
package org.mousephenotype.cda.neo4jLoad.graph;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.impl.ConcurrentUpdateSolrClient;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.neo4j.ogm.session.SessionFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
import org.springframework.context.annotation.PropertySource;
import org.springframework.data.neo4j.repository.config.EnableNeo4jRepositories;
import org.springframework.data.neo4j.transaction.Neo4jTransactionManager;
import javax.validation.constraints.NotNull;
/**
* Created by jmason on 18/03/2017.
*/
@Configuration
@EnableNeo4jRepositories(basePackages = "org.mousephenotype.cda.neo4j.repository")
@PropertySource("file:${user.home}/configfiles/${profile:dev}/application.properties")
public class Neo4jConfig {

    /** Path of the embedded Neo4j test database, injected from application.properties. */
    @NotNull
    @Value("${neo4jDbPathTest}")
    private String neo4jDbPath;

    /** Location of the ontology OWL files, injected from application.properties. */
    @NotNull
    @Value("${owlpath}")
    protected String owlpath;

    /** Root URL of the Solr server providing the statistical-result core. */
    @NotNull
    @Value("${solr.host}")
    private String solrBaseUrl;

    /** Client for querying the statistical-result Solr core. */
    @Bean(name = "statisticalResultCore")
    HttpSolrClient getExperimentCore() {
        return new HttpSolrClient(solrBaseUrl + "/statistical-result");
    }

    /**
     * Neo4j-OGM configuration wired to the embedded driver and the
     * test database directory given by {@code neo4jDbPathTest}.
     */
    @Bean
    public org.neo4j.ogm.config.Configuration getConfiguration() {
        org.neo4j.ogm.config.Configuration cfg = new org.neo4j.ogm.config.Configuration();
        cfg.driverConfiguration()
                .setDriverClassName("org.neo4j.ogm.drivers.embedded.driver.EmbeddedDriver")
                .setURI("file://" + neo4jDbPath);
        System.out.println(cfg);
        return cfg;
    }

    /** Session factory covering both the entity and repository packages. */
    @Bean
    public SessionFactory sessionFactory() {
        return new SessionFactory(getConfiguration(), "org.mousephenotype.cda.neo4j");
    }

    /** Transaction manager backed by {@link #sessionFactory()}. */
    @Bean
    public Neo4jTransactionManager transactionManager() {
        return new Neo4jTransactionManager(sessionFactory());
    }
}
|
fixed neo4j loader path
|
neo4j-load/src/main/java/org/mousephenotype/cda/neo4jLoad/graph/Neo4jConfig.java
|
fixed neo4j loader path
|
|
Java
|
apache-2.0
|
e465180ebe39fd59994186d2499b3a59652c4441
| 0
|
privacyidea/privacyidea-authenticator
|
/*
privacyIDEA Authenticator
Authors: Nils Behlen <nils.behlen@netknights.it>
Copyright (c) 2017-2019 NetKnights GmbH
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package it.netknights.piauthenticator;
import android.view.View;
import android.view.ViewGroup;
import android.view.ViewParent;
import org.hamcrest.Description;
import org.hamcrest.Matcher;
import org.hamcrest.TypeSafeMatcher;
import org.hamcrest.core.IsInstanceOf;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import androidx.test.espresso.DataInteraction;
import androidx.test.espresso.ViewInteraction;
import androidx.test.espresso.matcher.ViewMatchers;
import androidx.test.filters.LargeTest;
import androidx.test.internal.runner.junit4.AndroidJUnit4ClassRunner;
import androidx.test.platform.app.InstrumentationRegistry;
import androidx.test.rule.ActivityTestRule;
import it.netknights.piauthenticator.viewcontroller.MainActivity;
import static androidx.test.espresso.Espresso.onData;
import static androidx.test.espresso.Espresso.onView;
import static androidx.test.espresso.Espresso.openActionBarOverflowOrOptionsMenu;
import static androidx.test.espresso.action.ViewActions.click;
import static androidx.test.espresso.action.ViewActions.closeSoftKeyboard;
import static androidx.test.espresso.action.ViewActions.longClick;
import static androidx.test.espresso.action.ViewActions.replaceText;
import static androidx.test.espresso.action.ViewActions.scrollTo;
import static androidx.test.espresso.assertion.ViewAssertions.matches;
import static androidx.test.espresso.matcher.ViewMatchers.hasDescendant;
import static androidx.test.espresso.matcher.ViewMatchers.isDisplayed;
import static androidx.test.espresso.matcher.ViewMatchers.withClassName;
import static androidx.test.espresso.matcher.ViewMatchers.withContentDescription;
import static androidx.test.espresso.matcher.ViewMatchers.withEffectiveVisibility;
import static androidx.test.espresso.matcher.ViewMatchers.withId;
import static androidx.test.espresso.matcher.ViewMatchers.withText;
import static androidx.test.platform.app.InstrumentationRegistry.getInstrumentation;
import static org.hamcrest.Matchers.allOf;
import static org.hamcrest.Matchers.anything;
import static org.hamcrest.Matchers.is;
/**
 * End-to-end Espresso UI test for the token detail and menu flows:
 * opening the "add token manually" dialog, checking its defaults,
 * creating a TOTP token with a PIN, entering/changing the PIN,
 * renaming the token, and verifying the action-mode menu entries.
 *
 * NOTE(review): matchers lean heavily on childAtPosition(...) indices,
 * so the test is tightly coupled to the exact current view hierarchy.
 */
@LargeTest
@RunWith(AndroidJUnit4ClassRunner.class)
public class DetailAndMenuTest {

    @Rule
    public ActivityTestRule<MainActivity> mActivityTestRule = new ActivityTestRule<>(MainActivity.class);

    @Before
    public void setup() {
    }

    @Test
    public void testDetail() {
        // Open the overflow menu and select "add token manually".
        sleep();
        openActionBarOverflowOrOptionsMenu(getInstrumentation().getTargetContext());
        sleep();
        ViewInteraction appCompatTextView = onView(
                allOf(withId(R.id.title), withText((R.string.menu_add_manually)),
                        childAtPosition(
                                childAtPosition(
                                        withId(R.id.content),
                                        0),
                                0),
                        isDisplayed()));
        appCompatTextView.perform(click());
        sleep();
        // Verify the dialog's input widgets and their default labels.
        onView(withId(R.id.editText_secret)).check(matches(isDisplayed()));
        onView(withId(R.id.editText_secret)).check(matches(withText(R.string.secret)));
        onView(withId(R.id.editText_name)).check(matches(isDisplayed()));
        onView(withId(R.id.editText_name)).check(matches(withText(R.string.name)));
        onView(withId(R.id.radio_base32)).check(matches(isDisplayed()));
        onView(withId(R.id.radio_hex)).check(matches(isDisplayed()));
        onView(withId(R.id.radio_base32)).check(matches(withText(R.string.base32_encoded_secret)));
        onView(withId(R.id.radio_hex)).check(matches(withText(R.string.hex_encoded_secret)));
        onView(withId(R.id.checkBox_pin)).check(matches(isDisplayed()));
        onView(withId(R.id.checkBox_pin)).check(matches(withText(R.string.with_pin)));
        // Default spinner rows: digits = 6, algorithm = SHA1, type = HOTP.
        ViewInteraction textView = onView(
                allOf(withId(R.id.label), withText(R.string.digits),
                        childAtPosition(
                                allOf(withId(R.id.tableRow),
                                        childAtPosition(
                                                IsInstanceOf.instanceOf(android.view.ViewGroup.class),
                                                0)),
                                0),
                        isDisplayed()));
        textView.check(matches(withText(R.string.digits)));
        ViewInteraction textView2 = onView(
                allOf(withId(android.R.id.text1), withText("6"),
                        childAtPosition(
                                allOf(withId(R.id.spinner_row),
                                        childAtPosition(
                                                withId(R.id.tableRow),
                                                1)),
                                0),
                        isDisplayed()));
        textView2.check(matches(withText("6")));
        ViewInteraction textView3 = onView(
                allOf(withId(R.id.label), withText(R.string.algorithm),
                        childAtPosition(
                                allOf(withId(R.id.tableRow),
                                        childAtPosition(
                                                IsInstanceOf.instanceOf(android.view.ViewGroup.class),
                                                0)),
                                0),
                        isDisplayed()));
        textView3.check(matches(withText(R.string.algorithm)));
        ViewInteraction textView4 = onView(
                allOf(withId(android.R.id.text1), withText("SHA1"),
                        childAtPosition(
                                allOf(withId(R.id.spinner_row),
                                        childAtPosition(
                                                withId(R.id.tableRow),
                                                1)),
                                0),
                        isDisplayed()));
        textView4.check(matches(withText("SHA1")));
        ViewInteraction textView5 = onView(
                allOf(withId(R.id.label), withText(R.string.type),
                        childAtPosition(
                                allOf(withId(R.id.tableRow),
                                        childAtPosition(
                                                IsInstanceOf.instanceOf(android.view.ViewGroup.class),
                                                0)),
                                0),
                        isDisplayed()));
        textView5.check(matches(withText(R.string.type)));
        ViewInteraction textView6 = onView(
                allOf(withId(android.R.id.text1), withText("HOTP"),
                        childAtPosition(
                                allOf(withId(R.id.spinner_row),
                                        childAtPosition(
                                                withId(R.id.tableRow),
                                                1)),
                                0),
                        isDisplayed()));
        textView6.check(matches(withText("HOTP")));
        // Enter the token name "Nam" and the Base32 secret "AAAA".
        ViewInteraction appCompatEditText = onView(
                allOf(withId(R.id.editText_name), withText("Name"),
                        childAtPosition(
                                childAtPosition(
                                        withId(android.R.id.content),
                                        0),
                                1),
                        isDisplayed()));
        appCompatEditText.perform(click());
        ViewInteraction appCompatEditText2 = onView(
                allOf(withId(R.id.editText_name), withText("Name"),
                        childAtPosition(
                                childAtPosition(
                                        withId(android.R.id.content),
                                        0),
                                1),
                        isDisplayed()));
        appCompatEditText2.perform(click());
        ViewInteraction appCompatEditText3 = onView(
                allOf(withId(R.id.editText_name), withText("Name"),
                        childAtPosition(
                                childAtPosition(
                                        withId(android.R.id.content),
                                        0),
                                1),
                        isDisplayed()));
        appCompatEditText3.perform(click());
        ViewInteraction appCompatEditText4 = onView(
                allOf(withId(R.id.editText_name), withText("Name"),
                        childAtPosition(
                                childAtPosition(
                                        withId(android.R.id.content),
                                        0),
                                1),
                        isDisplayed()));
        appCompatEditText4.perform(replaceText("Nam"));
        ViewInteraction appCompatEditText5 = onView(
                allOf(withId(R.id.editText_name), withText("Nam"),
                        childAtPosition(
                                childAtPosition(
                                        withId(android.R.id.content),
                                        0),
                                1),
                        isDisplayed()));
        appCompatEditText5.perform(closeSoftKeyboard());
        ViewInteraction appCompatEditText6 = onView(
                allOf(withId(R.id.editText_secret), withText((R.string.secret)),
                        childAtPosition(
                                childAtPosition(
                                        withId(android.R.id.content),
                                        0),
                                2),
                        isDisplayed()));
        appCompatEditText6.perform(replaceText("AAAA"));
        ViewInteraction appCompatEditText7 = onView(
                allOf(withId(R.id.editText_secret), withText("AAAA"),
                        childAtPosition(
                                childAtPosition(
                                        withId(android.R.id.content),
                                        0),
                                2),
                        isDisplayed()));
        appCompatEditText7.perform(closeSoftKeyboard());
        // ViewInteraction appCompatCheckBox = onView(
        //         allOf(withId(R.id.radio_base32),
        //                 childAtPosition(
        //                         childAtPosition(
        //                                 withId(android.R.id.content),
        //                                 0),
        //                         3),
        //                 isDisplayed()));
        // appCompatCheckBox.perform(click());
        // Select Base32 encoding and enable the PIN option.
        ViewInteraction appCompatRadioButton = onView(
                allOf(withId(R.id.radio_base32), withText("Base32")));
        appCompatRadioButton.perform(click());
        ViewInteraction appCompatCheckBox2 = onView(
                allOf(withId(R.id.checkBox_pin), withText((R.string.with_pin))));
        appCompatCheckBox2.perform(click());
        //pressBack();
        // Change the digits spinner (popup item at position 1).
        ViewInteraction appCompatSpinner = onView(
                allOf(withId(R.id.spinner_row),
                        hasDescendant(withText("6")),
                        isDisplayed()));
        appCompatSpinner.perform(click());
        DataInteraction appCompatTextView2 = onData(anything())
                .inAdapterView(childAtPosition(
                        withClassName(is("android.widget.PopupWindow$PopupBackgroundView")),
                        0))
                .atPosition(1);
        appCompatTextView2.perform(click());
        // Change the algorithm spinner (popup item at position 2).
        ViewInteraction appCompatSpinner2 = onView(
                allOf(withId(R.id.spinner_row), hasDescendant(withText("SHA1")),
                        isDisplayed()));
        appCompatSpinner2.perform(click());
        DataInteraction appCompatTextView3 = onData(anything())
                .inAdapterView(childAtPosition(
                        withClassName(is("android.widget.PopupWindow$PopupBackgroundView")),
                        0))
                .atPosition(2);
        appCompatTextView3.perform(click());
        // Switch type HOTP -> TOTP, which should reveal the period row.
        ViewInteraction appCompatSpinner3 = onView(
                allOf(withId(R.id.spinner_row),
                        childAtPosition(
                                allOf(withId(R.id.tableRow),
                                        childAtPosition(
                                                withClassName(is("androidx.constraintlayout.widget.ConstraintLayout")),
                                                0)),
                                1),
                        hasDescendant(withText("HOTP")),
                        isDisplayed()));
        appCompatSpinner3.perform(click());
        DataInteraction appCompatTextView4 = onData(anything())
                .inAdapterView(childAtPosition(
                        withClassName(is("android.widget.PopupWindow$PopupBackgroundView")),
                        0))
                .atPosition(1);
        appCompatTextView4.perform(click());
        ViewInteraction textView7 = onView(
                allOf(withId(R.id.label), withText((R.string.period)),
                        childAtPosition(
                                allOf(withId(R.id.tableRow),
                                        childAtPosition(
                                                IsInstanceOf.instanceOf(android.view.ViewGroup.class),
                                                0)),
                                0),
                        isDisplayed()));
        textView7.check(matches(withText((R.string.period))));
        ViewInteraction textView8 = onView(
                allOf(withId(android.R.id.text1), withText("30s"),
                        childAtPosition(
                                allOf(withId(R.id.spinner_row),
                                        childAtPosition(
                                                withId(R.id.tableRow),
                                                1)),
                                0),
                        isDisplayed()));
        textView8.check(matches(withText("30s")));
        // Switch back TOTP -> HOTP (popup item at position 0) and add the token.
        ViewInteraction appCompatSpinner4 = onView(
                allOf(withId(R.id.spinner_row),
                        childAtPosition(
                                allOf(withId(R.id.tableRow),
                                        childAtPosition(
                                                withClassName(is("androidx.constraintlayout.widget.ConstraintLayout")),
                                                0)),
                                1),
                        hasDescendant(withText("TOTP")),
                        isDisplayed()));
        appCompatSpinner4.perform(click());
        DataInteraction appCompatTextView5 = onData(anything())
                .inAdapterView(childAtPosition(
                        withClassName(is("android.widget.PopupWindow$PopupBackgroundView")),
                        0))
                .atPosition(0);
        appCompatTextView5.perform(click());
        ViewInteraction appCompatButton = onView(
                allOf(withId(R.id.button_add), withText("+")));
        appCompatButton.perform(click());
        sleep();
        // The new token should prompt for a PIN before showing an OTP.
        onView(withText((R.string.tap_to_set_pin))).check(matches(isDisplayed()));
        /* ViewInteraction textView10 = onView(
                allOf(withId(R.id.textViewLabel), withText("Nam")));
        textView10.check(matches(withText("Nam")));*/
        sleep();
        DataInteraction relativeLayout = onData(anything())
                .inAdapterView(allOf(withId(R.id.listview),
                        childAtPosition(
                                withId(R.id.main_constraint_layout),
                                1)))
                .atPosition(0);
        relativeLayout.perform(click());
        sleep();
        // "Set new PIN" dialog: one input field plus OK/Cancel buttons.
        ViewInteraction textView11 = onView(
                allOf(IsInstanceOf.instanceOf(android.widget.TextView.class), withText((R.string.set_new_pin)),
                        isDisplayed()));
        textView11.check(matches(withText((R.string.set_new_pin))));
        ViewInteraction editText5 = onView(
                allOf(childAtPosition(
                        allOf(withId(android.R.id.custom),
                                childAtPosition(
                                        IsInstanceOf.instanceOf(android.widget.FrameLayout.class),
                                        0)),
                        0),
                        isDisplayed()));
        editText5.check(matches(isDisplayed()));
        ViewInteraction button2 = onView(
                allOf(withId(android.R.id.button1),
                        isDisplayed()));
        button2.check(matches(isDisplayed()));
        ViewInteraction button3 = onView(
                allOf(withId(android.R.id.button2),
                        isDisplayed()));
        button3.check(matches(isDisplayed()));
        sleep();
        // Type the PIN "5" and save.
        ViewInteraction editText6 = onView(
                allOf(childAtPosition(
                        allOf(withId(android.R.id.custom),
                                childAtPosition(
                                        withClassName(is("android.widget.FrameLayout")),
                                        0)),
                        0),
                        isDisplayed()));
        editText6.perform(click());
        sleep();
        ViewInteraction editText7 = onView(
                allOf(childAtPosition(
                        allOf(withId(android.R.id.custom),
                                childAtPosition(
                                        withClassName(is("android.widget.FrameLayout")),
                                        0)),
                        0),
                        isDisplayed()));
        editText7.perform(replaceText("5"), closeSoftKeyboard());
        sleep();
        ViewInteraction appCompatButton2 = onView(
                allOf(withId(android.R.id.button1), withText(R.string.button_text_save),
                        childAtPosition(
                                childAtPosition(
                                        withClassName(is("android.widget.ScrollView")),
                                        0),
                                3)));
        appCompatButton2.perform(scrollTo(), click());
        sleep();
        // Expected first OTP for secret "AAAA"/Base32 with the chosen settings.
        // NOTE(review): "6667 4061" is a hard-coded expected value — confirm it
        // matches the algorithm/digit settings selected above.
        onView(withId(R.id.textViewToken)).check(matches(withText("6667 4061")));
        ViewInteraction textView13 = onView(
                allOf(withId(R.id.textViewLabel), withText("Nam"),
                        childAtPosition(
                                childAtPosition(
                                        withId(R.id.listview),
                                        0),
                                2),
                        isDisplayed()));
        textView13.check(matches(withText("Nam")));
        ViewInteraction button4 = onView(
                allOf(withId(R.id.next_button),
                        isDisplayed()));
        button4.check(matches(isDisplayed()));
        sleep();
        // Long-press the token to open the action mode, then change the PIN to "8".
        DataInteraction relativeLayout2 = onData(anything())
                .inAdapterView(allOf(withId(R.id.listview),
                        childAtPosition(
                                withId(R.id.main_constraint_layout),
                                1)))
                .atPosition(0);
        relativeLayout2.perform(longClick());
        sleep();
        ViewInteraction actionMenuItemView = onView(
                allOf(withId(R.id.change_pin2), withContentDescription(R.string.change_pin),
                        isDisplayed()));
        actionMenuItemView.perform(click());
        sleep();
        ViewInteraction editText8 = onView(
                allOf(childAtPosition(
                        childAtPosition(
                                withId(android.R.id.custom),
                                0),
                        0),
                        isDisplayed()));
        editText8.perform(click());
        sleep();
        ViewInteraction editText9 = onView(
                allOf(childAtPosition(
                        childAtPosition(
                                withId(android.R.id.custom),
                                0),
                        0),
                        isDisplayed()));
        editText9.perform(replaceText("8"), closeSoftKeyboard());
        sleep();
        ViewInteraction editText10 = onView(
                allOf(childAtPosition(
                        childAtPosition(
                                withId(android.R.id.custom),
                                0),
                        1),
                        isDisplayed()));
        editText10.perform(replaceText("8"), closeSoftKeyboard());
        sleep();
        // The change-PIN dialog masks both fields ("•") and offers save/cancel.
        ViewInteraction textView14 = onView(
                allOf(IsInstanceOf.instanceOf(android.widget.TextView.class), withText(R.string.change_pin),
                        isDisplayed()));
        textView14.check(matches(withText(R.string.change_pin)));
        ViewInteraction editText11 = onView(
                allOf(withText("•"),
                        childAtPosition(
                                childAtPosition(
                                        withId(android.R.id.custom),
                                        0),
                                0),
                        isDisplayed()));
        editText11.check(matches(isDisplayed()));
        ViewInteraction editText12 = onView(
                allOf(withText("•"),
                        childAtPosition(
                                childAtPosition(
                                        withId(android.R.id.custom),
                                        0),
                                1),
                        isDisplayed()));
        editText12.check(matches(isDisplayed()));
        ViewInteraction button5 = onView(
                allOf(withId(android.R.id.button1),
                        isDisplayed()));
        button5.check(matches(isDisplayed()));
        ViewInteraction button6 = onView(
                allOf(withId(android.R.id.button2),
                        isDisplayed()));
        button6.check(matches(isDisplayed()));
        sleep();
        ViewInteraction appCompatButton3 = onView(
                allOf(withId(android.R.id.button1), withText(R.string.button_text_save),
                        childAtPosition(
                                childAtPosition(
                                        withClassName(is("android.widget.ScrollView")),
                                        0),
                                3)));
        appCompatButton3.perform(scrollTo(), click());
        sleep();
        // Re-open the action mode and verify all three menu items plus the toolbar icon.
        DataInteraction relativeLayout3 = onData(anything())
                .inAdapterView(allOf(withId(R.id.listview),
                        childAtPosition(
                                withId(R.id.main_constraint_layout),
                                1)))
                .atPosition(0);
        relativeLayout3.perform(longClick());
        sleep();
        ViewInteraction textView15 = onView(
                allOf(withId(R.id.change_pin2), withContentDescription(R.string.change_pin),
                        childAtPosition(
                                childAtPosition(
                                        withId(R.id.action_mode_bar),
                                        2),
                                0),
                        isDisplayed()));
        textView15.check(matches(isDisplayed()));
        ViewInteraction textView16 = onView(
                allOf(withId(R.id.edit_token2), withContentDescription(R.string.rename),
                        childAtPosition(
                                childAtPosition(
                                        withId(R.id.action_mode_bar),
                                        2),
                                1),
                        isDisplayed()));
        textView16.check(matches(isDisplayed()));
        ViewInteraction textView17 = onView(
                allOf(withId(R.id.delete_token2), withContentDescription("Item"),
                        childAtPosition(
                                childAtPosition(
                                        withId(R.id.action_mode_bar),
                                        2),
                                2),
                        isDisplayed()));
        textView17.check(matches(isDisplayed()));
        ViewInteraction imageView2 = onView(
                allOf(childAtPosition(
                        allOf(withId(R.id.toolbar),
                                childAtPosition(
                                        withId(R.id.main_constraint_layout),
                                        0)),
                        0),
                        isDisplayed()));
        imageView2.check(matches(isDisplayed()));
        sleep();
        sleep();
        // Rename the token from "Nam" to "Name" via the rename dialog.
        ViewInteraction actionMenuItemView3 = onView(
                allOf(withId(R.id.edit_token2), withContentDescription(R.string.rename),
                        childAtPosition(
                                childAtPosition(
                                        withId(R.id.action_mode_bar),
                                        2),
                                1),
                        isDisplayed()));
        actionMenuItemView3.perform(click());
        sleep();
        ViewInteraction editText13 = onView(
                allOf(withText("Nam"),
                        isDisplayed()));
        editText13.check(matches(isDisplayed()));
        ViewInteraction button7 = onView(
                allOf(withId(android.R.id.button2),
                        isDisplayed()));
        button7.check(matches(isDisplayed()));
        ViewInteraction editText14 = onView(
                allOf(withText("Nam"),
                        isDisplayed()));
        editText14.check(matches(withText("Nam")));
        ViewInteraction button8 = onView(
                allOf(withId(android.R.id.button1),
                        isDisplayed()));
        button8.check(matches(isDisplayed()));
        sleep();
        ViewInteraction editText15 = onView(
                allOf(withText("Nam"),
                        isDisplayed()));
        editText15.perform(click());
        sleep();
        ViewInteraction editText16 = onView(
                allOf(withText("Nam"),
                        childAtPosition(
                                allOf(withId(android.R.id.custom),
                                        childAtPosition(
                                                withClassName(is("android.widget.FrameLayout")),
                                                0)),
                                0),
                        isDisplayed()));
        editText16.perform(replaceText("Name"));
        ViewInteraction editText17 = onView(
                allOf(withText("Name"),
                        childAtPosition(
                                allOf(withId(android.R.id.custom),
                                        childAtPosition(
                                                withClassName(is("android.widget.FrameLayout")),
                                                0)),
                                0),
                        isDisplayed()));
        editText17.perform(closeSoftKeyboard());
        sleep();
        ViewInteraction appCompatButton4 = onView(
                allOf(withId(android.R.id.button1), withText(R.string.button_text_save),
                        childAtPosition(
                                childAtPosition(
                                        withClassName(is("android.widget.ScrollView")),
                                        0),
                                3)));
        appCompatButton4.perform(scrollTo(), click());
        sleep();
        sleep();
        onView(withId(R.id.textViewLabel)).check(matches(withText("Name")));
        sleep();
        // Finally add a TOTP token via the menu and expect the countdown progress bar.
        openActionBarOverflowOrOptionsMenu(getInstrumentation().getTargetContext());
        sleep();
        ViewInteraction appCompatTextView6 = onView(
                allOf(withId(R.id.title), withText(R.string.menu_add_manually),
                        childAtPosition(
                                childAtPosition(
                                        withId(R.id.content),
                                        0),
                                0),
                        isDisplayed()));
        appCompatTextView6.perform(click());
        sleep();
        ViewInteraction appCompatSpinner5 = onView(
                allOf(withId(R.id.spinner_row), hasDescendant(withText("HOTP")),
                        isDisplayed()));
        appCompatSpinner5.perform(click());
        DataInteraction appCompatTextView7 = onData(anything())
                .inAdapterView(childAtPosition(
                        withClassName(is("android.widget.PopupWindow$PopupBackgroundView")),
                        0))
                .atPosition(1);
        appCompatTextView7.perform(click());
        ViewInteraction appCompatButton6 = onView(
                allOf(withId(R.id.button_add), withText("+")));
        appCompatButton6.perform(click());
        sleep();
        onView(allOf(withId(R.id.progressBar), withEffectiveVisibility(ViewMatchers.Visibility.VISIBLE))).check(matches(isDisplayed()));
    }

    /**
     * Resolves a string resource against the target app context.
     * NOTE(review): currently unused within this class.
     */
    private String getString(int resID) {
        return InstrumentationRegistry.getInstrumentation().getTargetContext().getString(resID);
    }

    /** Crude 1-second wait used to let UI transitions settle between steps. */
    private void sleep() {
        try {
            Thread.sleep(1000);
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
    }

    /**
     * Matcher for the child view at a fixed index within a parent matched by
     * {@code parentMatcher} (standard Espresso Test Recorder helper).
     */
    private static Matcher<View> childAtPosition(
            final Matcher<View> parentMatcher, final int position) {
        return new TypeSafeMatcher<View>() {
            @Override
            public void describeTo(Description description) {
                description.appendText("Child at position " + position + " in parent ");
                parentMatcher.describeTo(description);
            }

            @Override
            public boolean matchesSafely(View view) {
                ViewParent parent = view.getParent();
                return parent instanceof ViewGroup && parentMatcher.matches(parent)
                        && view.equals(((ViewGroup) parent).getChildAt(position));
            }
        };
    }
}
|
app/src/androidTest/java/it/netknights/piauthenticator/DetailAndMenuTest.java
|
/*
privacyIDEA Authenticator
Authors: Nils Behlen <nils.behlen@netknights.it>
Copyright (c) 2017-2019 NetKnights GmbH
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package it.netknights.piauthenticator;
import android.view.View;
import android.view.ViewGroup;
import android.view.ViewParent;
import org.hamcrest.Description;
import org.hamcrest.Matcher;
import org.hamcrest.TypeSafeMatcher;
import org.hamcrest.core.IsInstanceOf;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import androidx.test.espresso.DataInteraction;
import androidx.test.espresso.ViewInteraction;
import androidx.test.espresso.matcher.ViewMatchers;
import androidx.test.filters.LargeTest;
import androidx.test.internal.runner.junit4.AndroidJUnit4ClassRunner;
import androidx.test.platform.app.InstrumentationRegistry;
import androidx.test.rule.ActivityTestRule;
import it.netknights.piauthenticator.viewcontroller.MainActivity;
import static androidx.test.espresso.Espresso.onData;
import static androidx.test.espresso.Espresso.onView;
import static androidx.test.espresso.Espresso.openActionBarOverflowOrOptionsMenu;
import static androidx.test.espresso.action.ViewActions.click;
import static androidx.test.espresso.action.ViewActions.closeSoftKeyboard;
import static androidx.test.espresso.action.ViewActions.longClick;
import static androidx.test.espresso.action.ViewActions.replaceText;
import static androidx.test.espresso.action.ViewActions.scrollTo;
import static androidx.test.espresso.assertion.ViewAssertions.matches;
import static androidx.test.espresso.matcher.ViewMatchers.hasDescendant;
import static androidx.test.espresso.matcher.ViewMatchers.isDisplayed;
import static androidx.test.espresso.matcher.ViewMatchers.withClassName;
import static androidx.test.espresso.matcher.ViewMatchers.withContentDescription;
import static androidx.test.espresso.matcher.ViewMatchers.withEffectiveVisibility;
import static androidx.test.espresso.matcher.ViewMatchers.withId;
import static androidx.test.espresso.matcher.ViewMatchers.withText;
import static androidx.test.platform.app.InstrumentationRegistry.getInstrumentation;
import static org.hamcrest.Matchers.allOf;
import static org.hamcrest.Matchers.anything;
import static org.hamcrest.Matchers.is;
@LargeTest
@RunWith(AndroidJUnit4ClassRunner.class)
public class DetailAndMenuTest {
@Rule
public ActivityTestRule<MainActivity> mActivityTestRule = new ActivityTestRule<>(MainActivity.class);
@Before
public void setup() {
}
@Test
public void testDetail() {
sleep();
openActionBarOverflowOrOptionsMenu(getInstrumentation().getTargetContext());
sleep();
ViewInteraction appCompatTextView = onView(
allOf(withId(R.id.title), withText((R.string.menu_add_manually)),
childAtPosition(
childAtPosition(
withId(R.id.content),
0),
0),
isDisplayed()));
appCompatTextView.perform(click());
sleep();
onView(withId(R.id.editText_secret)).check(matches(isDisplayed()));
onView(withId(R.id.editText_secret)).check(matches(withText(R.string.secret)));
onView(withId(R.id.editText_name)).check(matches(isDisplayed()));
onView(withId(R.id.editText_name)).check(matches(withText(R.string.name)));
onView(withId(R.id.radio_base32)).check(matches(isDisplayed()));
onView(withId(R.id.radio_hex)).check(matches(isDisplayed()));
onView(withId(R.id.radio_base32)).check(matches(withText(R.string.base32_encoded_secret)));
onView(withId(R.id.radio_hex)).check(matches(withText(R.string.hex_encoded_secret)));
onView(withId(R.id.checkBox_pin)).check(matches(isDisplayed()));
onView(withId(R.id.checkBox_pin)).check(matches(withText(R.string.with_pin)));
ViewInteraction textView = onView(
allOf(withId(R.id.label), withText(R.string.digits),
childAtPosition(
allOf(withId(R.id.tableRow),
childAtPosition(
IsInstanceOf.instanceOf(android.view.ViewGroup.class),
0)),
0),
isDisplayed()));
textView.check(matches(withText(R.string.digits)));
ViewInteraction textView2 = onView(
allOf(withId(android.R.id.text1), withText("6"),
childAtPosition(
allOf(withId(R.id.spinner_row),
childAtPosition(
withId(R.id.tableRow),
1)),
0),
isDisplayed()));
textView2.check(matches(withText("6")));
ViewInteraction textView3 = onView(
allOf(withId(R.id.label), withText(R.string.algorithm),
childAtPosition(
allOf(withId(R.id.tableRow),
childAtPosition(
IsInstanceOf.instanceOf(android.view.ViewGroup.class),
0)),
0),
isDisplayed()));
textView3.check(matches(withText(R.string.algorithm)));
ViewInteraction textView4 = onView(
allOf(withId(android.R.id.text1), withText("SHA1"),
childAtPosition(
allOf(withId(R.id.spinner_row),
childAtPosition(
withId(R.id.tableRow),
1)),
0),
isDisplayed()));
textView4.check(matches(withText("SHA1")));
ViewInteraction textView5 = onView(
allOf(withId(R.id.label), withText(R.string.type),
childAtPosition(
allOf(withId(R.id.tableRow),
childAtPosition(
IsInstanceOf.instanceOf(android.view.ViewGroup.class),
0)),
0),
isDisplayed()));
textView5.check(matches(withText(R.string.type)));
ViewInteraction textView6 = onView(
allOf(withId(android.R.id.text1), withText("HOTP"),
childAtPosition(
allOf(withId(R.id.spinner_row),
childAtPosition(
withId(R.id.tableRow),
1)),
0),
isDisplayed()));
textView6.check(matches(withText("HOTP")));
ViewInteraction appCompatEditText = onView(
allOf(withId(R.id.editText_name), withText("Name"),
childAtPosition(
childAtPosition(
withId(android.R.id.content),
0),
1),
isDisplayed()));
appCompatEditText.perform(click());
ViewInteraction appCompatEditText2 = onView(
allOf(withId(R.id.editText_name), withText("Name"),
childAtPosition(
childAtPosition(
withId(android.R.id.content),
0),
1),
isDisplayed()));
appCompatEditText2.perform(click());
ViewInteraction appCompatEditText3 = onView(
allOf(withId(R.id.editText_name), withText("Name"),
childAtPosition(
childAtPosition(
withId(android.R.id.content),
0),
1),
isDisplayed()));
appCompatEditText3.perform(click());
ViewInteraction appCompatEditText4 = onView(
allOf(withId(R.id.editText_name), withText("Name"),
childAtPosition(
childAtPosition(
withId(android.R.id.content),
0),
1),
isDisplayed()));
appCompatEditText4.perform(replaceText("Nam"));
ViewInteraction appCompatEditText5 = onView(
allOf(withId(R.id.editText_name), withText("Nam"),
childAtPosition(
childAtPosition(
withId(android.R.id.content),
0),
1),
isDisplayed()));
appCompatEditText5.perform(closeSoftKeyboard());
ViewInteraction appCompatEditText6 = onView(
allOf(withId(R.id.editText_secret), withText((R.string.secret)),
childAtPosition(
childAtPosition(
withId(android.R.id.content),
0),
2),
isDisplayed()));
appCompatEditText6.perform(replaceText("AAAA"));
ViewInteraction appCompatEditText7 = onView(
allOf(withId(R.id.editText_secret), withText("AAAA"),
childAtPosition(
childAtPosition(
withId(android.R.id.content),
0),
2),
isDisplayed()));
appCompatEditText7.perform(closeSoftKeyboard());
// ViewInteraction appCompatCheckBox = onView(
// allOf(withId(R.id.radio_base32),
// childAtPosition(
// childAtPosition(
// withId(android.R.id.content),
// 0),
// 3),
// isDisplayed()));
// appCompatCheckBox.perform(click());
ViewInteraction appCompatRadioButton = onView(
allOf(withId(R.id.radio_base32), withText("Base32")));
appCompatRadioButton.perform(click());
ViewInteraction appCompatCheckBox2 = onView(
allOf(withId(R.id.checkBox_pin), withText((R.string.with_pin))));
appCompatCheckBox2.perform(click());
//pressBack();
ViewInteraction appCompatSpinner = onView(
allOf(withId(R.id.spinner_row),
hasDescendant(withText("6")),
isDisplayed()));
appCompatSpinner.perform(click());
DataInteraction appCompatTextView2 = onData(anything())
.inAdapterView(childAtPosition(
withClassName(is("android.widget.PopupWindow$PopupBackgroundView")),
0))
.atPosition(1);
appCompatTextView2.perform(click());
ViewInteraction appCompatSpinner2 = onView(
allOf(withId(R.id.spinner_row), hasDescendant(withText("SHA1")),
isDisplayed()));
appCompatSpinner2.perform(click());
DataInteraction appCompatTextView3 = onData(anything())
.inAdapterView(childAtPosition(
withClassName(is("android.widget.PopupWindow$PopupBackgroundView")),
0))
.atPosition(2);
appCompatTextView3.perform(click());
ViewInteraction appCompatSpinner3 = onView(
allOf(withId(R.id.spinner_row),
childAtPosition(
allOf(withId(R.id.tableRow),
childAtPosition(
withClassName(is("androidx.constraintlayout.widget.ConstraintLayout")),
0)),
1),
hasDescendant(withText("HOTP")),
isDisplayed()));
appCompatSpinner3.perform(click());
DataInteraction appCompatTextView4 = onData(anything())
.inAdapterView(childAtPosition(
withClassName(is("android.widget.PopupWindow$PopupBackgroundView")),
0))
.atPosition(1);
appCompatTextView4.perform(click());
ViewInteraction textView7 = onView(
allOf(withId(R.id.label), withText((R.string.period)),
childAtPosition(
allOf(withId(R.id.tableRow),
childAtPosition(
IsInstanceOf.instanceOf(android.view.ViewGroup.class),
0)),
0),
isDisplayed()));
textView7.check(matches(withText((R.string.period))));
ViewInteraction textView8 = onView(
allOf(withId(android.R.id.text1), withText("30s"),
childAtPosition(
allOf(withId(R.id.spinner_row),
childAtPosition(
withId(R.id.tableRow),
1)),
0),
isDisplayed()));
textView8.check(matches(withText("30s")));
ViewInteraction appCompatSpinner4 = onView(
allOf(withId(R.id.spinner_row),
childAtPosition(
allOf(withId(R.id.tableRow),
childAtPosition(
withClassName(is("androidx.constraintlayout.widget.ConstraintLayout")),
0)),
1),
hasDescendant(withText("TOTP")),
isDisplayed()));
appCompatSpinner4.perform(click());
DataInteraction appCompatTextView5 = onData(anything())
.inAdapterView(childAtPosition(
withClassName(is("android.widget.PopupWindow$PopupBackgroundView")),
0))
.atPosition(0);
appCompatTextView5.perform(click());
ViewInteraction appCompatButton = onView(
allOf(withId(R.id.button_add), withText("+")));
appCompatButton.perform(click());
sleep();
onView(withText((R.string.tap_to_set_pin))).check(matches(isDisplayed()));
/* ViewInteraction textView10 = onView(
allOf(withId(R.id.textViewLabel), withText("Nam")));
textView10.check(matches(withText("Nam")));*/
sleep();
DataInteraction relativeLayout = onData(anything())
.inAdapterView(allOf(withId(R.id.listview),
childAtPosition(
withId(R.id.main_constraint_layout),
1)))
.atPosition(0);
relativeLayout.perform(click());
sleep();
ViewInteraction textView11 = onView(
allOf(IsInstanceOf.instanceOf(android.widget.TextView.class), withText((R.string.set_new_pin)),
isDisplayed()));
textView11.check(matches(withText((R.string.set_new_pin))));
ViewInteraction editText5 = onView(
allOf(childAtPosition(
allOf(withId(android.R.id.custom),
childAtPosition(
IsInstanceOf.instanceOf(android.widget.FrameLayout.class),
0)),
0),
isDisplayed()));
editText5.check(matches(isDisplayed()));
ViewInteraction button2 = onView(
allOf(withId(android.R.id.button1),
isDisplayed()));
button2.check(matches(isDisplayed()));
ViewInteraction button3 = onView(
allOf(withId(android.R.id.button2),
isDisplayed()));
button3.check(matches(isDisplayed()));
sleep();
ViewInteraction editText6 = onView(
allOf(childAtPosition(
allOf(withId(android.R.id.custom),
childAtPosition(
withClassName(is("android.widget.FrameLayout")),
0)),
0),
isDisplayed()));
editText6.perform(click());
sleep();
ViewInteraction editText7 = onView(
allOf(childAtPosition(
allOf(withId(android.R.id.custom),
childAtPosition(
withClassName(is("android.widget.FrameLayout")),
0)),
0),
isDisplayed()));
editText7.perform(replaceText("5"), closeSoftKeyboard());
sleep();
ViewInteraction appCompatButton2 = onView(
allOf(withId(android.R.id.button1), withText(R.string.button_text_save),
childAtPosition(
childAtPosition(
withClassName(is("android.widget.ScrollView")),
0),
3)));
appCompatButton2.perform(scrollTo(), click());
sleep();
onView(withId(R.id.textViewToken)).check(matches(withText("6667 4061")));
ViewInteraction textView13 = onView(
allOf(withId(R.id.textViewLabel), withText("Nam"),
childAtPosition(
childAtPosition(
withId(R.id.listview),
0),
2),
isDisplayed()));
textView13.check(matches(withText("Nam")));
ViewInteraction button4 = onView(
allOf(withId(R.id.next_button),
isDisplayed()));
button4.check(matches(isDisplayed()));
sleep();
DataInteraction relativeLayout2 = onData(anything())
.inAdapterView(allOf(withId(R.id.listview),
childAtPosition(
withId(R.id.main_constraint_layout),
1)))
.atPosition(0);
relativeLayout2.perform(longClick());
sleep();
ViewInteraction actionMenuItemView = onView(
allOf(withId(R.id.change_pin2), withContentDescription(R.string.change_pin),
isDisplayed()));
actionMenuItemView.perform(click());
sleep();
ViewInteraction editText8 = onView(
allOf(childAtPosition(
childAtPosition(
withId(android.R.id.custom),
0),
0),
isDisplayed()));
editText8.perform(click());
sleep();
ViewInteraction editText9 = onView(
allOf(childAtPosition(
childAtPosition(
withId(android.R.id.custom),
0),
0),
isDisplayed()));
editText9.perform(replaceText("8"), closeSoftKeyboard());
sleep();
ViewInteraction editText10 = onView(
allOf(childAtPosition(
childAtPosition(
withId(android.R.id.custom),
0),
1),
isDisplayed()));
editText10.perform(replaceText("8"), closeSoftKeyboard());
sleep();
ViewInteraction textView14 = onView(
allOf(IsInstanceOf.instanceOf(android.widget.TextView.class), withText(R.string.change_pin),
isDisplayed()));
textView14.check(matches(withText(R.string.change_pin)));
ViewInteraction editText11 = onView(
allOf(withText("•"),
childAtPosition(
childAtPosition(
withId(android.R.id.custom),
0),
0),
isDisplayed()));
editText11.check(matches(isDisplayed()));
ViewInteraction editText12 = onView(
allOf(withText("•"),
childAtPosition(
childAtPosition(
withId(android.R.id.custom),
0),
1),
isDisplayed()));
editText12.check(matches(isDisplayed()));
ViewInteraction button5 = onView(
allOf(withId(android.R.id.button1),
isDisplayed()));
button5.check(matches(isDisplayed()));
ViewInteraction button6 = onView(
allOf(withId(android.R.id.button2),
isDisplayed()));
button6.check(matches(isDisplayed()));
sleep();
ViewInteraction appCompatButton3 = onView(
allOf(withId(android.R.id.button1), withText(R.string.button_text_save),
childAtPosition(
childAtPosition(
withClassName(is("android.widget.ScrollView")),
0),
3)));
appCompatButton3.perform(scrollTo(), click());
sleep();
DataInteraction relativeLayout3 = onData(anything())
.inAdapterView(allOf(withId(R.id.listview),
childAtPosition(
withId(R.id.main_constraint_layout),
1)))
.atPosition(0);
relativeLayout3.perform(longClick());
sleep();
ViewInteraction textView15 = onView(
allOf(withId(R.id.change_pin2), withContentDescription(R.string.change_pin),
childAtPosition(
childAtPosition(
withId(R.id.action_mode_bar),
2),
0),
isDisplayed()));
textView15.check(matches(isDisplayed()));
ViewInteraction textView16 = onView(
allOf(withId(R.id.edit_token2), withContentDescription(R.string.rename),
childAtPosition(
childAtPosition(
withId(R.id.action_mode_bar),
2),
1),
isDisplayed()));
textView16.check(matches(isDisplayed()));
ViewInteraction textView17 = onView(
allOf(withId(R.id.delete_token2), withContentDescription("Item"),
childAtPosition(
childAtPosition(
withId(R.id.action_mode_bar),
2),
2),
isDisplayed()));
textView17.check(matches(isDisplayed()));
ViewInteraction imageView2 = onView(
allOf(childAtPosition(
allOf(withId(R.id.toolbar),
childAtPosition(
withId(R.id.main_constraint_layout),
0)),
0),
isDisplayed()));
imageView2.check(matches(isDisplayed()));
sleep();
sleep();
ViewInteraction actionMenuItemView3 = onView(
allOf(withId(R.id.edit_token2), withContentDescription(R.string.rename),
childAtPosition(
childAtPosition(
withId(R.id.action_mode_bar),
2),
1),
isDisplayed()));
actionMenuItemView3.perform(click());
sleep();
ViewInteraction editText13 = onView(
allOf(withText("Nam"),
isDisplayed()));
editText13.check(matches(isDisplayed()));
ViewInteraction button7 = onView(
allOf(withId(android.R.id.button2),
isDisplayed()));
button7.check(matches(isDisplayed()));
ViewInteraction editText14 = onView(
allOf(withText("Nam"),
isDisplayed()));
editText14.check(matches(withText("Nam")));
ViewInteraction button8 = onView(
allOf(withId(android.R.id.button1),
isDisplayed()));
button8.check(matches(isDisplayed()));
sleep();
ViewInteraction editText15 = onView(
allOf(withText("Nam"),
isDisplayed()));
editText15.perform(click());
sleep();
ViewInteraction editText16 = onView(
allOf(withText("Nam"),
childAtPosition(
allOf(withId(android.R.id.custom),
childAtPosition(
withClassName(is("android.widget.FrameLayout")),
0)),
0),
isDisplayed()));
editText16.perform(replaceText("Name"));
ViewInteraction editText17 = onView(
allOf(withText("Name"),
childAtPosition(
allOf(withId(android.R.id.custom),
childAtPosition(
withClassName(is("android.widget.FrameLayout")),
0)),
0),
isDisplayed()));
editText17.perform(closeSoftKeyboard());
sleep();
ViewInteraction appCompatButton4 = onView(
allOf(withId(android.R.id.button1), withText(R.string.button_text_save),
childAtPosition(
childAtPosition(
withClassName(is("android.widget.ScrollView")),
0),
3)));
appCompatButton4.perform(scrollTo(), click());
sleep();
sleep();
onView(withId(R.id.textViewLabel)).check(matches(withText("Name")));
sleep();
openActionBarOverflowOrOptionsMenu(getInstrumentation().getTargetContext());
sleep();
ViewInteraction appCompatTextView6 = onView(
allOf(withId(R.id.title), withText(R.string.menu_add_manually),
childAtPosition(
childAtPosition(
withId(R.id.content),
0),
0),
isDisplayed()));
appCompatTextView6.perform(click());
sleep();
ViewInteraction appCompatSpinner5 = onView(
allOf(withId(R.id.spinner_row), hasDescendant(withText("HOTP")),
isDisplayed()));
appCompatSpinner5.perform(click());
DataInteraction appCompatTextView7 = onData(anything())
.inAdapterView(childAtPosition(
withClassName(is("android.widget.PopupWindow$PopupBackgroundView")),
0))
.atPosition(1);
appCompatTextView7.perform(click());
ViewInteraction appCompatButton6 = onView(
allOf(withId(R.id.button_add), withText("+"),
childAtPosition(
childAtPosition(
withId(android.R.id.content),
0),
5),
isDisplayed()));
appCompatButton6.perform(click());
sleep();
onView(allOf(withId(R.id.progressBar), withEffectiveVisibility(ViewMatchers.Visibility.VISIBLE))).check(matches(isDisplayed()));
}
private String getString(int resID) {
return InstrumentationRegistry.getInstrumentation().getTargetContext().getString(resID);
}
private void sleep() {
try {
Thread.sleep(1000);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
private static Matcher<View> childAtPosition(
final Matcher<View> parentMatcher, final int position) {
return new TypeSafeMatcher<View>() {
@Override
public void describeTo(Description description) {
description.appendText("Child at position " + position + " in parent ");
parentMatcher.describeTo(description);
}
@Override
public boolean matchesSafely(View view) {
ViewParent parent = view.getParent();
return parent instanceof ViewGroup && parentMatcher.matches(parent)
&& view.equals(((ViewGroup) parent).getChildAt(position));
}
};
}
}
|
fixed another line in test
|
app/src/androidTest/java/it/netknights/piauthenticator/DetailAndMenuTest.java
|
fixed another line in test
|
|
Java
|
apache-2.0
|
4649c1517b7ca5406218d9e145f601346e3e5d24
| 0
|
codetojoy/easter_eggs_for_git,codetojoy/easter_eggs_for_git
|
// TODO: add copyright notice
package net.codetojoy.example;
public class User {
private String name;
private int age;
public User(String name, int age) {
this.name = name;
this.age = age;
}
public String getName() { return name; }
public int getAge() { return age; }
}
|
egg_basic_2_commits_no_conflict/src/main/java/net/codetojoy/example/User.java
|
package net.codetojoy.example;
public class User {
private String name;
private int age;
public User(String name, int age) {
this.name = name;
this.age = age;
}
public String getName() { return name; }
public int getAge() { return age; }
}
|
incremental change as part of a larger frenzy of activity
|
egg_basic_2_commits_no_conflict/src/main/java/net/codetojoy/example/User.java
|
incremental change as part of a larger frenzy of activity
|
|
Java
|
apache-2.0
|
9f306339217d77d09c1089345514359577d066f5
| 0
|
yangdd1205/spring-boot,eddumelendez/spring-boot,herau/spring-boot,htynkn/spring-boot,habuma/spring-boot,i007422/jenkins2-course-spring-boot,nebhale/spring-boot,philwebb/spring-boot,herau/spring-boot,drumonii/spring-boot,i007422/jenkins2-course-spring-boot,xiaoleiPENG/my-project,yhj630520/spring-boot,mdeinum/spring-boot,lexandro/spring-boot,ollie314/spring-boot,dreis2211/spring-boot,pvorb/spring-boot,spring-projects/spring-boot,bjornlindstrom/spring-boot,rweisleder/spring-boot,felipeg48/spring-boot,lburgazzoli/spring-boot,kdvolder/spring-boot,mbenson/spring-boot,rajendra-chola/jenkins2-course-spring-boot,joshiste/spring-boot,hqrt/jenkins2-course-spring-boot,akmaharshi/jenkins,pvorb/spring-boot,DeezCashews/spring-boot,donhuvy/spring-boot,kamilszymanski/spring-boot,vakninr/spring-boot,royclarkson/spring-boot,spring-projects/spring-boot,Buzzardo/spring-boot,bijukunjummen/spring-boot,dreis2211/spring-boot,nebhale/spring-boot,qerub/spring-boot,deki/spring-boot,DeezCashews/spring-boot,brettwooldridge/spring-boot,royclarkson/spring-boot,olivergierke/spring-boot,isopov/spring-boot,scottfrederick/spring-boot,vpavic/spring-boot,i007422/jenkins2-course-spring-boot,michael-simons/spring-boot,ilayaperumalg/spring-boot,bbrouwer/spring-boot,joshthornhill/spring-boot,mbogoevici/spring-boot,rweisleder/spring-boot,javyzheng/spring-boot,mosoft521/spring-boot,chrylis/spring-boot,nebhale/spring-boot,qerub/spring-boot,mbenson/spring-boot,ptahchiev/spring-boot,Nowheresly/spring-boot,jxblum/spring-boot,Buzzardo/spring-boot,RichardCSantana/spring-boot,NetoDevel/spring-boot,pvorb/spring-boot,brettwooldridge/spring-boot,zhanhb/spring-boot,zhanhb/spring-boot,htynkn/spring-boot,yhj630520/spring-boot,mdeinum/spring-boot,afroje-reshma/spring-boot-sample,mdeinum/spring-boot,jayarampradhan/spring-boot,eddumelendez/spring-boot,chrylis/spring-boot,rweisleder/spring-boot,bbrouwer/spring-boot,bjornlindstrom/spring-boot,wilkinsona/spring-boot,cleverjava/jenkins2-course-spring-boot,Buzzardo/spring-boot,ih
oneymon/spring-boot,donhuvy/spring-boot,linead/spring-boot,michael-simons/spring-boot,lucassaldanha/spring-boot,vakninr/spring-boot,kdvolder/spring-boot,tsachev/spring-boot,DeezCashews/spring-boot,javyzheng/spring-boot,scottfrederick/spring-boot,sbcoba/spring-boot,DeezCashews/spring-boot,aahlenst/spring-boot,aahlenst/spring-boot,candrews/spring-boot,ilayaperumalg/spring-boot,NetoDevel/spring-boot,felipeg48/spring-boot,sebastiankirsch/spring-boot,bijukunjummen/spring-boot,lburgazzoli/spring-boot,lexandro/spring-boot,royclarkson/spring-boot,michael-simons/spring-boot,felipeg48/spring-boot,NetoDevel/spring-boot,mbenson/spring-boot,sebastiankirsch/spring-boot,pvorb/spring-boot,rweisleder/spring-boot,bjornlindstrom/spring-boot,aahlenst/spring-boot,linead/spring-boot,dreis2211/spring-boot,aahlenst/spring-boot,donhuvy/spring-boot,hello2009chen/spring-boot,ihoneymon/spring-boot,joshiste/spring-boot,yhj630520/spring-boot,bclozel/spring-boot,hello2009chen/spring-boot,RichardCSantana/spring-boot,ihoneymon/spring-boot,deki/spring-boot,jxblum/spring-boot,shangyi0102/spring-boot,NetoDevel/spring-boot,kamilszymanski/spring-boot,yhj630520/spring-boot,rajendra-chola/jenkins2-course-spring-boot,zhanhb/spring-boot,cleverjava/jenkins2-course-spring-boot,Nowheresly/spring-boot,akmaharshi/jenkins,SaravananParthasarathy/SPSDemo,royclarkson/spring-boot,lburgazzoli/spring-boot,herau/spring-boot,Nowheresly/spring-boot,donhuvy/spring-boot,bijukunjummen/spring-boot,hqrt/jenkins2-course-spring-boot,tiarebalbi/spring-boot,rweisleder/spring-boot,bijukunjummen/spring-boot,jvz/spring-boot,SaravananParthasarathy/SPSDemo,ilayaperumalg/spring-boot,mosoft521/spring-boot,ollie314/spring-boot,tsachev/spring-boot,ollie314/spring-boot,aahlenst/spring-boot,chrylis/spring-boot,Buzzardo/spring-boot,qerub/spring-boot,tsachev/spring-boot,tsachev/spring-boot,sbcoba/spring-boot,xiaoleiPENG/my-project,ptahchiev/spring-boot,habuma/spring-boot,candrews/spring-boot,minmay/spring-boot,eddumelendez/spring-boot,habuma/s
pring-boot,spring-projects/spring-boot,mevasaroj/jenkins2-course-spring-boot,joshiste/spring-boot,ihoneymon/spring-boot,Nowheresly/spring-boot,bbrouwer/spring-boot,isopov/spring-boot,mbenson/spring-boot,mbenson/spring-boot,bijukunjummen/spring-boot,spring-projects/spring-boot,afroje-reshma/spring-boot-sample,vpavic/spring-boot,jbovet/spring-boot,shangyi0102/spring-boot,brettwooldridge/spring-boot,lburgazzoli/spring-boot,javyzheng/spring-boot,ptahchiev/spring-boot,chrylis/spring-boot,sbcoba/spring-boot,donhuvy/spring-boot,zhanhb/spring-boot,spring-projects/spring-boot,jbovet/spring-boot,philwebb/spring-boot,ptahchiev/spring-boot,philwebb/spring-boot,hqrt/jenkins2-course-spring-boot,jxblum/spring-boot,mbogoevici/spring-boot,Nowheresly/spring-boot,shakuzen/spring-boot,xiaoleiPENG/my-project,lburgazzoli/spring-boot,cleverjava/jenkins2-course-spring-boot,pvorb/spring-boot,vpavic/spring-boot,rajendra-chola/jenkins2-course-spring-boot,philwebb/spring-boot,kdvolder/spring-boot,mevasaroj/jenkins2-course-spring-boot,habuma/spring-boot,tsachev/spring-boot,vakninr/spring-boot,philwebb/spring-boot,scottfrederick/spring-boot,mdeinum/spring-boot,hqrt/jenkins2-course-spring-boot,drumonii/spring-boot,joshiste/spring-boot,shangyi0102/spring-boot,felipeg48/spring-boot,candrews/spring-boot,kdvolder/spring-boot,scottfrederick/spring-boot,bclozel/spring-boot,lexandro/spring-boot,jayarampradhan/spring-boot,vpavic/spring-boot,herau/spring-boot,SaravananParthasarathy/SPSDemo,linead/spring-boot,aahlenst/spring-boot,afroje-reshma/spring-boot-sample,nebhale/spring-boot,tiarebalbi/spring-boot,drumonii/spring-boot,afroje-reshma/spring-boot-sample,NetoDevel/spring-boot,mbogoevici/spring-boot,mbogoevici/spring-boot,bbrouwer/spring-boot,mosoft521/spring-boot,isopov/spring-boot,joshiste/spring-boot,olivergierke/spring-boot,qerub/spring-boot,akmaharshi/jenkins,ihoneymon/spring-boot,isopov/spring-boot,DeezCashews/spring-boot,rajendra-chola/jenkins2-course-spring-boot,lucassaldanha/spring-boot,xiaoleiP
ENG/my-project,dreis2211/spring-boot,mevasaroj/jenkins2-course-spring-boot,jvz/spring-boot,scottfrederick/spring-boot,qerub/spring-boot,habuma/spring-boot,akmaharshi/jenkins,sbcoba/spring-boot,minmay/spring-boot,RichardCSantana/spring-boot,habuma/spring-boot,deki/spring-boot,jxblum/spring-boot,jbovet/spring-boot,kdvolder/spring-boot,shakuzen/spring-boot,bjornlindstrom/spring-boot,yangdd1205/spring-boot,michael-simons/spring-boot,yhj630520/spring-boot,mevasaroj/jenkins2-course-spring-boot,SaravananParthasarathy/SPSDemo,ollie314/spring-boot,mosoft521/spring-boot,lucassaldanha/spring-boot,philwebb/spring-boot,ptahchiev/spring-boot,deki/spring-boot,joshthornhill/spring-boot,bbrouwer/spring-boot,ilayaperumalg/spring-boot,htynkn/spring-boot,lexandro/spring-boot,htynkn/spring-boot,joshthornhill/spring-boot,htynkn/spring-boot,ihoneymon/spring-boot,hello2009chen/spring-boot,RichardCSantana/spring-boot,jvz/spring-boot,brettwooldridge/spring-boot,jvz/spring-boot,vpavic/spring-boot,htynkn/spring-boot,Buzzardo/spring-boot,cleverjava/jenkins2-course-spring-boot,RichardCSantana/spring-boot,kamilszymanski/spring-boot,rajendra-chola/jenkins2-course-spring-boot,sebastiankirsch/spring-boot,wilkinsona/spring-boot,jayarampradhan/spring-boot,vakninr/spring-boot,deki/spring-boot,mbenson/spring-boot,wilkinsona/spring-boot,cleverjava/jenkins2-course-spring-boot,spring-projects/spring-boot,tiarebalbi/spring-boot,scottfrederick/spring-boot,minmay/spring-boot,joshthornhill/spring-boot,kamilszymanski/spring-boot,felipeg48/spring-boot,shakuzen/spring-boot,i007422/jenkins2-course-spring-boot,minmay/spring-boot,bclozel/spring-boot,shangyi0102/spring-boot,drumonii/spring-boot,drumonii/spring-boot,sbcoba/spring-boot,linead/spring-boot,michael-simons/spring-boot,mdeinum/spring-boot,linead/spring-boot,hqrt/jenkins2-course-spring-boot,chrylis/spring-boot,afroje-reshma/spring-boot-sample,xiaoleiPENG/my-project,lucassaldanha/spring-boot,michael-simons/spring-boot,eddumelendez/spring-boot,jvz/spring-boot,
ilayaperumalg/spring-boot,vpavic/spring-boot,mevasaroj/jenkins2-course-spring-boot,lexandro/spring-boot,javyzheng/spring-boot,sebastiankirsch/spring-boot,shakuzen/spring-boot,tiarebalbi/spring-boot,sebastiankirsch/spring-boot,rweisleder/spring-boot,zhanhb/spring-boot,bjornlindstrom/spring-boot,wilkinsona/spring-boot,mosoft521/spring-boot,dreis2211/spring-boot,olivergierke/spring-boot,vakninr/spring-boot,lucassaldanha/spring-boot,wilkinsona/spring-boot,jayarampradhan/spring-boot,tsachev/spring-boot,shakuzen/spring-boot,bclozel/spring-boot,herau/spring-boot,hello2009chen/spring-boot,drumonii/spring-boot,eddumelendez/spring-boot,ollie314/spring-boot,dreis2211/spring-boot,hello2009chen/spring-boot,wilkinsona/spring-boot,jbovet/spring-boot,SaravananParthasarathy/SPSDemo,tiarebalbi/spring-boot,candrews/spring-boot,donhuvy/spring-boot,kamilszymanski/spring-boot,zhanhb/spring-boot,ptahchiev/spring-boot,joshiste/spring-boot,felipeg48/spring-boot,joshthornhill/spring-boot,olivergierke/spring-boot,royclarkson/spring-boot,shakuzen/spring-boot,mdeinum/spring-boot,akmaharshi/jenkins,Buzzardo/spring-boot,nebhale/spring-boot,chrylis/spring-boot,i007422/jenkins2-course-spring-boot,shangyi0102/spring-boot,isopov/spring-boot,minmay/spring-boot,jayarampradhan/spring-boot,kdvolder/spring-boot,candrews/spring-boot,jxblum/spring-boot,mbogoevici/spring-boot,brettwooldridge/spring-boot,ilayaperumalg/spring-boot,yangdd1205/spring-boot,isopov/spring-boot,bclozel/spring-boot,bclozel/spring-boot,jbovet/spring-boot,javyzheng/spring-boot,eddumelendez/spring-boot,olivergierke/spring-boot,jxblum/spring-boot,tiarebalbi/spring-boot
|
/*
* Copyright 2012-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import sampleconfig.MyComponentInPackageWithoutDot;
import org.springframework.boot.sampleconfig.MyComponent;
import org.springframework.context.support.StaticApplicationContext;
import org.springframework.core.io.ClassPathResource;
import static org.assertj.core.api.Assertions.assertThat;
/**
* Tests for {@link BeanDefinitionLoader}.
*
* @author Phillip Webb
*/
public class BeanDefinitionLoaderTests {

	private StaticApplicationContext registry;

	@Before
	public void setup() {
		this.registry = new StaticApplicationContext();
	}

	@After
	public void cleanUp() {
		this.registry.close();
	}

	@Test
	public void loadClass() throws Exception {
		assertSingleBeanLoaded(loaderFor(MyComponent.class), "myComponent");
	}

	@Test
	public void loadXmlResource() throws Exception {
		ClassPathResource xml = new ClassPathResource("sample-beans.xml", getClass());
		assertSingleBeanLoaded(loaderFor(xml), "myXmlComponent");
	}

	@Test
	public void loadGroovyResource() throws Exception {
		ClassPathResource groovy = new ClassPathResource("sample-beans.groovy",
				getClass());
		assertSingleBeanLoaded(loaderFor(groovy), "myGroovyComponent");
	}

	@Test
	public void loadGroovyResourceWithNamespace() throws Exception {
		ClassPathResource groovy = new ClassPathResource("sample-namespace.groovy",
				getClass());
		assertSingleBeanLoaded(loaderFor(groovy), "myGroovyComponent");
	}

	@Test
	public void loadPackage() throws Exception {
		assertSingleBeanLoaded(loaderFor(MyComponent.class.getPackage()), "myComponent");
	}

	@Test
	public void loadClassName() throws Exception {
		assertSingleBeanLoaded(loaderFor(MyComponent.class.getName()), "myComponent");
	}

	@Test
	public void loadResourceName() throws Exception {
		assertSingleBeanLoaded(
				loaderFor("classpath:org/springframework/boot/sample-beans.xml"),
				"myXmlComponent");
	}

	@Test
	public void loadGroovyName() throws Exception {
		assertSingleBeanLoaded(
				loaderFor("classpath:org/springframework/boot/sample-beans.groovy"),
				"myGroovyComponent");
	}

	@Test
	public void loadPackageName() throws Exception {
		assertSingleBeanLoaded(loaderFor(MyComponent.class.getPackage().getName()),
				"myComponent");
	}

	@Test
	public void loadPackageNameWithoutDot() throws Exception {
		// See gh-6126
		assertSingleBeanLoaded(
				loaderFor(MyComponentInPackageWithoutDot.class.getPackage().getName()),
				"myComponentInPackageWithoutDot");
	}

	@Test
	public void loadPackageAndClassDoesNotDoubleAdd() throws Exception {
		assertSingleBeanLoaded(
				loaderFor(MyComponent.class.getPackage(), MyComponent.class),
				"myComponent");
	}

	// Builds a loader for the given sources against the shared test registry.
	private BeanDefinitionLoader loaderFor(Object... sources) {
		return new BeanDefinitionLoader(this.registry, sources);
	}

	// Asserts that loading registered exactly one bean with the expected name.
	private void assertSingleBeanLoaded(BeanDefinitionLoader loader, String beanName) {
		assertThat(loader.load()).isEqualTo(1);
		assertThat(this.registry.containsBean(beanName)).isTrue();
	}

}
|
spring-boot/src/test/java/org/springframework/boot/BeanDefinitionLoaderTests.java
|
/*
* Copyright 2012-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import sampleconfig.MyComponentInPackageWithoutDot;
import org.springframework.boot.sampleconfig.MyComponent;
import org.springframework.context.support.StaticApplicationContext;
import org.springframework.core.io.ClassPathResource;
import static org.assertj.core.api.Assertions.assertThat;
/**
* Tests for {@link BeanDefinitionLoader}.
*
* @author Phillip Webb
*/
public class BeanDefinitionLoaderTests {

	private StaticApplicationContext registry;

	@Before
	public void setup() {
		this.registry = new StaticApplicationContext();
	}

	@After
	public void cleanUp() {
		this.registry.close();
	}

	@Test
	public void loadClass() throws Exception {
		BeanDefinitionLoader loader = new BeanDefinitionLoader(this.registry,
				MyComponent.class);
		assertThat(loader.load()).isEqualTo(1);
		assertThat(this.registry.containsBean("myComponent")).isTrue();
	}

	@Test
	public void loadXmlResource() throws Exception {
		ClassPathResource resource = new ClassPathResource("sample-beans.xml",
				getClass());
		BeanDefinitionLoader loader = new BeanDefinitionLoader(this.registry, resource);
		assertThat(loader.load()).isEqualTo(1);
		assertThat(this.registry.containsBean("myXmlComponent")).isTrue();
	}

	@Test
	public void loadGroovyResource() throws Exception {
		ClassPathResource resource = new ClassPathResource("sample-beans.groovy",
				getClass());
		BeanDefinitionLoader loader = new BeanDefinitionLoader(this.registry, resource);
		assertThat(loader.load()).isEqualTo(1);
		assertThat(this.registry.containsBean("myGroovyComponent")).isTrue();
	}

	@Test
	public void loadGroovyResourceWithNamespace() throws Exception {
		ClassPathResource resource = new ClassPathResource("sample-namespace.groovy",
				getClass());
		BeanDefinitionLoader loader = new BeanDefinitionLoader(this.registry, resource);
		assertThat(loader.load()).isEqualTo(1);
		assertThat(this.registry.containsBean("myGroovyComponent")).isTrue();
	}

	@Test
	public void loadPackage() throws Exception {
		BeanDefinitionLoader loader = new BeanDefinitionLoader(this.registry,
				MyComponent.class.getPackage());
		assertThat(loader.load()).isEqualTo(1);
		assertThat(this.registry.containsBean("myComponent")).isTrue();
	}

	@Test
	public void loadClassName() throws Exception {
		BeanDefinitionLoader loader = new BeanDefinitionLoader(this.registry,
				MyComponent.class.getName());
		assertThat(loader.load()).isEqualTo(1);
		assertThat(this.registry.containsBean("myComponent")).isTrue();
	}

	@Test
	public void loadResourceName() throws Exception {
		BeanDefinitionLoader loader = new BeanDefinitionLoader(this.registry,
				"classpath:org/springframework/boot/sample-beans.xml");
		assertThat(loader.load()).isEqualTo(1);
		assertThat(this.registry.containsBean("myXmlComponent")).isTrue();
	}

	@Test
	public void loadGroovyName() throws Exception {
		BeanDefinitionLoader loader = new BeanDefinitionLoader(this.registry,
				"classpath:org/springframework/boot/sample-beans.groovy");
		assertThat(loader.load()).isEqualTo(1);
		assertThat(this.registry.containsBean("myGroovyComponent")).isTrue();
	}

	@Test
	public void loadPackageName() throws Exception {
		BeanDefinitionLoader loader = new BeanDefinitionLoader(this.registry,
				MyComponent.class.getPackage().getName());
		assertThat(loader.load()).isEqualTo(1);
		assertThat(this.registry.containsBean("myComponent")).isTrue();
	}

	@Test
	public void loadPackageNameWithoutDot() throws Exception {
		// See gh-6126
		BeanDefinitionLoader loader = new BeanDefinitionLoader(this.registry,
				MyComponentInPackageWithoutDot.class.getPackage().getName());
		int loaded = loader.load();
		// Fixed: these two assertions previously used Hamcrest-style
		// assertThat(loaded, equalTo(1)) and JUnit assertTrue(...), neither of which
		// is imported in this class (only AssertJ's assertThat is statically imported),
		// so the file did not compile. Converted to AssertJ to match every other test.
		assertThat(loaded).isEqualTo(1);
		assertThat(this.registry.containsBean("myComponentInPackageWithoutDot")).isTrue();
	}

	@Test
	public void loadPackageAndClassDoesNotDoubleAdd() throws Exception {
		BeanDefinitionLoader loader = new BeanDefinitionLoader(this.registry,
				MyComponent.class.getPackage(), MyComponent.class);
		assertThat(loader.load()).isEqualTo(1);
		assertThat(this.registry.containsBean("myComponent")).isTrue();
	}

}
|
Use AssertJ in merged 1.3.x tests
|
spring-boot/src/test/java/org/springframework/boot/BeanDefinitionLoaderTests.java
|
Use AssertJ in merged 1.3.x tests
|
|
Java
|
apache-2.0
|
fc4720c10674ecadd8e506aa85d3d17949aa6d24
| 0
|
droolsjbpm/optaplanner,droolsjbpm/optaplanner,droolsjbpm/optaplanner,tkobayas/optaplanner,tkobayas/optaplanner,baldimir/optaplanner,baldimir/optaplanner,droolsjbpm/optaplanner,tkobayas/optaplanner,baldimir/optaplanner,tkobayas/optaplanner,baldimir/optaplanner
|
/*
* Copyright 2019 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.optaplanner.core.api.solver;
import java.util.UUID;
import java.util.concurrent.ExecutionException;
import java.util.function.Consumer;
import java.util.function.Function;
import org.optaplanner.core.api.domain.solution.PlanningSolution;
/**
* @param <Solution_> the solution type, the class with the {@link PlanningSolution} annotation
* @param <ProblemId_> the ID type of a submitted problem, such as {@link Long} or {@link UUID}.
*/
public interface SolverJob<Solution_, ProblemId_> {

	/**
	 * Returns the id of the planning problem that was submitted for this job.
	 * @return never null, a value given to {@link SolverManager#solveBatch(Object, Function, Consumer)}
	 * or {@link SolverManager#solveObserving(Object, Function, Consumer)}
	 */
	ProblemId_ getProblemId();

	/**
	 * Returns if the {@link Solver} is scheduled to solve, actively solving or not.
	 * <p>
	 * Returns {@link SolverStatus#NOT_SOLVING} if the solver already terminated.
	 * @return never null
	 */
	SolverStatus getSolverStatus();

	// TODO Future features
	// void reloadProblem(Function<ProblemId_, Solution_> problemFinder);

	// TODO Future features
	// void addProblemFactChange(ProblemFactChange<Solution_> problemFactChange);

	/**
	 * Terminates the solver or cancels the solver job if it hasn't (re)started yet.
	 * <p>
	 * Does nothing if the solver already terminated.
	 */
	void terminateEarly();

	/**
	 * Waits if necessary for the solver to complete and then returns the final best {@link PlanningSolution}.
	 * @return never null, but it could be the original uninitialized problem
	 * @throws InterruptedException if the current thread was interrupted while waiting
	 * @throws ExecutionException if the computation threw an exception
	 */
	Solution_ getFinalBestSolution() throws InterruptedException, ExecutionException;

}
|
optaplanner-core/src/main/java/org/optaplanner/core/api/solver/SolverJob.java
|
/*
* Copyright 2019 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.optaplanner.core.api.solver;
import java.util.UUID;
import java.util.concurrent.ExecutionException;
import java.util.function.Consumer;
import java.util.function.Function;
import org.optaplanner.core.api.domain.solution.PlanningSolution;
/**
* @param <Solution_> the solution type, the class with the {@link PlanningSolution} annotation
* @param <ProblemId_> the ID type of a submitted problem, such as {@link Long} or {@link UUID}.
*/
public interface SolverJob<Solution_, ProblemId_> {

	/**
	 * Returns the id of the planning problem that was submitted for this job.
	 * @return never null, a value given to {@link SolverManager#solveBatch(Object, Function, Consumer)}
	 * or {@link SolverManager#solveObserving(Object, Function, Consumer)}
	 */
	ProblemId_ getProblemId();

	/**
	 * Returns if the {@link Solver} is scheduled to solve, actively solving or not.
	 * <p>
	 * Returns {@link SolverStatus#NOT_SOLVING} if the solver already terminated.
	 * @return never null
	 */
	SolverStatus getSolverStatus();

	// TODO Future features
	// void reloadProblem(Function<ProblemId_, Solution_> problemFinder);

	// TODO Future features
	// void addProblemFactChange(ProblemFactChange<Solution_> problemFactChange);

	/**
	 * Terminates the solver or cancels the solver job if it hasn't (re)started yet.
	 * <p>
	 * Does nothing if the solver already terminated.
	 */
	void terminateEarly();

	/**
	 * Waits if necessary for the solver to complete and then returns the final best {@link PlanningSolution}.
	 * @return never null, but it could be the original uninitialized problem
	 * @throws InterruptedException if the current thread was interrupted while waiting
	 * @throws ExecutionException if the computation threw an exception
	 */
	Solution_ getFinalBestSolution() throws InterruptedException, ExecutionException;

}
|
Update optaplanner-core/src/main/java/org/optaplanner/core/api/solver/SolverJob.java
Co-Authored-By: Lukáš Petrovický <65adda4326914576405c9e3a62f4904d96573863@petrovicky.net>
|
optaplanner-core/src/main/java/org/optaplanner/core/api/solver/SolverJob.java
|
Update optaplanner-core/src/main/java/org/optaplanner/core/api/solver/SolverJob.java
|
|
Java
|
apache-2.0
|
3d861ff7987716b3e3b1c1535f32bf27ab7a0770
| 0
|
linqs/psl,linqs/psl,linqs/psl
|
/*
* This file is part of the PSL software.
* Copyright 2011-2015 University of Maryland
* Copyright 2013-2018 The Regents of the University of California
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.linqs.psl.database.rdbms;
import org.linqs.psl.util.StringUtils;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.math.RoundingMode;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * A histogram that represents the distribution of values in a column.
 * The DBMS may return this information as either buckets or specific values (if there are not that many unique values).
 * This class will handle both and estimate join sizes between other histograms.
 *
 * If the column type is an int, then a bucket will be assumed to be uniformly distributed.
 * This will typically result in more accurate estimates.
 *
 * Internally, this class will use BigInteger.
 * However, all APIs will use primitive or wrapper types to make it easy on the user.
 */
public class SelectivityHistogram<T extends Comparable<? super T>> {
	/**
	 * A guess about how much of a bucket is in use when there is some overlap.
	 * This is only used if we have no information about the actual width of a bucket.
	 */
	public static final double BUCKET_USAGE_GUESS = 0.5;

	/**
	 * The values we see must all be the same exact class.
	 */
	private Class columnType;

	/**
	 * Valid histograms may have no entries in them (especially after joins).
	 */
	private boolean isEmpty;

	/**
	 * The boundaries in a histogram of values for each column.
	 * Although start/end boundaries are specified,
	 * any values past those extremes will be moved into the first/last bucket.
	 * Assume that the bounds are [inclusive, exclusive).
	 * Except for the very last bound, which will be included in the last bucket.
	 * This fine distinction will only actually apply when working with int bounds.
	 */
	private List<T> histogramBounds;
	private List<BigInteger> histogramCounts;

	/**
	 * If the cardinality of a column is low, then the database may just report those unique values.
	 * In this case, we can build an exact distribution of the column.
	 */
	private Map<T, BigInteger> exactHistogram;
	private List<T> sortedExactHistogramKeys;

	public SelectivityHistogram() {
		this(false);
	}

	private SelectivityHistogram(boolean isEmpty) {
		columnType = null;
		this.isEmpty = isEmpty;
		histogramBounds = null;
		histogramCounts = null;
		exactHistogram = null;
		sortedExactHistogramKeys = null;
	}

	/**
	 * Load bucket-style histogram data.
	 * bounds must have exactly one more element than counts:
	 * count i covers the range [bounds[i], bounds[i + 1]).
	 * Counts are normalized to BigInteger internally.
	 */
	public void addHistogramBounds(List<T> bounds, List<? extends Number> counts) {
		if (bounds.size() == 0 || counts.size() == 0) {
			isEmpty = true;
			return;
		}

		assert(bounds.size() == counts.size() + 1);
		assert(bounds.size() >= 2);

		histogramBounds = new ArrayList<T>(bounds);
		histogramCounts = new ArrayList<BigInteger>(counts.size());

		for (Number count : counts) {
			if (count instanceof BigInteger) {
				histogramCounts.add((BigInteger)count);
			} else {
				histogramCounts.add(BigInteger.valueOf(count.longValue()));
			}
		}

		checkTypes(histogramBounds);
	}

	/**
	 * Load exact (value to count) histogram data.
	 * Keys are also kept in sorted order for merge-style traversals against bucket histograms.
	 */
	public void addHistogramExact(Map<T, ? extends Number> histogram) {
		if (histogram.size() == 0) {
			isEmpty = true;
			return;
		}

		assert(histogram.size() > 0);

		exactHistogram = new HashMap<T, BigInteger>();
		for (Map.Entry<T, ? extends Number> entry : histogram.entrySet()) {
			T key = entry.getKey();
			Number count = entry.getValue();

			if (count instanceof BigInteger) {
				exactHistogram.put(key, (BigInteger)count);
			} else {
				exactHistogram.put(key, BigInteger.valueOf(count.longValue()));
			}
		}

		sortedExactHistogramKeys = new ArrayList<T>(histogram.keySet());
		Collections.sort(sortedExactHistogramKeys);

		checkTypes(sortedExactHistogramKeys);
	}

	// Ensure all observed values share the exact same class (the first value seen sets columnType).
	private void checkTypes(Iterable<T> values) {
		for (T value : values) {
			if (columnType == null) {
				columnType = value.getClass();
			}

			if (value.getClass() != columnType) {
				throw new IllegalArgumentException(String.format(
						"Inconsistent types. Expected %s, found %s.",
						columnType.getName(), value.getClass().getName()));
			}
		}
	}

	/**
	 * Get the number of rows represented by this histogram.
	 * For histograms created directly from database tables, the size should be equal to the table count.
	 * Note that this method is O(n).
	 * Overflows will return Long.MAX_VALUE.
	 */
	public long size() {
		if (!isValid() || isEmpty) {
			return 0;
		}

		BigInteger count = BigInteger.ZERO;

		if (exactHistogram != null) {
			for (BigInteger bucketCount : exactHistogram.values()) {
				count = count.add(bucketCount);
			}
		} else {
			for (BigInteger bucketCount : histogramCounts) {
				count = count.add(bucketCount);
			}
		}

		// Clamp to Long.MAX_VALUE instead of overflowing on conversion.
		return count.min(BigInteger.valueOf(Long.MAX_VALUE)).longValue();
	}

	/**
	 * A histogram is not valid until either addHistogramBounds() or addHistogramExact() is called.
	 * Unless it is empty, empty histograms are always valid.
	 */
	public boolean isValid() {
		return isEmpty || (columnType != null);
	}

	/**
	 * Get a new histogram that represents the join of this histogram with another.
	 */
	public SelectivityHistogram<T> join(SelectivityHistogram<T> other) {
		if (!isValid() || !other.isValid()) {
			throw new IllegalArgumentException("Connot compute join on invalid histograms.");
		}

		// A join against an empty side is always empty.
		if (isEmpty || other.isEmpty) {
			return new SelectivityHistogram<T>(true);
		}

		// Make sure the classes match exactly (referential equality works on Class).
		if (columnType != other.columnType) {
			throw new IllegalArgumentException(String.format(
					"Both histograms must match column type exactly. Got %s and %s.",
					columnType.getName(),
					other.columnType.getName()));
		}

		if (exactHistogram != null && other.exactHistogram != null) {
			return computeExactJoin(other);
		}

		if (exactHistogram != null) {
			return computeExactBucketJoin(other);
		}

		if (other.exactHistogram != null) {
			// Flip the orientation so the exact side always drives the merge.
			return other.computeExactBucketJoin(this);
		}

		return computeBucketJoin(other);
	}

	public String toString() {
		if (isEmpty) {
			return "Empty Histogram";
		}

		if (!isValid()) {
			return "Invalid Histogram";
		}

		StringBuilder builder = new StringBuilder();

		if (exactHistogram != null) {
			for (int i = 0; i < sortedExactHistogramKeys.size(); i++) {
				T exactValue = sortedExactHistogramKeys.get(i);

				if (i != 0) {
					builder.append(", ");
				}

				builder.append(exactValue);
				builder.append(" (" + exactHistogram.get(exactValue) + ")");
			}

			if (histogramBounds != null) {
				builder.append("\n");
			}
		}

		if (histogramBounds != null) {
			for (int i = 0; i < histogramCounts.size(); i++) {
				if (i != 0) {
					builder.append(", ");
				}

				T bucketStart = histogramBounds.get(i + 0);
				T bucketEnd = histogramBounds.get(i + 1);

				builder.append("[" + bucketStart + ", " + bucketEnd + "): " + histogramCounts.get(i));
			}
		}

		return builder.toString();
	}

	/**
	 * Estimate the join size where both histograms are exact ones.
	 */
	private SelectivityHistogram<T> computeExactJoin(SelectivityHistogram<T> other) {
		Map<T, BigInteger> result = new HashMap<T, BigInteger>();

		for (Map.Entry<T, BigInteger> entry : exactHistogram.entrySet()) {
			T columnValue = entry.getKey();
			BigInteger count = entry.getValue();

			if (other.exactHistogram.containsKey(columnValue)) {
				// Join cardinality for a shared value is the product of both sides' counts.
				result.put(columnValue, count.multiply(other.exactHistogram.get(columnValue)));
			}
		}

		SelectivityHistogram<T> histogram = new SelectivityHistogram<T>();
		histogram.addHistogramExact(result);
		return histogram;
	}

	/**
	 * Estimate the join size where the context histogram (this) is an exact one
	 * and the other histogram is a bucket histogram.
	 */
	private SelectivityHistogram<T> computeExactBucketJoin(SelectivityHistogram<T> other) {
		Map<T, BigInteger> result = new HashMap<T, BigInteger>();

		int currentExactIndex = 0;
		int bucketIndex = 0;

		while (true) {
			// If we examined all the exact values, then we are done.
			if (currentExactIndex == sortedExactHistogramKeys.size()) {
				break;
			}

			T currentExactValue = sortedExactHistogramKeys.get(currentExactIndex);

			// If there are no more buckets, then the exact value must be in the last bucket.
			if (bucketIndex == other.histogramCounts.size() - 1) {
				currentExactIndex++;

				BigInteger bucketCount = other.bucketOverlap(
						currentExactValue, currentExactValue,
						other.histogramBounds.get(bucketIndex + 0), other.histogramBounds.get(bucketIndex + 1),
						other.histogramCounts.get(bucketIndex));
				result.put(currentExactValue, bucketCount.multiply(exactHistogram.get(currentExactValue)));

				continue;
			}

			T bucketStartValue = other.histogramBounds.get(bucketIndex + 0);
			T bucketEndValue = other.histogramBounds.get(bucketIndex + 1);

			// If the current value is past this bucket, then move the bucket forward.
			if (currentExactValue.compareTo(bucketEndValue) > 0) {
				bucketIndex++;
				continue;
			}

			// Now the exact value must be either before or in this bucket.
			// It is only possible to be before this bucket if this is the first bucket.
			// Either way, put the exact value in this bucket.
			currentExactIndex++;

			BigInteger bucketCount = other.bucketOverlap(
					currentExactValue, currentExactValue,
					bucketStartValue, bucketEndValue,
					other.histogramCounts.get(bucketIndex));
			result.put(currentExactValue, bucketCount.multiply(exactHistogram.get(currentExactValue)));
		}

		SelectivityHistogram<T> histogram = new SelectivityHistogram<T>();
		histogram.addHistogramExact(result);
		return histogram;
	}

	/**
	 * Estimate the join size where both histograms are bucket ones.
	 */
	private SelectivityHistogram<T> computeBucketJoin(SelectivityHistogram<T> other) {
		// The buckets are required to be contiguous, so we may have some buckets with zero counts.
		List<T> bounds = new ArrayList<T>();
		List<BigInteger> counts = new ArrayList<BigInteger>();

		boolean emptyBucket = false;

		int contextBucketIndex = 0;
		int otherBucketIndex = 0;

		// Because we have no guarantees on the size or overlap of the buckets,
		// we cannot just loop over the buckets.
		// Instead, we have to move along and advance each bucket individually.
		T currentRangeStart = null;
		T currentRangeEnd = null;

		// In each loop, we will move one bucket forward.
		// If there is an overlap, we may also add to our row count.
		while (true) {
			// Stop if either bucket is out of range.
			if (contextBucketIndex == histogramCounts.size() || otherBucketIndex == other.histogramCounts.size()) {
				break;
			}

			T contextBucketStart = histogramBounds.get(contextBucketIndex + 0);
			T contextBucketEnd = histogramBounds.get(contextBucketIndex + 1);
			BigInteger contextCount = histogramCounts.get(contextBucketIndex);

			T otherBucketStart = other.histogramBounds.get(otherBucketIndex + 0);
			T otherBucketEnd = other.histogramBounds.get(otherBucketIndex + 1);
			BigInteger otherCount = other.histogramCounts.get(otherBucketIndex);

			// Start at the further forward of the bucket starts.
			int startComparison = contextBucketStart.compareTo(otherBucketStart);
			if (startComparison < 0) {
				currentRangeStart = otherBucketStart;
			} else {
				currentRangeStart = contextBucketStart;
			}

			// End at the earlier of the bucket ends.
			int endComparison = contextBucketEnd.compareTo(otherBucketEnd);
			if (endComparison < 0) {
				currentRangeEnd = contextBucketEnd;
			} else {
				currentRangeEnd = otherBucketEnd;
			}

			// Now move the bucket that ends first forward.
			if (endComparison <= 0) {
				// Move the context bucket.
				contextBucketIndex++;
			}

			if (endComparison >= 0) {
				// Move the other bucket.
				otherBucketIndex++;
			}

			// If there is no overlap, just move to the next range.
			if (currentRangeStart.compareTo(currentRangeEnd) > 0) {
				emptyBucket = true;
				continue;
			}

			// Compute how much of each bucket the range is overlapping.
			BigInteger contextBucketCount = bucketOverlap(
					currentRangeStart, currentRangeEnd,
					contextBucketStart, contextBucketEnd,
					contextCount);

			BigInteger otherBucketCount = other.bucketOverlap(
					currentRangeStart, currentRangeEnd,
					otherBucketStart, otherBucketEnd,
					otherCount);

			// Make sure to add in the first bound.
			// We also want to make sure that we explicitly add in buckets that are empty,
			// so we can make our non-empty buckets as small as possible.
			if (bounds.size() == 0 || emptyBucket) {
				emptyBucket = false;
				bounds.add(currentRangeStart);
			}

			bounds.add(currentRangeEnd);
			counts.add(contextBucketCount.multiply(otherBucketCount));
		}

		SelectivityHistogram<T> histogram = new SelectivityHistogram<T>();
		histogram.addHistogramBounds(bounds, counts);
		return histogram;
	}

	/**
	 * Estimate how much a bucket overlaps with some range.
	 */
	private BigInteger bucketOverlap(T rangeStart, T rangeEnd, T bucketStart, T bucketEnd, BigInteger bucketCount) {
		// We have two general cases: the entire bucket is used or a portion of the bucket is being used.
		BigDecimal floatBucketCount = new BigDecimal(bucketCount);

		// All the bucket is being used.
		if (rangeStart.compareTo(bucketStart) < 0 && rangeEnd.compareTo(bucketEnd) > 0) {
			return bucketCount;
		}

		// A portion of the bucket is being used.

		// If we are dealing with ints, then we can compute the portion of the bucket being used.
		// Just assume a uniform distribution over the bucket.
		if (columnType == Integer.class) {
			int bucketSize = ((Integer)bucketEnd).intValue() - ((Integer)bucketStart).intValue() + 1;
			int overlapStart = Math.max(((Integer)rangeStart).intValue(), ((Integer)bucketStart).intValue());
			int overlapEnd = Math.min(((Integer)rangeEnd).intValue(), ((Integer)bucketEnd).intValue());
			int overlapSize = overlapEnd - overlapStart;

			// If we are comparing with an exact value, we will pass the same values for the range start/end.
			// In this case, we will want to add one to the overlap.
			if (overlapSize == 0) {
				overlapSize = 1;
			}

			BigDecimal count = floatBucketCount.multiply(BigDecimal.valueOf((double)overlapSize / bucketSize));
			// Round up so a non-empty overlap never estimates to zero rows.
			return count.setScale(0, RoundingMode.CEILING).toBigInteger();
		}

		// If we are using strings, then we cannot make any assumptions about the width
		// of the bucket and we will just use our standard load factor.
		BigDecimal count = floatBucketCount.multiply(BigDecimal.valueOf(BUCKET_USAGE_GUESS));
		return count.setScale(0, RoundingMode.CEILING).toBigInteger();
	}
}
|
psl-core/src/main/java/org/linqs/psl/database/rdbms/SelectivityHistogram.java
|
/*
* This file is part of the PSL software.
* Copyright 2011-2015 University of Maryland
* Copyright 2013-2018 The Regents of the University of California
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.linqs.psl.database.rdbms;
import org.linqs.psl.util.StringUtils;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.math.RoundingMode;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * A histogram that represents the distribution of values in a column.
 * The DBMS may return this information as either buckets or specific values (if there are not that many unique values).
 * This class will handle both and estimate join sizes between other histograms.
 *
 * If the column type is an int, then a bucket will be assumed to be uniformly distributed.
 * This will typically result in more accurate estimates.
 *
 * Internally, this class will use BigInteger.
 * However, all APIs will use primitive or wrapper types to make it easy on the user.
 */
public class SelectivityHistogram<T extends Comparable<? super T>> {
/**
* A guess about how much of a bucket is in use when there is some overlap.
* This is only used if we have no information about the actual width of a bucket.
*/
public static final double BUCKET_USAGE_GUESS = 0.5;
/**
* The values we see must all be the same exact class.
*/
private Class columnType;
/**
* Valid histograms may be no entries in them (especially after joins).
*/
private boolean isEmpty;
/**
* The boundaries in a historgram of values for each column.
* Although start/end boundaries are specified,
* any values past those extremes will be moved into the first/last bucket.
* Assume that the bounds are [inclusive, exclusive).
* Except for the very last bound, which will be included in the last bucket.
* This find distinction will only actually apply when working with int bounds.
*/
private List<T> histogramBounds;
private List<BigInteger> histogramCounts;
/**
* If the cardinality of a column is low, then the database may just report those unique values.
* In this case, we can build a exact distribution of the column.
*/
private Map<T, BigInteger> exactHistogram;
private List<T> sortedExactHistogramKeys;
public SelectivityHistogram() {
this(false);
}
private SelectivityHistogram(boolean isEmpty) {
columnType = null;
this.isEmpty = isEmpty;
histogramBounds = null;
histogramCounts = null;
exactHistogram = null;
sortedExactHistogramKeys = null;
}
public void addHistogramBounds(List<T> bounds, List<? extends Number> counts) {
if (bounds.size() == 0 && counts.size() == 0) {
isEmpty = true;
return;
}
assert(bounds.size() == counts.size() + 1);
assert(bounds.size() >= 2);
histogramBounds = new ArrayList<T>(bounds);
histogramCounts = new ArrayList<BigInteger>(counts.size());
for (Number count : counts) {
if (count instanceof BigInteger) {
histogramCounts.add((BigInteger)count);
} else {
histogramCounts.add(BigInteger.valueOf(count.longValue()));
}
}
checkTypes(histogramBounds);
}
public void addHistogramExact(Map<T, ? extends Number> histogram) {
if (histogram.size() == 0) {
isEmpty = true;
return;
}
assert(histogram.size() > 0);
exactHistogram = new HashMap<T, BigInteger>();
for (Map.Entry<T, ? extends Number> entry : histogram.entrySet()) {
T key = entry.getKey();
Number count = entry.getValue();
if (count instanceof BigInteger) {
exactHistogram.put(key, (BigInteger)count);
} else {
exactHistogram.put(key, BigInteger.valueOf(count.longValue()));
}
}
sortedExactHistogramKeys = new ArrayList<T>(histogram.keySet());
Collections.sort(sortedExactHistogramKeys);
checkTypes(sortedExactHistogramKeys);
}
/**
 * Ensure all supplied values share a single runtime class, and record that
 * class as this histogram's column type (first value wins).
 *
 * @throws IllegalArgumentException if the values are of mixed classes.
 */
private void checkTypes(Iterable<T> values) {
    for (T value : values) {
        Class<?> valueClass = value.getClass();
        if (columnType == null) {
            // First value seen fixes the column type.
            columnType = valueClass;
        }
        if (valueClass != columnType) {
            throw new IllegalArgumentException(String.format(
                    "Inconsistent types. Expected %s, found %s.",
                    columnType.getName(), valueClass.getName()));
        }
    }
}
/**
* Get the number of rows represented by this histogram.
* For histograms created directly from database tables, the size should be equal to the table count.
* Note that this method is O(n).
* Overflows will return Long.MAX_VALUE.
*/
public long size() {
    if (!isValid() || isEmpty) {
        return 0;
    }
    // Sum whichever representation this histogram carries.
    Iterable<BigInteger> bucketCounts = (exactHistogram != null)
            ? exactHistogram.values()
            : histogramCounts;
    BigInteger total = BigInteger.ZERO;
    for (BigInteger bucketCount : bucketCounts) {
        total = total.add(bucketCount);
    }
    // Clamp at Long.MAX_VALUE on overflow.
    return total.min(BigInteger.valueOf(Long.MAX_VALUE)).longValue();
}
/**
* A histogram is not valid until either addHistogramBounds() or addHistogramExact() is called.
* Unless it is empty, empty histograms are always valid.
*/
public boolean isValid() {
    // Empty histograms are always valid; otherwise a column type must have
    // been established by addHistogramBounds() / addHistogramExact().
    if (isEmpty) {
        return true;
    }
    return columnType != null;
}
/**
* Get a new histogram that represents the join of this histogram with another.
*/
public SelectivityHistogram<T> join(SelectivityHistogram<T> other) {
    if (!isValid() || !other.isValid()) {
        // Fixed typo in the error message ("Connot" -> "Cannot").
        throw new IllegalArgumentException("Cannot compute join on invalid histograms.");
    }
    // Joining with an empty relation yields an empty result.
    if (isEmpty || other.isEmpty) {
        return new SelectivityHistogram<T>(true);
    }
    // Make sure the classes match exactly (referential equality works on Class).
    if (columnType != other.columnType) {
        throw new IllegalArgumentException(String.format(
                "Both histograms must match column type exactly. Got %s and %s.",
                columnType.getName(),
                other.columnType.getName()));
    }
    // Dispatch on which side(s) carry exact (per-value) distributions.
    if (exactHistogram != null && other.exactHistogram != null) {
        return computeExactJoin(other);
    }
    if (exactHistogram != null) {
        return computeExactBucketJoin(other);
    }
    if (other.exactHistogram != null) {
        // Exact/bucket join is symmetric, so flip the operands.
        return other.computeExactBucketJoin(this);
    }
    return computeBucketJoin(other);
}
/**
 * Render the histogram for debugging: the exact per-value counts (if any),
 * followed by the bucket ranges and their counts (if any).
 */
@Override
public String toString() {
    if (isEmpty) {
        return "Empty Histogram";
    }
    if (!isValid()) {
        return "Invalid Histogram";
    }
    StringBuilder builder = new StringBuilder();
    if (exactHistogram != null) {
        for (int i = 0; i < sortedExactHistogramKeys.size(); i++) {
            T exactValue = sortedExactHistogramKeys.get(i);
            if (i != 0) {
                builder.append(", ");
            }
            // Append pieces directly instead of concatenating Strings first.
            builder.append(exactValue);
            builder.append(" (").append(exactHistogram.get(exactValue)).append(")");
        }
        if (histogramBounds != null) {
            builder.append("\n");
        }
    }
    if (histogramBounds != null) {
        for (int i = 0; i < histogramCounts.size(); i++) {
            if (i != 0) {
                builder.append(", ");
            }
            // Bucket i spans bounds [i, i + 1).
            T bucketStart = histogramBounds.get(i);
            T bucketEnd = histogramBounds.get(i + 1);
            builder.append("[").append(bucketStart).append(", ").append(bucketEnd)
                    .append("): ").append(histogramCounts.get(i));
        }
    }
    return builder.toString();
}
/**
* Estimate the join size where both histograms are exact ones.
*/
/**
 * Estimate the join size where both histograms are exact ones.
 * Only values present on both sides survive; their counts multiply.
 */
private SelectivityHistogram<T> computeExactJoin(SelectivityHistogram<T> other) {
    Map<T, BigInteger> result = new HashMap<T, BigInteger>();
    for (Map.Entry<T, BigInteger> entry : exactHistogram.entrySet()) {
        // A single lookup replaces the containsKey()/get() pair; the maps
        // never hold null values, so null means "absent".
        BigInteger otherCount = other.exactHistogram.get(entry.getKey());
        if (otherCount != null) {
            result.put(entry.getKey(), entry.getValue().multiply(otherCount));
        }
    }
    SelectivityHistogram<T> histogram = new SelectivityHistogram<T>();
    histogram.addHistogramExact(result);
    return histogram;
}
/**
* Estimate the join size where the context histogram (this) is an exact one
* and the other histogram is a bucket histogram.
*/
private SelectivityHistogram<T> computeExactBucketJoin(SelectivityHistogram<T> other) {
    Map<T, BigInteger> result = new HashMap<T, BigInteger>();
    int currentExactIndex = 0;
    int bucketIndex = 0;
    while (true) {
        // If we examined all the exact values, then we are done.
        if (currentExactIndex == sortedExactHistogramKeys.size()) {
            break;
        }
        T currentExactValue = sortedExactHistogramKeys.get(currentExactIndex);
        // If there are no more buckets, then the exact value must be in the last bucket.
        if (bucketIndex == other.histogramCounts.size() - 1) {
            currentExactIndex++;
            // BUG FIX (off-by-one): bucket i spans bounds [i, i + 1].
            // The previous code read the bounds of bucket (i - 1) while
            // pairing them with the count of bucket i.
            BigInteger bucketCount = other.bucketOverlap(
                    currentExactValue, currentExactValue,
                    other.histogramBounds.get(bucketIndex), other.histogramBounds.get(bucketIndex + 1),
                    other.histogramCounts.get(bucketIndex));
            result.put(currentExactValue, bucketCount.multiply(exactHistogram.get(currentExactValue)));
            continue;
        }
        T bucketStartValue = other.histogramBounds.get(bucketIndex + 0);
        T bucketEndValue = other.histogramBounds.get(bucketIndex + 1);
        // If the current value is past this bucket, then move the bucket forward.
        // NOTE(review): bounds are documented as [inclusive, exclusive), so a
        // value equal to the end bound arguably belongs to the next bucket
        // (">= 0"); keeping the original "> 0" behavior here — confirm intent.
        if (currentExactValue.compareTo(bucketEndValue) > 0) {
            bucketIndex++;
            continue;
        }
        // Now the exact value must be either before or in this bucket.
        // It is only possible to be before this bucket if this is the first bucket.
        // Either way, put the exact value in this bucket.
        currentExactIndex++;
        BigInteger bucketCount = other.bucketOverlap(
                currentExactValue, currentExactValue,
                bucketStartValue, bucketEndValue,
                other.histogramCounts.get(bucketIndex));
        result.put(currentExactValue, bucketCount.multiply(exactHistogram.get(currentExactValue)));
    }
    SelectivityHistogram<T> histogram = new SelectivityHistogram<T>();
    histogram.addHistogramExact(result);
    return histogram;
}
/**
* Estimate the join size where both histograms are bucket ones.
*/
/**
 * Estimate the join size where both histograms are bucket ones.
 * Sweeps both bucket lists with two cursors, intersecting one pair of
 * buckets per iteration and multiplying the overlapped counts.
 */
private SelectivityHistogram<T> computeBucketJoin(SelectivityHistogram<T> other) {
    // The buckets are required to be contiguous, so we may have some buckets with zero counts.
    List<T> bounds = new ArrayList<T>();
    List<BigInteger> counts = new ArrayList<BigInteger>();
    boolean emptyBucket = false;
    int contextBucketIndex = 0;
    int otherBucketIndex = 0;
    // Because we have no guarantees on the size or overlap of the buckets,
    // we cannot just loop over the buckets.
    // Instead, we have to move along and advance each bucket individually.
    T currentRangeStart = null;
    T currentRangeEnd = null;
    // In each loop, we will move one bucket forward.
    // If there is an overlap, we may also add to our row count.
    while (true) {
        // Stop if either bucket is out of range.
        if (contextBucketIndex == histogramCounts.size() || otherBucketIndex == other.histogramCounts.size()) {
            break;
        }
        // Bucket i spans bounds [i, i + 1).
        T contextBucketStart = histogramBounds.get(contextBucketIndex + 0);
        T contextBucketEnd = histogramBounds.get(contextBucketIndex + 1);
        BigInteger contextCount = histogramCounts.get(contextBucketIndex);
        T otherBucketStart = other.histogramBounds.get(otherBucketIndex + 0);
        T otherBucketEnd = other.histogramBounds.get(otherBucketIndex + 1);
        BigInteger otherCount = other.histogramCounts.get(otherBucketIndex);
        // Start at the further forward of the bucket starts.
        int startComparison = contextBucketStart.compareTo(otherBucketStart);
        if (startComparison < 0) {
            currentRangeStart = otherBucketStart;
        } else {
            currentRangeStart = contextBucketStart;
        }
        // End at the earlier of the bucket ends.
        int endComparison = contextBucketEnd.compareTo(otherBucketEnd);
        if (endComparison < 0) {
            currentRangeEnd = contextBucketEnd;
        } else {
            currentRangeEnd = otherBucketEnd;
        }
        // Now move the bucket that ends first forward.
        // (If both end together, both cursors advance.)
        if (endComparison <= 0) {
            // Move the context bucket.
            contextBucketIndex++;
        }
        if (endComparison >= 0) {
            // Move the other bucket.
            otherBucketIndex++;
        }
        // If there is no overlap, just move to the next range.
        if (currentRangeStart.compareTo(currentRangeEnd) > 0) {
            emptyBucket = true;
            continue;
        }
        // Compute how much of each bucket the range is overlapping.
        BigInteger contextBucketCount = bucketOverlap(
                currentRangeStart, currentRangeEnd,
                contextBucketStart, contextBucketEnd,
                contextCount);
        BigInteger otherBucketCount = other.bucketOverlap(
                currentRangeStart, currentRangeEnd,
                otherBucketStart, otherBucketEnd,
                otherCount);
        // Make sure to add in the first bound.
        // We also want to make sure that we explicitly add in buckets that are empty,
        // so we can make our non-empty buckets as small as possible.
        if (bounds.size() == 0 || emptyBucket) {
            emptyBucket = false;
            bounds.add(currentRangeStart);
        }
        bounds.add(currentRangeEnd);
        counts.add(contextBucketCount.multiply(otherBucketCount));
    }
    SelectivityHistogram<T> histogram = new SelectivityHistogram<T>();
    histogram.addHistogramBounds(bounds, counts);
    return histogram;
}
/**
* Estimate how much a bucket overlaps with some range.
*/
/**
 * Estimate how many of a bucket's rows fall inside [rangeStart, rangeEnd],
 * assuming a uniform distribution within the bucket.
 */
private BigInteger bucketOverlap(T rangeStart, T rangeEnd, T bucketStart, T bucketEnd, BigInteger bucketCount) {
    // We have two general cases: the entire bucket is used or a portion of the bucket is being used.
    // All the bucket is being used.
    // BUG FIX: the comparisons were strict (< / >), so a range exactly
    // coextensive with the bucket fell through to the partial-overlap math
    // and undercounted; covering the bucket on both ends means full use.
    if (rangeStart.compareTo(bucketStart) <= 0 && rangeEnd.compareTo(bucketEnd) >= 0) {
        return bucketCount;
    }
    // Only needed for the fractional cases below (hoisted past the early return).
    BigDecimal floatBucketCount = new BigDecimal(bucketCount);
    // A portion of the bucket is being used.
    // If we are dealing with ints, then we can compute the portion of the bucket being used.
    // Just assume a uniform distribution over the bucket.
    if (columnType == Integer.class) {
        // NOTE(review): bucketSize uses an inclusive (+1) width even though the
        // bounds are documented as [inclusive, exclusive) — confirm intent.
        int bucketSize = ((Integer)bucketEnd).intValue() - ((Integer)bucketStart).intValue() + 1;
        int overlapStart = Math.max(((Integer)rangeStart).intValue(), ((Integer)bucketStart).intValue());
        int overlapEnd = Math.min(((Integer)rangeEnd).intValue(), ((Integer)bucketEnd).intValue());
        int overlapSize = overlapEnd - overlapStart;
        // If we are comparing with an exact value, we will pass the same values for the range start/end.
        // In this case, we will want to add one to the overlap.
        if (overlapSize == 0) {
            overlapSize = 1;
        }
        BigDecimal count = floatBucketCount.multiply(BigDecimal.valueOf((double)overlapSize / bucketSize));
        return count.setScale(0, RoundingMode.CEILING).toBigInteger();
    }
    // If we are using strings, then we cannot make any assumptions about the width
    // of the bucket and we will just use our standard load factor.
    BigDecimal count = floatBucketCount.multiply(BigDecimal.valueOf(BUCKET_USAGE_GUESS));
    return count.setScale(0, RoundingMode.CEILING).toBigInteger();
}
}
|
Fixed an off by one with bucket joins.
|
psl-core/src/main/java/org/linqs/psl/database/rdbms/SelectivityHistogram.java
|
Fixed an off by one with bucket joins.
|
|
Java
|
apache-2.0
|
be3118334fc2509023a31ce21ef826651e5cd955
| 0
|
11xor6/presto,hgschmie/presto,11xor6/presto,erichwang/presto,dain/presto,Praveen2112/presto,hgschmie/presto,treasure-data/presto,11xor6/presto,ebyhr/presto,martint/presto,Praveen2112/presto,treasure-data/presto,martint/presto,losipiuk/presto,erichwang/presto,smartnews/presto,smartnews/presto,martint/presto,losipiuk/presto,treasure-data/presto,martint/presto,smartnews/presto,treasure-data/presto,11xor6/presto,erichwang/presto,ebyhr/presto,treasure-data/presto,losipiuk/presto,dain/presto,ebyhr/presto,hgschmie/presto,losipiuk/presto,Praveen2112/presto,dain/presto,martint/presto,erichwang/presto,hgschmie/presto,erichwang/presto,Praveen2112/presto,electrum/presto,treasure-data/presto,ebyhr/presto,electrum/presto,electrum/presto,11xor6/presto,dain/presto,smartnews/presto,losipiuk/presto,dain/presto,hgschmie/presto,electrum/presto,smartnews/presto,ebyhr/presto,Praveen2112/presto,electrum/presto
|
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.prestosql.execution.executor;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.SettableFuture;
import io.airlift.units.Duration;
import io.prestosql.execution.SplitRunner;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import static com.google.common.util.concurrent.Futures.immediateFuture;
import static io.airlift.units.Duration.succinctNanos;
import static io.prestosql.operator.Operator.NOT_BLOCKED;
import static java.lang.String.format;
import static java.util.Objects.requireNonNull;
import static java.util.concurrent.TimeUnit.NANOSECONDS;
/**
 * Base class for simulated splits used by the task-executor simulation.
 * Tracks per-split timing statistics (creation, first start, completion,
 * accumulated wait) in atomics, since they are written by executor threads
 * and read elsewhere. Subclasses define what one processing quanta does.
 */
abstract class SimulationSplit
        implements SplitRunner
{
    private final SimulationTask task;

    // Number of processFor() invocations so far.
    private final AtomicInteger calls = new AtomicInteger(0);

    private final long createdNanos = System.nanoTime();
    // Total time spent inside process() across all calls.
    private final AtomicLong completedProcessNanos = new AtomicLong();
    // -1 means "not yet set"; initialized once via compareAndSet() in processFor().
    private final AtomicLong startNanos = new AtomicLong(-1);
    private final AtomicLong doneNanos = new AtomicLong(-1);
    // Accumulated time between becoming ready and actually being run.
    private final AtomicLong waitNanos = new AtomicLong();
    private final AtomicLong lastReadyTime = new AtomicLong(-1);
    private final AtomicBoolean killed = new AtomicBoolean(false);

    // Total simulated processing time this split is scheduled to consume.
    private final long scheduledTimeNanos;

    SimulationSplit(SimulationTask task, long scheduledTimeNanos)
    {
        this.task = requireNonNull(task, "task is null");
        this.scheduledTimeNanos = scheduledTimeNanos;
    }

    long getCreatedNanos()
    {
        return createdNanos;
    }

    long getCompletedProcessNanos()
    {
        return completedProcessNanos.get();
    }

    long getStartNanos()
    {
        return startNanos.get();
    }

    long getDoneNanos()
    {
        return doneNanos.get();
    }

    long getWaitNanos()
    {
        return waitNanos.get();
    }

    int getCalls()
    {
        return calls.get();
    }

    long getScheduledTimeNanos()
    {
        return scheduledTimeNanos;
    }

    String getTaskId()
    {
        return task.getTaskId().toString();
    }

    SimulationTask getTask()
    {
        return task;
    }

    boolean isKilled()
    {
        return killed.get();
    }

    void setKilled()
    {
        // Account for wait time accrued up to the kill, then propagate to the task.
        waitNanos.addAndGet(System.nanoTime() - lastReadyTime.get());
        killed.set(true);
        task.setKilled();
    }

    @Override
    public boolean isFinished()
    {
        // doneNanos stays -1 until the split completes.
        return doneNanos.get() >= 0;
    }

    @Override
    public void close()
    {
    }

    // Runs one quanta; returns true when the split is fully done.
    abstract boolean process();

    // Future the executor waits on when process() reports "not done yet".
    abstract ListenableFuture<?> getProcessResult();

    void setSplitReady()
    {
        lastReadyTime.set(System.nanoTime());
    }

    @Override
    public ListenableFuture<?> processFor(Duration duration)
    {
        calls.incrementAndGet();
        long callStart = System.nanoTime();
        // Record the first-start and first-ready timestamps exactly once;
        // on the first call the wait contribution below is therefore zero.
        startNanos.compareAndSet(-1, callStart);
        lastReadyTime.compareAndSet(-1, callStart);
        waitNanos.addAndGet(callStart - lastReadyTime.get());
        boolean done = process();
        long callEnd = System.nanoTime();
        completedProcessNanos.addAndGet(callEnd - callStart);
        if (done) {
            doneNanos.compareAndSet(-1, callEnd);
            if (!isKilled()) {
                task.splitComplete(this);
            }
            return immediateFuture(null);
        }
        ListenableFuture<?> processResult = getProcessResult();
        if (processResult.isDone()) {
            setSplitReady();
        }
        return processResult;
    }

    /**
     * A CPU-bound split: each process() call sleeps for up to one quanta
     * until the scheduled processing time has been accumulated.
     */
    static class LeafSplit
            extends SimulationSplit
    {
        private final long perQuantaNanos;

        public LeafSplit(SimulationTask task, long scheduledTimeNanos, long perQuantaNanos)
        {
            super(task, scheduledTimeNanos);
            this.perQuantaNanos = perQuantaNanos;
        }

        @Override
        public boolean process()
        {
            if (getCompletedProcessNanos() >= super.scheduledTimeNanos) {
                return true;
            }
            // Simulate work by sleeping for the smaller of the remaining
            // time and one quanta.
            long processNanos = Math.min(super.scheduledTimeNanos - getCompletedProcessNanos(), perQuantaNanos);
            if (processNanos > 0) {
                try {
                    NANOSECONDS.sleep(processNanos);
                }
                catch (InterruptedException e) {
                    setKilled();
                    return true;
                }
            }
            return false;
        }

        @Override
        public ListenableFuture<?> getProcessResult()
        {
            // Leaf splits never block between quantas.
            return NOT_BLOCKED;
        }

        @Override
        public String getInfo()
        {
            double pct = (100.0 * getCompletedProcessNanos() / super.scheduledTimeNanos);
            return format(
                    "leaf %3s%% done (total: %8s, per quanta: %8s)",
                    (int) (pct > 100.00 ? 100.0 : pct),
                    succinctNanos(super.scheduledTimeNanos),
                    succinctNanos(perQuantaNanos));
        }
    }

    /**
     * A split that alternates work and blocking: it runs a fixed number of
     * quantas, blocking for betweenQuantaNanos between them on a future
     * completed by a scheduled executor.
     */
    static class IntermediateSplit
            extends SimulationSplit
    {
        private final long wallTimeNanos;
        private final long numQuantas;
        private final long perQuantaNanos;
        private final long betweenQuantaNanos;
        private final ScheduledExecutorService executorService;

        private SettableFuture<?> future = SettableFuture.create();
        // Pre-completed future returned when scheduling is rejected.
        private SettableFuture<?> doneFuture = SettableFuture.create();

        public IntermediateSplit(SimulationTask task, long scheduledTimeNanos, long wallTimeNanos, long numQuantas, long perQuantaNanos, long betweenQuantaNanos, ScheduledExecutorService executorService)
        {
            super(task, scheduledTimeNanos);
            this.wallTimeNanos = wallTimeNanos;
            this.numQuantas = numQuantas;
            this.perQuantaNanos = perQuantaNanos;
            this.betweenQuantaNanos = betweenQuantaNanos;
            this.executorService = executorService;
            doneFuture.set(null);
        }

        @Override
        public boolean process()
        {
            try {
                if (getCalls() < numQuantas) {
                    NANOSECONDS.sleep(perQuantaNanos);
                    return false;
                }
            }
            catch (InterruptedException ignored) {
                setKilled();
                return true;
            }
            return true;
        }

        @Override
        public ListenableFuture<?> getProcessResult()
        {
            // Create a fresh future and complete it after the inter-quanta delay.
            future = SettableFuture.create();
            try {
                executorService.schedule(() -> {
                    try {
                        if (!executorService.isShutdown()) {
                            future.set(null);
                        }
                        else {
                            setKilled();
                        }
                        setSplitReady();
                    }
                    catch (RuntimeException ignored) {
                        setKilled();
                    }
                }, betweenQuantaNanos, NANOSECONDS);
            }
            catch (RejectedExecutionException ignored) {
                // Executor is shutting down: kill the split and return a
                // completed future so the caller never waits.
                setKilled();
                return doneFuture;
            }
            return future;
        }

        @Override
        public String getInfo()
        {
            double pct = (100.0 * getCalls() / numQuantas);
            return format("intr %3s%% done (wall: %9s, per quanta: %8s, between quanta: %8s)",
                    (int) (pct > 100.00 ? 100.0 : pct),
                    succinctNanos(wallTimeNanos),
                    succinctNanos(perQuantaNanos),
                    succinctNanos(betweenQuantaNanos));
        }
    }
}
|
presto-main/src/test/java/io/prestosql/execution/executor/SimulationSplit.java
|
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.prestosql.execution.executor;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.SettableFuture;
import io.airlift.units.Duration;
import io.prestosql.execution.SplitRunner;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import static io.airlift.units.Duration.succinctNanos;
import static io.prestosql.operator.Operator.NOT_BLOCKED;
import static java.lang.String.format;
import static java.util.Objects.requireNonNull;
import static java.util.concurrent.TimeUnit.NANOSECONDS;
/**
 * Base class for simulated splits used by the task-executor simulation.
 * Tracks per-split timing statistics (creation, first start, completion,
 * accumulated wait) in atomics, since they are written by executor threads
 * and read elsewhere. Subclasses define what one processing quanta does.
 */
abstract class SimulationSplit
        implements SplitRunner
{
    private final SimulationTask task;

    // Number of processFor() invocations so far.
    private final AtomicInteger calls = new AtomicInteger(0);

    private final long createdNanos = System.nanoTime();
    // Total time spent inside process() across all calls.
    private final AtomicLong completedProcessNanos = new AtomicLong();
    // -1 means "not yet set"; initialized once via compareAndSet() in processFor().
    private final AtomicLong startNanos = new AtomicLong(-1);
    private final AtomicLong doneNanos = new AtomicLong(-1);
    // Accumulated time between becoming ready and actually being run.
    private final AtomicLong waitNanos = new AtomicLong();
    private final AtomicLong lastReadyTime = new AtomicLong(-1);
    private final AtomicBoolean killed = new AtomicBoolean(false);

    // Total simulated processing time this split is scheduled to consume.
    private final long scheduledTimeNanos;

    SimulationSplit(SimulationTask task, long scheduledTimeNanos)
    {
        this.task = requireNonNull(task, "task is null");
        this.scheduledTimeNanos = scheduledTimeNanos;
    }

    long getCreatedNanos()
    {
        return createdNanos;
    }

    long getCompletedProcessNanos()
    {
        return completedProcessNanos.get();
    }

    long getStartNanos()
    {
        return startNanos.get();
    }

    long getDoneNanos()
    {
        return doneNanos.get();
    }

    long getWaitNanos()
    {
        return waitNanos.get();
    }

    int getCalls()
    {
        return calls.get();
    }

    long getScheduledTimeNanos()
    {
        return scheduledTimeNanos;
    }

    String getTaskId()
    {
        return task.getTaskId().toString();
    }

    SimulationTask getTask()
    {
        return task;
    }

    boolean isKilled()
    {
        return killed.get();
    }

    void setKilled()
    {
        // Account for wait time accrued up to the kill, then propagate to the task.
        waitNanos.addAndGet(System.nanoTime() - lastReadyTime.get());
        killed.set(true);
        task.setKilled();
    }

    @Override
    public boolean isFinished()
    {
        // doneNanos stays -1 until the split completes.
        return doneNanos.get() >= 0;
    }

    @Override
    public void close()
    {
    }

    // Runs one quanta; returns true when the split is fully done.
    abstract boolean process();

    // Future the executor waits on when process() reports "not done yet".
    abstract ListenableFuture<?> getProcessResult();

    void setSplitReady()
    {
        lastReadyTime.set(System.nanoTime());
    }

    @Override
    public ListenableFuture<?> processFor(Duration duration)
    {
        calls.incrementAndGet();
        long callStart = System.nanoTime();
        // Record the first-start and first-ready timestamps exactly once;
        // on the first call the wait contribution below is therefore zero.
        startNanos.compareAndSet(-1, callStart);
        lastReadyTime.compareAndSet(-1, callStart);
        waitNanos.addAndGet(callStart - lastReadyTime.get());
        boolean done = process();
        long callEnd = System.nanoTime();
        completedProcessNanos.addAndGet(callEnd - callStart);
        if (done) {
            doneNanos.compareAndSet(-1, callEnd);
            if (!isKilled()) {
                task.splitComplete(this);
            }
            // Futures.immediateCheckedFuture() is deprecated; the plain
            // immediateFuture() satisfies the ListenableFuture<?> return type.
            return Futures.immediateFuture(null);
        }
        ListenableFuture<?> processResult = getProcessResult();
        if (processResult.isDone()) {
            setSplitReady();
        }
        return processResult;
    }

    /**
     * A CPU-bound split: each process() call sleeps for up to one quanta
     * until the scheduled processing time has been accumulated.
     */
    static class LeafSplit
            extends SimulationSplit
    {
        private final long perQuantaNanos;

        public LeafSplit(SimulationTask task, long scheduledTimeNanos, long perQuantaNanos)
        {
            super(task, scheduledTimeNanos);
            this.perQuantaNanos = perQuantaNanos;
        }

        @Override
        public boolean process()
        {
            if (getCompletedProcessNanos() >= super.scheduledTimeNanos) {
                return true;
            }
            // Simulate work by sleeping for the smaller of the remaining
            // time and one quanta.
            long processNanos = Math.min(super.scheduledTimeNanos - getCompletedProcessNanos(), perQuantaNanos);
            if (processNanos > 0) {
                try {
                    NANOSECONDS.sleep(processNanos);
                }
                catch (InterruptedException e) {
                    setKilled();
                    return true;
                }
            }
            return false;
        }

        @Override
        public ListenableFuture<?> getProcessResult()
        {
            // Leaf splits never block between quantas.
            return NOT_BLOCKED;
        }

        @Override
        public String getInfo()
        {
            double pct = (100.0 * getCompletedProcessNanos() / super.scheduledTimeNanos);
            return format(
                    "leaf %3s%% done (total: %8s, per quanta: %8s)",
                    (int) (pct > 100.00 ? 100.0 : pct),
                    succinctNanos(super.scheduledTimeNanos),
                    succinctNanos(perQuantaNanos));
        }
    }

    /**
     * A split that alternates work and blocking: it runs a fixed number of
     * quantas, blocking for betweenQuantaNanos between them on a future
     * completed by a scheduled executor.
     */
    static class IntermediateSplit
            extends SimulationSplit
    {
        private final long wallTimeNanos;
        private final long numQuantas;
        private final long perQuantaNanos;
        private final long betweenQuantaNanos;
        private final ScheduledExecutorService executorService;

        private SettableFuture<?> future = SettableFuture.create();
        // Pre-completed future returned when scheduling is rejected.
        private SettableFuture<?> doneFuture = SettableFuture.create();

        public IntermediateSplit(SimulationTask task, long scheduledTimeNanos, long wallTimeNanos, long numQuantas, long perQuantaNanos, long betweenQuantaNanos, ScheduledExecutorService executorService)
        {
            super(task, scheduledTimeNanos);
            this.wallTimeNanos = wallTimeNanos;
            this.numQuantas = numQuantas;
            this.perQuantaNanos = perQuantaNanos;
            this.betweenQuantaNanos = betweenQuantaNanos;
            this.executorService = executorService;
            doneFuture.set(null);
        }

        @Override
        public boolean process()
        {
            try {
                if (getCalls() < numQuantas) {
                    NANOSECONDS.sleep(perQuantaNanos);
                    return false;
                }
            }
            catch (InterruptedException ignored) {
                setKilled();
                return true;
            }
            return true;
        }

        @Override
        public ListenableFuture<?> getProcessResult()
        {
            // Create a fresh future and complete it after the inter-quanta delay.
            future = SettableFuture.create();
            try {
                executorService.schedule(() -> {
                    try {
                        if (!executorService.isShutdown()) {
                            future.set(null);
                        }
                        else {
                            setKilled();
                        }
                        setSplitReady();
                    }
                    catch (RuntimeException ignored) {
                        setKilled();
                    }
                }, betweenQuantaNanos, NANOSECONDS);
            }
            catch (RejectedExecutionException ignored) {
                // Executor is shutting down: kill the split and return a
                // completed future so the caller never waits.
                setKilled();
                return doneFuture;
            }
            return future;
        }

        @Override
        public String getInfo()
        {
            double pct = (100.0 * getCalls() / numQuantas);
            return format("intr %3s%% done (wall: %9s, per quanta: %8s, between quanta: %8s)",
                    (int) (pct > 100.00 ? 100.0 : pct),
                    succinctNanos(wallTimeNanos),
                    succinctNanos(perQuantaNanos),
                    succinctNanos(betweenQuantaNanos));
        }
    }
}
|
Remove usage of deprecated immedateCheckedFuture
|
presto-main/src/test/java/io/prestosql/execution/executor/SimulationSplit.java
|
Remove usage of deprecated immedateCheckedFuture
|
|
Java
|
apache-2.0
|
7385118528f155315e5bfcdf806a8780a6237599
| 0
|
GabrielBrascher/cloudstack,mufaddalq/cloudstack-datera-driver,resmo/cloudstack,DaanHoogland/cloudstack,GabrielBrascher/cloudstack,GabrielBrascher/cloudstack,resmo/cloudstack,jcshen007/cloudstack,cinderella/incubator-cloudstack,GabrielBrascher/cloudstack,mufaddalq/cloudstack-datera-driver,cinderella/incubator-cloudstack,resmo/cloudstack,wido/cloudstack,resmo/cloudstack,argv0/cloudstack,mufaddalq/cloudstack-datera-driver,argv0/cloudstack,cinderella/incubator-cloudstack,jcshen007/cloudstack,wido/cloudstack,wido/cloudstack,jcshen007/cloudstack,GabrielBrascher/cloudstack,mufaddalq/cloudstack-datera-driver,DaanHoogland/cloudstack,cinderella/incubator-cloudstack,cinderella/incubator-cloudstack,mufaddalq/cloudstack-datera-driver,GabrielBrascher/cloudstack,argv0/cloudstack,jcshen007/cloudstack,resmo/cloudstack,DaanHoogland/cloudstack,wido/cloudstack,jcshen007/cloudstack,jcshen007/cloudstack,DaanHoogland/cloudstack,argv0/cloudstack,wido/cloudstack,resmo/cloudstack,DaanHoogland/cloudstack,jcshen007/cloudstack,argv0/cloudstack,DaanHoogland/cloudstack,argv0/cloudstack,mufaddalq/cloudstack-datera-driver,wido/cloudstack,wido/cloudstack,resmo/cloudstack,DaanHoogland/cloudstack,GabrielBrascher/cloudstack
|
/**
* Copyright (C) 2010 Cloud.com, Inc. All rights reserved.
*
* This software is licensed under the GNU General Public License v3 or later.
*
* It is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or any later version.
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* aLong with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package com.cloud.api;
import java.text.DecimalFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.StringTokenizer;
import org.apache.log4j.Logger;
import com.cloud.api.commands.QueryAsyncJobResultCmd;
import com.cloud.api.response.AccountResponse;
import com.cloud.api.response.ApiResponseSerializer;
import com.cloud.api.response.AsyncJobResponse;
import com.cloud.api.response.CapacityResponse;
import com.cloud.api.response.ClusterResponse;
import com.cloud.api.response.ConfigurationResponse;
import com.cloud.api.response.CreateCmdResponse;
import com.cloud.api.response.DiskOfferingResponse;
import com.cloud.api.response.DomainResponse;
import com.cloud.api.response.DomainRouterResponse;
import com.cloud.api.response.EventResponse;
import com.cloud.api.response.ExtractResponse;
import com.cloud.api.response.FirewallRuleResponse;
import com.cloud.api.response.HostResponse;
import com.cloud.api.response.IPAddressResponse;
import com.cloud.api.response.IngressRuleResponse;
import com.cloud.api.response.InstanceGroupResponse;
import com.cloud.api.response.IpForwardingRuleResponse;
import com.cloud.api.response.ListResponse;
import com.cloud.api.response.LoadBalancerResponse;
import com.cloud.api.response.NetworkGroupResponse;
import com.cloud.api.response.NetworkOfferingResponse;
import com.cloud.api.response.NetworkResponse;
import com.cloud.api.response.NicResponse;
import com.cloud.api.response.PodResponse;
import com.cloud.api.response.PreallocatedLunResponse;
import com.cloud.api.response.RemoteAccessVpnResponse;
import com.cloud.api.response.ResourceLimitResponse;
import com.cloud.api.response.ServiceOfferingResponse;
import com.cloud.api.response.SnapshotPolicyResponse;
import com.cloud.api.response.SnapshotResponse;
import com.cloud.api.response.StoragePoolResponse;
import com.cloud.api.response.SystemVmResponse;
import com.cloud.api.response.TemplatePermissionsResponse;
import com.cloud.api.response.TemplateResponse;
import com.cloud.api.response.UserResponse;
import com.cloud.api.response.UserVmResponse;
import com.cloud.api.response.VlanIpRangeResponse;
import com.cloud.api.response.VolumeResponse;
import com.cloud.api.response.VpnUsersResponse;
import com.cloud.api.response.ZoneResponse;
import com.cloud.async.AsyncJob;
import com.cloud.async.AsyncJobResult;
import com.cloud.async.executor.IngressRuleResultObject;
import com.cloud.async.executor.NetworkGroupResultObject;
import com.cloud.capacity.Capacity;
import com.cloud.capacity.CapacityVO;
import com.cloud.configuration.Configuration;
import com.cloud.configuration.ResourceCount.ResourceType;
import com.cloud.configuration.ResourceLimit;
import com.cloud.dc.ClusterVO;
import com.cloud.dc.DataCenter;
import com.cloud.dc.DataCenterVO;
import com.cloud.dc.HostPodVO;
import com.cloud.dc.Pod;
import com.cloud.dc.Vlan;
import com.cloud.dc.Vlan.VlanType;
import com.cloud.dc.VlanVO;
import com.cloud.domain.Domain;
import com.cloud.event.Event;
import com.cloud.exception.InvalidParameterValueException;
import com.cloud.host.Host;
import com.cloud.host.HostStats;
import com.cloud.host.HostVO;
import com.cloud.network.IpAddress;
import com.cloud.network.Network;
import com.cloud.network.RemoteAccessVpn;
import com.cloud.network.VpnUser;
import com.cloud.network.Networks.TrafficType;
import com.cloud.network.router.VirtualRouter;
import com.cloud.network.rules.FirewallRule;
import com.cloud.network.rules.LoadBalancer;
import com.cloud.network.rules.PortForwardingRule;
import com.cloud.network.security.IngressRule;
import com.cloud.network.security.NetworkGroup;
import com.cloud.network.security.NetworkGroupRules;
import com.cloud.offering.DiskOffering;
import com.cloud.offering.NetworkOffering;
import com.cloud.offering.NetworkOffering.GuestIpType;
import com.cloud.offering.ServiceOffering;
import com.cloud.org.Cluster;
import com.cloud.server.Criteria;
import com.cloud.storage.DiskOfferingVO;
import com.cloud.storage.GuestOS;
import com.cloud.storage.GuestOSCategoryVO;
import com.cloud.storage.Snapshot;
import com.cloud.storage.Snapshot.Type;
import com.cloud.storage.Storage.ImageFormat;
import com.cloud.storage.Storage.StoragePoolType;
import com.cloud.storage.Storage.TemplateType;
import com.cloud.storage.StoragePool;
import com.cloud.storage.StoragePoolVO;
import com.cloud.storage.StorageStats;
import com.cloud.storage.UploadVO;
import com.cloud.storage.VMTemplateHostVO;
import com.cloud.storage.VMTemplateStorageResourceAssoc.Status;
import com.cloud.storage.VMTemplateVO;
import com.cloud.storage.Volume;
import com.cloud.storage.VolumeVO;
import com.cloud.storage.preallocatedlun.PreallocatedLunVO;
import com.cloud.storage.snapshot.SnapshotPolicy;
import com.cloud.template.VirtualMachineTemplate;
import com.cloud.test.PodZoneConfig;
import com.cloud.user.Account;
import com.cloud.user.AccountVO;
import com.cloud.user.User;
import com.cloud.user.UserAccount;
import com.cloud.user.UserContext;
import com.cloud.user.UserStatisticsVO;
import com.cloud.uservm.UserVm;
import com.cloud.utils.Pair;
import com.cloud.utils.net.NetUtils;
import com.cloud.vm.ConsoleProxyVO;
import com.cloud.vm.InstanceGroup;
import com.cloud.vm.InstanceGroupVO;
import com.cloud.vm.Nic;
import com.cloud.vm.SecondaryStorageVmVO;
import com.cloud.vm.State;
import com.cloud.vm.SystemVm;
import com.cloud.vm.UserVmVO;
import com.cloud.vm.VMInstanceVO;
import com.cloud.vm.VirtualMachine;
import com.cloud.vm.VmStats;
public class ApiResponseHelper implements ResponseGenerator {
public final Logger s_logger = Logger.getLogger(ApiResponseHelper.class);
/**
 * Build an API UserResponse directly from a UserAccount view object.
 * Copies account, identity, state, timezone, and API credential fields,
 * and resolves the domain name through ApiDBUtils.
 */
// NOTE(review): findDomainById() result is dereferenced without a null
// check — presumably the domain always exists for a valid user; confirm.
@Override
public UserResponse createUserResponse(UserAccount user) {
    UserResponse userResponse = new UserResponse();
    userResponse.setAccountName(user.getAccountName());
    userResponse.setAccountType(user.getType());
    userResponse.setCreated(user.getCreated());
    userResponse.setDomainId(user.getDomainId());
    userResponse.setDomainName(ApiDBUtils.findDomainById(user.getDomainId()).getName());
    userResponse.setEmail(user.getEmail());
    userResponse.setFirstname(user.getFirstname());
    userResponse.setId(user.getId());
    userResponse.setLastname(user.getLastname());
    userResponse.setState(user.getState());
    userResponse.setTimezone(user.getTimezone());
    userResponse.setUsername(user.getUsername());
    userResponse.setApiKey(user.getApiKey());
    userResponse.setSecretKey(user.getSecretKey());
    userResponse.setObjectName("user");
    return userResponse;
}
@Override
public UserResponse createUserResponse(User user) {
    // Build a "user" API response from a bare User entity; account/domain
    // details are resolved through the owning account.
    Account owner = ApiDBUtils.findAccountById(user.getAccountId());
    UserResponse response = new UserResponse();
    response.setId(user.getId());
    response.setUsername(user.getUsername());
    response.setFirstname(user.getFirstname());
    response.setLastname(user.getLastname());
    response.setEmail(user.getEmail());
    response.setState(user.getState());
    response.setTimezone(user.getTimezone());
    response.setCreated(user.getCreated());
    response.setAccountName(owner.getAccountName());
    response.setAccountType(owner.getType());
    response.setDomainId(owner.getDomainId());
    response.setDomainName(ApiDBUtils.findDomainById(owner.getDomainId()).getName());
    response.setApiKey(user.getApiKey());
    response.setSecretKey(user.getSecretKey());
    response.setObjectName("user");
    return response;
}
// Used for response generation via createAccount (which creates an account + user);
// identical payload to createUserResponse(UserAccount) but tagged as an "account" object.
@Override
public UserResponse createUserAccountResponse(UserAccount user) {
    UserResponse response = new UserResponse();
    response.setId(user.getId());
    response.setUsername(user.getUsername());
    response.setFirstname(user.getFirstname());
    response.setLastname(user.getLastname());
    response.setEmail(user.getEmail());
    response.setState(user.getState());
    response.setTimezone(user.getTimezone());
    response.setCreated(user.getCreated());
    response.setAccountName(user.getAccountName());
    response.setAccountType(user.getType());
    response.setDomainId(user.getDomainId());
    response.setDomainName(ApiDBUtils.findDomainById(user.getDomainId()).getName());
    response.setApiKey(user.getApiKey());
    response.setSecretKey(user.getSecretKey());
    response.setObjectName("account");
    return response;
}
@Override
public AccountResponse createAccountResponse(Account account) {
    // Builds the full "account" API response: identity, aggregated network
    // traffic, per-resource limits/usage, and running/stopped VM counts.
    boolean accountIsAdmin = (account.getType() == Account.ACCOUNT_TYPE_ADMIN);
    AccountResponse accountResponse = new AccountResponse();
    accountResponse.setId(account.getId());
    accountResponse.setName(account.getAccountName());
    accountResponse.setAccountType(account.getType());
    accountResponse.setDomainId(account.getDomainId());
    accountResponse.setDomainName(ApiDBUtils.findDomainById(account.getDomainId()).getName());
    accountResponse.setState(account.getState());

    // Aggregate network traffic over all of the account's statistics rows.
    // Use primitive longs: the original boxed each addend via Long.valueOf.
    List<UserStatisticsVO> stats = ApiDBUtils.listUserStatsBy(account.getId());
    if (stats == null) {
        throw new ServerApiException(BaseCmd.INTERNAL_ERROR, "Internal error searching for user stats");
    }
    long bytesSent = 0;
    long bytesReceived = 0;
    for (UserStatisticsVO stat : stats) {
        bytesReceived += stat.getNetBytesReceived() + stat.getCurrentBytesReceived();
        bytesSent += stat.getNetBytesSent() + stat.getCurrentBytesSent();
    }
    accountResponse.setBytesReceived(bytesReceived);
    accountResponse.setBytesSent(bytesSent);

    // Resource limits and counts; admins and -1 limits render as "Unlimited".
    Long vmLimit = ApiDBUtils.findCorrectResourceLimit(ResourceType.user_vm, account.getId());
    Long vmTotal = ApiDBUtils.getResourceCount(ResourceType.user_vm, account.getId());
    accountResponse.setVmLimit(limitDisplay(accountIsAdmin, vmLimit));
    accountResponse.setVmTotal(vmTotal);
    accountResponse.setVmAvailable(availableDisplay(accountIsAdmin, vmLimit, vmTotal));

    Long ipLimit = ApiDBUtils.findCorrectResourceLimit(ResourceType.public_ip, account.getId());
    Long ipTotal = ApiDBUtils.getResourceCount(ResourceType.public_ip, account.getId());
    accountResponse.setIpLimit(limitDisplay(accountIsAdmin, ipLimit));
    accountResponse.setIpTotal(ipTotal);
    accountResponse.setIpAvailable(availableDisplay(accountIsAdmin, ipLimit, ipTotal));

    Long volumeLimit = ApiDBUtils.findCorrectResourceLimit(ResourceType.volume, account.getId());
    Long volumeTotal = ApiDBUtils.getResourceCount(ResourceType.volume, account.getId());
    accountResponse.setVolumeLimit(limitDisplay(accountIsAdmin, volumeLimit));
    accountResponse.setVolumeTotal(volumeTotal);
    accountResponse.setVolumeAvailable(availableDisplay(accountIsAdmin, volumeLimit, volumeTotal));

    Long snapshotLimit = ApiDBUtils.findCorrectResourceLimit(ResourceType.snapshot, account.getId());
    Long snapshotTotal = ApiDBUtils.getResourceCount(ResourceType.snapshot, account.getId());
    accountResponse.setSnapshotLimit(limitDisplay(accountIsAdmin, snapshotLimit));
    accountResponse.setSnapshotTotal(snapshotTotal);
    accountResponse.setSnapshotAvailable(availableDisplay(accountIsAdmin, snapshotLimit, snapshotTotal));

    Long templateLimit = ApiDBUtils.findCorrectResourceLimit(ResourceType.template, account.getId());
    Long templateTotal = ApiDBUtils.getResourceCount(ResourceType.template, account.getId());
    accountResponse.setTemplateLimit(limitDisplay(accountIsAdmin, templateLimit));
    accountResponse.setTemplateTotal(templateTotal);
    accountResponse.setTemplateAvailable(availableDisplay(accountIsAdmin, templateLimit, templateTotal));

    // Count running and stopped VMs owned by this account.
    Criteria c1 = new Criteria();
    c1.addCriteria(Criteria.ACCOUNTID, new Long[] { account.getId() });
    int vmStopped = 0;
    int vmRunning = 0;
    for (UserVm vm : ApiDBUtils.searchForUserVMs(c1)) {
        if (vm.getState() == State.Stopped) {
            vmStopped++;
        } else if (vm.getState() == State.Running) {
            vmRunning++;
        }
    }
    accountResponse.setVmStopped(vmStopped);
    accountResponse.setVmRunning(vmRunning);
    accountResponse.setObjectName("account");
    return accountResponse;
}

/** Renders a resource limit for display: admins and -1 limits are "Unlimited". */
private static String limitDisplay(boolean isAdmin, Long limit) {
    return (isAdmin || limit == -1) ? "Unlimited" : String.valueOf(limit);
}

/** Renders remaining capacity under a limit: admins and -1 limits are "Unlimited". */
private static String availableDisplay(boolean isAdmin, Long limit, Long total) {
    return (isAdmin || limit == -1) ? "Unlimited" : String.valueOf(limit - total);
}
@Override
public DomainResponse createDomainResponse(Domain domain) {
    // Translate a domain entity into its API representation; the parent
    // domain name requires an extra lookup and is only set when a parent exists.
    DomainResponse response = new DomainResponse();
    response.setId(domain.getId());
    response.setDomainName(domain.getName());
    response.setLevel(domain.getLevel());
    Long parentId = domain.getParent();
    response.setParentDomainId(parentId);
    if (parentId != null) {
        response.setParentDomainName(ApiDBUtils.findDomainById(parentId).getName());
    }
    if (domain.getChildCount() > 0) {
        response.setHasChild(true);
    }
    response.setObjectName("domain");
    return response;
}
@Override
public DiskOfferingResponse createDiskOfferingResponse(DiskOffering offering) {
    // Translate a disk offering into its API representation; domain fields
    // are only populated for domain-scoped offerings.
    DiskOfferingResponse response = new DiskOfferingResponse();
    response.setId(offering.getId());
    response.setName(offering.getName());
    response.setDisplayText(offering.getDisplayText());
    response.setCreated(offering.getCreated());
    response.setDiskSize(offering.getDiskSize());
    response.setTags(offering.getTags());
    response.setCustomized(offering.isCustomized());
    Long domainId = offering.getDomainId();
    if (domainId != null) {
        response.setDomainId(domainId);
        response.setDomain(ApiDBUtils.findDomainById(domainId).getName());
    }
    response.setObjectName("diskoffering");
    return response;
}
@Override
public ResourceLimitResponse createResourceLimitResponse(ResourceLimit limit) {
    // Builds a "resourcelimit" response for either a domain-level or an
    // account-level limit.
    ResourceLimitResponse response = new ResourceLimitResponse();
    if (limit.getDomainId() != null) {
        response.setDomainId(limit.getDomainId());
        response.setDomainName(ApiDBUtils.findDomainById(limit.getDomainId()).getName());
    }
    // For account-level limits, report the account's own domain (this
    // overwrites any domain fields set above, matching existing behavior).
    if (limit.getAccountId() != null) {
        Account accountTemp = ApiDBUtils.findAccountById(limit.getAccountId());
        if (accountTemp != null) {
            response.setAccountName(accountTemp.getAccountName());
            response.setDomainId(accountTemp.getDomainId());
            response.setDomainName(ApiDBUtils.findDomainById(accountTemp.getDomainId()).getName());
        }
    }
    // The API exposes the resource type as its numeric ordinal; avoid the
    // Integer.valueOf(...).toString() boxing round-trip.
    response.setResourceType(String.valueOf(limit.getType().ordinal()));
    response.setMax(limit.getMax());
    response.setObjectName("resourcelimit");
    return response;
}
@Override
public ServiceOfferingResponse createServiceOfferingResponse(ServiceOffering offering) {
    // Translate a compute (service) offering into its API representation.
    ServiceOfferingResponse response = new ServiceOfferingResponse();
    response.setId(offering.getId());
    response.setName(offering.getName());
    response.setDisplayText(offering.getDisplayText());
    response.setCreated(offering.getCreated());
    response.setCpuNumber(offering.getCpu());
    response.setCpuSpeed(offering.getSpeed());
    response.setMemory(offering.getRamSize());
    response.setOfferHa(offering.getOfferHA());
    response.setTags(offering.getTags());
    // "local" when the offering uses host-local storage, "shared" otherwise.
    response.setStorageType(offering.getUseLocalStorage() ? "local" : "shared");
    response.setUseVirtualNetwork(offering.getGuestIpType().equals(GuestIpType.Virtual));
    Long domainId = offering.getDomainId();
    if (domainId != null) {
        response.setDomainId(domainId);
        response.setDomain(ApiDBUtils.findDomainById(domainId).getName());
    }
    response.setObjectName("serviceoffering");
    return response;
}
@Override
public ConfigurationResponse createConfigurationResponse(Configuration cfg) {
    // Straight field-for-field copy of a configuration entry.
    ConfigurationResponse response = new ConfigurationResponse();
    response.setName(cfg.getName());
    response.setValue(cfg.getValue());
    response.setCategory(cfg.getCategory());
    response.setDescription(cfg.getDescription());
    response.setObjectName("configuration");
    return response;
}
@Override
public SnapshotResponse createSnapshotResponse(Snapshot snapshot) {
    // Builds a "snapshot" response. Both the owning account and the source
    // volume may no longer exist; their fields are filled only when found.
    SnapshotResponse snapshotResponse = new SnapshotResponse();
    snapshotResponse.setId(snapshot.getId());
    // Rely on autoboxing instead of the explicit Long.valueOf wrapper.
    Account acct = ApiDBUtils.findAccountById(snapshot.getAccountId());
    if (acct != null) {
        snapshotResponse.setAccountName(acct.getAccountName());
        snapshotResponse.setDomainId(acct.getDomainId());
        snapshotResponse.setDomainName(ApiDBUtils.findDomainById(acct.getDomainId()).getName());
    }
    // The snapshot type is persisted as the ordinal of Snapshot.Type.
    snapshotResponse.setSnapshotType(Type.values()[snapshot.getSnapshotType()].name());
    snapshotResponse.setVolumeId(snapshot.getVolumeId());
    VolumeVO volume = findVolumeById(snapshot.getVolumeId());
    if (volume != null) {
        snapshotResponse.setVolumeName(volume.getName());
        snapshotResponse.setVolumeType(volume.getVolumeType().name());
    }
    snapshotResponse.setCreated(snapshot.getCreated());
    snapshotResponse.setName(snapshot.getName());
    snapshotResponse.setIntervalType(ApiDBUtils.getSnapshotIntervalTypes(snapshot.getId()));
    snapshotResponse.setObjectName("snapshot");
    return snapshotResponse;
}
@Override
public SnapshotPolicyResponse createSnapshotPolicyResponse(SnapshotPolicy policy) {
    // Straight field-for-field copy of a snapshot policy.
    SnapshotPolicyResponse response = new SnapshotPolicyResponse();
    response.setId(policy.getId());
    response.setVolumeId(policy.getVolumeId());
    response.setIntervalType(policy.getInterval());
    response.setSchedule(policy.getSchedule());
    response.setMaxSnaps(policy.getMaxSnaps());
    response.setTimezone(policy.getTimezone());
    response.setObjectName("snapshotpolicy");
    return response;
}
@Override
public HostResponse createHostResponse(Host host) {
    // Builds a "host" response: static attributes, resolved zone/pod/cluster
    // names, CPU allocation/utilization, memory or disk figures depending on
    // the host type, and the legal status-transition events.
    HostResponse hostResponse = new HostResponse();
    hostResponse.setId(host.getId());
    hostResponse.setCapabilities(host.getCapabilities());
    hostResponse.setClusterId(host.getClusterId());
    hostResponse.setCpuNumber(host.getCpus());
    hostResponse.setZoneId(host.getDataCenterId());
    hostResponse.setDisconnectedOn(host.getDisconnectedOn());
    hostResponse.setHypervisor(host.getHypervisorType());
    hostResponse.setHostType(host.getType());
    hostResponse.setLastPinged(new Date(host.getLastPinged()));
    hostResponse.setManagementServerId(host.getManagementServerId());
    hostResponse.setName(host.getName());
    hostResponse.setPodId(host.getPodId());
    hostResponse.setRemoved(host.getRemoved());
    hostResponse.setCpuSpeed(host.getSpeed());
    hostResponse.setState(host.getStatus());
    hostResponse.setIpAddress(host.getPrivateIpAddress());
    hostResponse.setVersion(host.getVersion());
    hostResponse.setCreated(host.getCreated());

    GuestOSCategoryVO guestOSCategory = ApiDBUtils.getHostGuestOSCategory(host.getId());
    if (guestOSCategory != null) {
        hostResponse.setOsCategoryId(guestOSCategory.getId());
        hostResponse.setOsCategoryName(guestOSCategory.getName());
    }
    hostResponse.setZoneName(ApiDBUtils.findZoneById(host.getDataCenterId()).getName());
    if (host.getPodId() != null) {
        hostResponse.setPodName(ApiDBUtils.findPodById(host.getPodId()).getName());
    }

    DecimalFormat decimalFormat = new DecimalFormat("#.##");

    // CPU allocated: sum of (cores * speed) over every user VM on this host,
    // as a percentage of the host's total CPU capacity.
    if ((host.getCpus() != null) && (host.getSpeed() != null)) {
        int cpu = 0;
        List<UserVmVO> instances = ApiDBUtils.listUserVMsByHostId(host.getId());
        for (UserVmVO vm : instances) {
            ServiceOffering so = ApiDBUtils.findServiceOfferingById(vm.getServiceOfferingId());
            cpu += so.getCpu() * so.getSpeed();
        }
        String cpuAlloc = decimalFormat.format(((float) cpu / (float) (host.getCpus() * host.getSpeed())) * 100f) + "%";
        hostResponse.setCpuAllocated(cpuAlloc);
    }

    // CPU utilization and network throughput come from live host statistics.
    HostStats hostStats = ApiDBUtils.getHostStatistics(host.getId());
    if (hostStats != null) {
        float cpuUtil = (float) hostStats.getCpuUtilization();
        hostResponse.setCpuUsed(decimalFormat.format(cpuUtil) + "%");
        hostResponse.setAverageLoad(Double.doubleToLongBits(hostStats.getAverageLoad()));
        hostResponse.setNetworkKbsRead(Double.doubleToLongBits(hostStats.getNetworkReadKBs()));
        hostResponse.setNetworkKbsWrite(Double.doubleToLongBits(hostStats.getNetworkWriteKBs()));
    }

    if (host.getType() == Host.Type.Routing) {
        hostResponse.setMemoryTotal(host.getTotalMemory());
        // Memory allocated by system and user VMs; reported both as allocated and used.
        Long mem = ApiDBUtils.getMemoryUsagebyHost(host.getId());
        hostResponse.setMemoryAllocated(mem);
        hostResponse.setMemoryUsed(mem);
    } else if (host.getType() == Host.Type.Storage) {
        // Compare the enum directly (was toString().equals("Storage")),
        // consistent with the Routing comparison above.
        hostResponse.setDiskSizeTotal(host.getTotalSize());
        hostResponse.setDiskSizeAllocated(0L);
    }

    if (host.getClusterId() != null) {
        ClusterVO cluster = ApiDBUtils.findClusterById(host.getClusterId());
        hostResponse.setClusterName(cluster.getName());
    }
    hostResponse.setLocalStorageActive(ApiDBUtils.isLocalStorageActiveOnHost(host));

    // List the events that can legally be applied to the host's current status,
    // joined with "; " (StringBuilder instead of repeated string concatenation).
    Set<com.cloud.host.Status.Event> possibleEvents = host.getStatus().getPossibleEvents();
    if ((possibleEvents != null) && !possibleEvents.isEmpty()) {
        StringBuilder events = new StringBuilder();
        for (com.cloud.host.Status.Event event : possibleEvents) {
            if (events.length() > 0) {
                events.append("; ");
            }
            events.append(event.toString());
        }
        hostResponse.setEvents(events.toString());
    }
    hostResponse.setObjectName("host");
    return hostResponse;
}
@Override
public VlanIpRangeResponse createVlanIpRangeResponse(Vlan vlan) {
    // Builds a "vlan" response; pod, network and ownership details are all
    // optional and resolved only when present.
    VlanIpRangeResponse vlanResponse = new VlanIpRangeResponse();
    vlanResponse.setId(vlan.getId());
    vlanResponse.setForVirtualNetwork(vlan.getVlanType().equals(VlanType.VirtualNetwork));
    vlanResponse.setVlan(vlan.getVlanId());
    vlanResponse.setZoneId(vlan.getDataCenterId());
    Long podId = ApiDBUtils.getPodIdForVlan(vlan.getId());
    if (podId != null) {
        HostPodVO pod = ApiDBUtils.findPodById(podId);
        vlanResponse.setPodId(podId);
        vlanResponse.setPodName(pod.getName());
    }
    vlanResponse.setGateway(vlan.getVlanGateway());
    vlanResponse.setNetmask(vlan.getVlanNetmask());
    // The persisted range is "startIp-endIp". Tolerate a dash-less value:
    // the previous code indexed range[1] unconditionally and could throw
    // ArrayIndexOutOfBoundsException on a malformed/single-address range.
    String[] range = vlan.getIpRange().split("-");
    vlanResponse.setStartIp(range[0]);
    vlanResponse.setEndIp(range.length > 1 ? range[1] : "");
    // Resolve ownership through the network the VLAN belongs to, when present.
    Long networkId = vlan.getNetworkId();
    if (networkId != null) {
        vlanResponse.setNetworkId(networkId);
        Network network = ApiDBUtils.findNetworkById(networkId);
        if (network != null) {
            Long accountId = network.getAccountId();
            if (accountId != null) {
                Account account = ApiDBUtils.findAccountById(accountId);
                vlanResponse.setAccountName(account.getAccountName());
                vlanResponse.setDomainId(account.getDomainId());
                vlanResponse.setDomainName(ApiDBUtils.findDomainById(account.getDomainId()).getName());
            }
        }
    }
    vlanResponse.setObjectName("vlan");
    return vlanResponse;
}
@Override
public IPAddressResponse createIPAddressResponse(IpAddress ipAddress) {
    // Builds an "ipaddress" response. The VLAN is looked up once and reused
    // both for the virtual-network flag and the admin-only VLAN details
    // (the original performed a second identical DB lookup).
    VlanVO vlan = ApiDBUtils.findVlanById(ipAddress.getVlanId());
    boolean forVirtualNetworks = vlan.getVlanType().equals(VlanType.VirtualNetwork);
    IPAddressResponse ipResponse = new IPAddressResponse();
    ipResponse.setIpAddress(ipAddress.getAddress());
    if (ipAddress.getAllocatedTime() != null) {
        ipResponse.setAllocated(ipAddress.getAllocatedTime());
    }
    ipResponse.setZoneId(ipAddress.getDataCenterId());
    ipResponse.setZoneName(ApiDBUtils.findZoneById(ipAddress.getDataCenterId()).getName());
    ipResponse.setSourceNat(ipAddress.isSourceNat());
    // Ownership information, when the allocating account still exists.
    Account accountTemp = ApiDBUtils.findAccountById(ipAddress.getAllocatedToAccountId());
    if (accountTemp != null) {
        ipResponse.setAccountName(accountTemp.getAccountName());
        ipResponse.setDomainId(accountTemp.getDomainId());
        ipResponse.setDomainName(ApiDBUtils.findDomainById(accountTemp.getDomainId()).getName());
    }
    ipResponse.setForVirtualNetwork(forVirtualNetworks);
    ipResponse.setStaticNat(ipAddress.isOneToOneNat());
    // VLAN details are admin-only.
    Account account = UserContext.current().getAccount();
    if ((account == null) || account.getType() == Account.ACCOUNT_TYPE_ADMIN) {
        ipResponse.setVlanId(ipAddress.getVlanId());
        ipResponse.setVlanName(vlan.getVlanId());
    }
    ipResponse.setObjectName("ipaddress");
    return ipResponse;
}
@Override
public LoadBalancerResponse createLoadBalancerResponse(LoadBalancer loadBalancer) {
    // Translate a load-balancer rule into its API representation; ownership
    // fields are filled only when the owning account still exists.
    LoadBalancerResponse response = new LoadBalancerResponse();
    response.setId(loadBalancer.getId());
    response.setName(loadBalancer.getName());
    response.setDescription(loadBalancer.getDescription());
    response.setAlgorithm(loadBalancer.getAlgorithm());
    response.setPublicIp(loadBalancer.getSourceIpAddress().toString());
    response.setPublicPort(Integer.toString(loadBalancer.getSourcePortStart()));
    response.setPrivatePort(Integer.toString(loadBalancer.getDefaultPortStart()));
    Account owner = ApiDBUtils.findAccountById(loadBalancer.getAccountId());
    if (owner != null) {
        response.setAccountName(owner.getAccountName());
        response.setDomainId(owner.getDomainId());
        response.setDomainName(ApiDBUtils.findDomainById(owner.getDomainId()).getName());
    }
    response.setObjectName("loadbalancer");
    return response;
}
@Override
public PodResponse createPodResponse(Pod pod) {
    // The pod's IP range is persisted in its description as "startIp-endIp".
    String description = pod.getDescription();
    String startIp;
    String endIp = "";
    if (description != null && description.length() > 0) {
        String[] range = description.split("-");
        startIp = range[0];
        if (range.length > 1 && range[1] != null) {
            endIp = range[1];
        }
    } else {
        // No range recorded: start IP mirrors the (null/empty) description.
        startIp = description;
    }
    PodResponse response = new PodResponse();
    response.setId(pod.getId());
    response.setName(pod.getName());
    response.setZoneId(pod.getDataCenterId());
    response.setZoneName(PodZoneConfig.getZoneName(pod.getDataCenterId()));
    response.setNetmask(NetUtils.getCidrNetmask(pod.getCidrSize()));
    response.setStartIp(startIp);
    response.setEndIp(endIp);
    response.setGateway(pod.getGateway());
    response.setObjectName("pod");
    return response;
}
@Override
public ZoneResponse createZoneResponse(DataCenter dataCenter) {
    // Translate a zone into its API representation. DNS, VLAN and guest-CIDR
    // details are only exposed to admin callers (or when no caller context exists).
    Account caller = UserContext.current().getAccount();
    ZoneResponse response = new ZoneResponse();
    response.setId(dataCenter.getId());
    response.setName(dataCenter.getName());
    String description = dataCenter.getDescription();
    if (description != null && !description.equalsIgnoreCase("null")) {
        response.setDescription(description);
    }
    boolean adminView = (caller == null) || (caller.getType() == Account.ACCOUNT_TYPE_ADMIN);
    if (adminView) {
        response.setDns1(dataCenter.getDns1());
        response.setDns2(dataCenter.getDns2());
        response.setInternalDns1(dataCenter.getInternalDns1());
        response.setInternalDns2(dataCenter.getInternalDns2());
        response.setVlan(dataCenter.getVnet());
        response.setGuestCidrAddress(dataCenter.getGuestNetworkCidr());
    }
    response.setDomain(dataCenter.getDomain());
    response.setDomainId(dataCenter.getDomainId());
    response.setType(dataCenter.getNetworkType().toString());
    response.setObjectName("zone");
    return response;
}
@Override
public VolumeResponse createVolumeResponse(Volume volume) {
    // Builds a "volume" response: identity, attachment, ownership, storage
    // classification and offering details.
    VolumeResponse volResponse = new VolumeResponse();
    volResponse.setId(volume.getId());
    // Name may be unset; the API always returns a string.
    volResponse.setName(volume.getName() != null ? volume.getName() : "");
    volResponse.setZoneId(volume.getDataCenterId());
    volResponse.setZoneName(ApiDBUtils.findZoneById(volume.getDataCenterId()).getName());
    volResponse.setVolumeType(volume.getVolumeType().toString());
    volResponse.setDeviceId(volume.getDeviceId());

    // Attachment details, when the volume is attached to a VM.
    Long instanceId = volume.getInstanceId();
    if (instanceId != null) {
        VMInstanceVO vm = ApiDBUtils.findVMInstanceById(instanceId);
        volResponse.setVirtualMachineId(vm.getId());
        volResponse.setVirtualMachineName(vm.getName());
        volResponse.setVirtualMachineDisplayName(vm.getName());
        volResponse.setVirtualMachineState(vm.getState().toString());
    }

    // Show the virtual size of the volume.
    volResponse.setSize(volume.getSize());
    volResponse.setCreated(volume.getCreated());
    volResponse.setState(volume.getStatus().toString());

    Account accountTemp = ApiDBUtils.findAccountById(volume.getAccountId());
    if (accountTemp != null) {
        volResponse.setAccountName(accountTemp.getAccountName());
        volResponse.setDomainId(accountTemp.getDomainId());
        volResponse.setDomainName(ApiDBUtils.findDomainById(accountTemp.getDomainId()).getName());
    }

    // Classify the backing storage. A pool-less volume that is still Allocated
    // is reported as "shared" so the UI allows attaching it to a VM.
    String storageType;
    try {
        if (volume.getPoolId() == null) {
            storageType = (volume.getState() == Volume.State.Allocated) ? "shared" : "unknown";
        } else {
            storageType = ApiDBUtils.volumeIsOnSharedStorage(volume.getId()) ? "shared" : "local";
        }
    } catch (InvalidParameterValueException e) {
        s_logger.error(e.getMessage(), e);
        throw new ServerApiException(BaseCmd.INTERNAL_ERROR, "Volume " + volume.getName() + " does not have a valid ID");
    }
    volResponse.setStorageType(storageType);

    // ROOT volumes report their offering as a service offering; data volumes
    // report a disk offering. (The original evaluated this same condition twice.)
    DiskOfferingVO diskOffering = ApiDBUtils.findDiskOfferingById(volume.getDiskOfferingId());
    if (volume.getVolumeType().equals(Volume.VolumeType.ROOT)) {
        volResponse.setServiceOfferingId(volume.getDiskOfferingId());
        volResponse.setServiceOfferingName(diskOffering.getName());
        volResponse.setServiceOfferingDisplayText(diskOffering.getDisplayText());
    } else {
        volResponse.setDiskOfferingId(volume.getDiskOfferingId());
        volResponse.setDiskOfferingName(diskOffering.getName());
        volResponse.setDiskOfferingDisplayText(diskOffering.getDisplayText());
    }

    Long poolId = volume.getPoolId();
    volResponse.setStoragePoolName((poolId == null) ? "none" : ApiDBUtils.findStoragePoolById(poolId).getName());
    volResponse.setSourceId(volume.getSourceId());
    if (volume.getSourceType() != null) {
        volResponse.setSourceType(volume.getSourceType().toString());
    }
    volResponse.setHypervisor(ApiDBUtils.getVolumeHyperType(volume.getId()).toString());
    volResponse.setAttached(volume.getAttached());
    volResponse.setDestroyed(volume.getDestroyed());
    volResponse.setObjectName("volume");
    return volResponse;
}
@Override
public InstanceGroupResponse createInstanceGroupResponse(InstanceGroup group) {
    // Translate a VM instance group into its API representation.
    InstanceGroupResponse response = new InstanceGroupResponse();
    response.setId(group.getId());
    response.setName(group.getName());
    response.setCreated(group.getCreated());
    Account owner = ApiDBUtils.findAccountById(group.getAccountId());
    if (owner != null) {
        response.setAccountName(owner.getAccountName());
        response.setDomainId(owner.getDomainId());
        response.setDomainName(ApiDBUtils.findDomainById(owner.getDomainId()).getName());
    }
    response.setObjectName("instancegroup");
    return response;
}
@Override
public PreallocatedLunResponse createPreallocatedLunResponse(Object result) {
    // The command layer hands us an untyped result; it is always a PreallocatedLunVO.
    PreallocatedLunVO lun = (PreallocatedLunVO) result;
    PreallocatedLunResponse response = new PreallocatedLunResponse();
    response.setId(lun.getId());
    response.setVolumeId(lun.getVolumeId());
    response.setZoneId(lun.getDataCenterId());
    response.setLun(lun.getLun());
    response.setPortal(lun.getPortal());
    response.setSize(lun.getSize());
    response.setTaken(lun.getTaken());
    response.setTargetIqn(lun.getTargetIqn());
    response.setObjectName("preallocatedlun");
    return response;
}
@Override
public StoragePoolResponse createStoragePoolResponse(StoragePool pool) {
    // Translate a storage pool into its API representation, preferring live
    // statistics for the used-space figure when available.
    StoragePoolResponse response = new StoragePoolResponse();
    response.setId(pool.getId());
    response.setName(pool.getName());
    response.setState(pool.getStatus());
    response.setPath(pool.getPath());
    response.setIpAddress(pool.getHostAddress());
    response.setZoneId(pool.getDataCenterId());
    response.setZoneName(ApiDBUtils.findZoneById(pool.getDataCenterId()).getName());
    if (pool.getPoolType() != null) {
        response.setType(pool.getPoolType().toString());
    }
    Long podId = pool.getPodId();
    if (podId != null) {
        response.setPodId(podId);
        response.setPodName(ApiDBUtils.findPodById(podId).getName());
    }
    if (pool.getCreated() != null) {
        response.setCreated(pool.getCreated());
    }
    // Fall back to capacity - available when no live stats exist.
    Long capacity = pool.getCapacityBytes();
    Long used = capacity - pool.getAvailableBytes();
    StorageStats stats = ApiDBUtils.getStoragePoolStatistics(pool.getId());
    if (stats != null) {
        used = stats.getByteUsed();
    }
    response.setDiskSizeTotal(capacity);
    response.setDiskSizeAllocated(used);
    if (pool.getClusterId() != null) {
        ClusterVO cluster = ApiDBUtils.findClusterById(pool.getClusterId());
        response.setClusterId(cluster.getId());
        response.setClusterName(cluster.getName());
    }
    response.setTags(ApiDBUtils.getStoragePoolTags(pool.getId()));
    response.setObjectName("storagepool");
    return response;
}
@Override
public ClusterResponse createClusterResponse(Cluster cluster) {
    // Copy identifiers, then resolve the human-readable pod and zone names.
    ClusterResponse response = new ClusterResponse();
    response.setId(cluster.getId());
    response.setName(cluster.getName());
    response.setPodId(cluster.getPodId());
    response.setZoneId(cluster.getDataCenterId());
    response.setPodName(ApiDBUtils.findPodById(cluster.getPodId()).getName());
    response.setZoneName(ApiDBUtils.findZoneById(cluster.getDataCenterId()).getName());
    response.setObjectName("cluster");
    return response;
}
@Override
public FirewallRuleResponse createFirewallRuleResponse(PortForwardingRule fwRule) {
    // Builds a "portforwardingrule" response. VM details are best-effort:
    // the target VM may already have been destroyed.
    FirewallRuleResponse response = new FirewallRuleResponse();
    response.setId(fwRule.getId());
    response.setPrivatePort(Integer.toString(fwRule.getDestinationPortStart()));
    response.setProtocol(fwRule.getProtocol());
    response.setPublicPort(Integer.toString(fwRule.getSourcePortStart()));
    // Check for null BEFORE dereferencing: the original called
    // getSourceIpAddress().toString() unconditionally and could NPE on a rule
    // with no source address, despite null-checking the same value just after.
    if (fwRule.getSourceIpAddress() != null) {
        response.setPublicIpAddress(fwRule.getSourceIpAddress().toString());
        if (fwRule.getDestinationIpAddress() != null) {
            UserVm vm = ApiDBUtils.findUserVmById(fwRule.getVirtualMachineId());
            if (vm != null) { // vm might be destroyed
                response.setVirtualMachineId(vm.getId());
                response.setVirtualMachineName(vm.getName());
                response.setVirtualMachineDisplayName(vm.getDisplayName());
            }
        }
    }
    // Rules being revoked are reported as "Deleting" rather than the raw state.
    FirewallRule.State state = fwRule.getState();
    response.setState(state == FirewallRule.State.Revoke ? "Deleting" : state.toString());
    response.setObjectName("portforwardingrule");
    return response;
}
@Override
public IpForwardingRuleResponse createIpForwardingRuleResponse(PortForwardingRule fwRule) {
    // Builds an "ipforwardingrule" response. VM details are best-effort:
    // the target VM may already have been destroyed.
    IpForwardingRuleResponse response = new IpForwardingRuleResponse();
    response.setId(fwRule.getId());
    response.setProtocol(fwRule.getProtocol());
    // Check for null BEFORE dereferencing: the original called
    // getSourceIpAddress().addr() unconditionally and could NPE on a rule with
    // no source address, despite null-checking the same value just after.
    if (fwRule.getSourceIpAddress() != null) {
        response.setPublicIpAddress(fwRule.getSourceIpAddress().addr());
        if (fwRule.getDestinationIpAddress() != null) {
            UserVm vm = ApiDBUtils.findUserVmById(fwRule.getVirtualMachineId());
            if (vm != null) { // vm might be destroyed
                response.setVirtualMachineId(vm.getId());
                response.setVirtualMachineName(vm.getName());
                response.setVirtualMachineDisplayName(vm.getDisplayName());
            }
        }
    }
    // Rules being revoked are reported as "Deleting" rather than the raw state.
    FirewallRule.State state = fwRule.getState();
    response.setState(state == FirewallRule.State.Revoke ? "Deleting" : state.toString());
    response.setObjectName("ipforwardingrule");
    return response;
}
/**
 * Builds the full API response for a user VM: account/domain, placement,
 * template/ISO, service offering, root volume, live stats and nics.
 *
 * @param userVm the VM to render
 * @return the populated response, or {@code null} when the owning account
 *         has been removed (the VM is skipped in the listing)
 */
@Override
public UserVmResponse createUserVmResponse(UserVm userVm) {
    UserVmResponse userVmResponse = new UserVmResponse();
    Account acct = ApiDBUtils.findAccountById(Long.valueOf(userVm.getAccountId()));
    // FIXME - this check should be done in searchForUserVm method in
    // ManagementServerImpl; otherwise the number of vms returned is not
    // going to match the pageSize request parameter
    if ((acct != null) && (acct.getRemoved() == null)) {
        userVmResponse.setAccountName(acct.getAccountName());
        userVmResponse.setDomainId(acct.getDomainId());
        userVmResponse.setDomainName(ApiDBUtils.findDomainById(acct.getDomainId()).getName());
    } else {
        return null; // the account has been deleted, skip this VM in the response
    }
    userVmResponse.setId(userVm.getId());
    userVmResponse.setName(userVm.getName());
    userVmResponse.setCreated(userVm.getCreated());
    if (userVm.getState() != null) {
        userVmResponse.setState(userVm.getState().toString());
    }
    userVmResponse.setHaEnable(userVm.isHaEnabled());
    // fall back to the VM name when no display name was set
    if (userVm.getDisplayName() != null) {
        userVmResponse.setDisplayName(userVm.getDisplayName());
    } else {
        userVmResponse.setDisplayName(userVm.getName());
    }
    InstanceGroupVO group = ApiDBUtils.findInstanceGroupForVM(userVm.getId());
    if (group != null) {
        userVmResponse.setGroup(group.getName());
        userVmResponse.setGroupId(group.getId());
    }
    // Data Center Info
    userVmResponse.setZoneId(userVm.getDataCenterId());
    userVmResponse.setZoneName(ApiDBUtils.findZoneById(userVm.getDataCenterId()).getName());
    Account account = UserContext.current().getAccount();
    // if the caller is an admin (or there is no caller account), expose the host
    if (((account == null) || (account.getType() == Account.ACCOUNT_TYPE_ADMIN)) && (userVm.getHostId() != null)) {
        userVmResponse.setHostId(userVm.getHostId());
        userVmResponse.setHostName(ApiDBUtils.findHostById(userVm.getHostId()).getName());
    }
    // Template Info
    VMTemplateVO template = ApiDBUtils.findTemplateById(userVm.getTemplateId());
    if (template != null) {
        userVmResponse.setTemplateId(userVm.getTemplateId());
        userVmResponse.setTemplateName(template.getName());
        userVmResponse.setTemplateDisplayText(template.getDisplayText());
        userVmResponse.setPasswordEnabled(template.getEnablePassword());
    } else {
        // no template row: the VM was booted from an ISO
        userVmResponse.setTemplateId(-1L);
        userVmResponse.setTemplateName("ISO Boot");
        userVmResponse.setTemplateDisplayText("ISO Boot");
        userVmResponse.setPasswordEnabled(false);
    }
    if (userVm.getPassword() != null) {
        userVmResponse.setPassword(userVm.getPassword());
    }
    // ISO Info
    if (userVm.getIsoId() != null) {
        VMTemplateVO iso = ApiDBUtils.findTemplateById(userVm.getIsoId());
        if (iso != null) {
            userVmResponse.setIsoId(userVm.getIsoId());
            userVmResponse.setIsoName(iso.getName());
        }
    }
    // Service Offering Info
    ServiceOffering offering = ApiDBUtils.findServiceOfferingById(userVm.getServiceOfferingId());
    userVmResponse.setServiceOfferingId(userVm.getServiceOfferingId());
    userVmResponse.setServiceOfferingName(offering.getName());
    userVmResponse.setCpuNumber(offering.getCpu());
    userVmResponse.setCpuSpeed(offering.getSpeed());
    userVmResponse.setMemory(offering.getRamSize());
    VolumeVO rootVolume = ApiDBUtils.findRootVolume(userVm.getId());
    if (rootVolume != null) {
        userVmResponse.setRootDeviceId(rootVolume.getDeviceId());
        String rootDeviceType = "Not created";
        if (rootVolume.getPoolId() != null) {
            StoragePoolVO storagePool = ApiDBUtils.findStoragePoolById(rootVolume.getPoolId());
            rootDeviceType = storagePool.getPoolType().toString();
        }
        userVmResponse.setRootDeviceType(rootDeviceType);
    }
    // stats calculation
    DecimalFormat decimalFormat = new DecimalFormat("#.##");
    String cpuUsed = null;
    VmStats vmStats = ApiDBUtils.getVmStatistics(userVm.getId());
    if (vmStats != null) {
        float cpuUtil = (float) vmStats.getCPUUtilization();
        cpuUsed = decimalFormat.format(cpuUtil) + "%";
        userVmResponse.setCpuUsed(cpuUsed);
        // BUG FIX: the previous code used Double.doubleToLongBits(), which
        // returns the raw IEEE-754 bit pattern of the double (an enormous,
        // meaningless number), not its numeric value. Round to the nearest
        // whole KB instead.
        Long networkKbRead = Math.round(vmStats.getNetworkReadKBs());
        userVmResponse.setNetworkKbsRead(networkKbRead);
        Long networkKbWrite = Math.round(vmStats.getNetworkWriteKBs());
        userVmResponse.setNetworkKbsWrite(networkKbWrite);
    }
    userVmResponse.setGuestOsId(userVm.getGuestOSId());
    // network groups
    userVmResponse.setNetworkGroupList(ApiDBUtils.getNetworkGroupsNamesForVm(userVm.getId()));
    List<? extends Nic> nics = ApiDBUtils.getNics(userVm);
    List<NicResponse> nicResponses = new ArrayList<NicResponse>();
    for (Nic singleNic : nics) {
        NicResponse nicResponse = new NicResponse();
        nicResponse.setId(singleNic.getId());
        nicResponse.setIpaddress(singleNic.getIp4Address());
        nicResponse.setGateway(singleNic.getGateway());
        nicResponse.setNetmask(singleNic.getNetmask());
        nicResponse.setNetworkid(singleNic.getNetworkId());
        // broadcast/isolation URIs are admin-only details
        if (acct.getType() == Account.ACCOUNT_TYPE_ADMIN) {
            if (singleNic.getBroadcastUri() != null) {
                nicResponse.setBroadcastUri(singleNic.getBroadcastUri().toString());
            }
            if (singleNic.getIsolationUri() != null) {
                nicResponse.setIsolationUri(singleNic.getIsolationUri().toString());
            }
        }
        // Set traffic type
        Network network = ApiDBUtils.findNetworkById(singleNic.getNetworkId());
        nicResponse.setTrafficType(network.getTrafficType().toString());
        // Set type
        NetworkOffering networkOffering = ApiDBUtils.findNetworkOfferingById(network.getNetworkOfferingId());
        if (networkOffering.getGuestIpType() != null) {
            nicResponse.setType(networkOffering.getGuestIpType().toString());
        }
        nicResponse.setObjectName("nic");
        nicResponses.add(nicResponse);
    }
    userVmResponse.setNics(nicResponses);
    userVmResponse.setObjectName("virtualmachine");
    return userVmResponse;
}
/**
 * Builds the API response object for a virtual (domain) router.
 *
 * Copies identity and placement fields from the router row, resolves the
 * owning account/domain, then derives public/private/guest addressing from
 * the router's nics based on each nic's network traffic type.
 *
 * @param router the router entity to render
 * @return the populated DomainRouterResponse
 */
@Override
public DomainRouterResponse createDomainRouterResponse(VirtualRouter router) {
DomainRouterResponse routerResponse = new DomainRouterResponse();
routerResponse.setId(router.getId());
routerResponse.setZoneId(router.getDataCenterId());
routerResponse.setName(router.getName());
routerResponse.setPodId(router.getPodId());
routerResponse.setTemplateId(router.getTemplateId());
routerResponse.setCreated(router.getCreated());
routerResponse.setState(router.getState());
routerResponse.setNetworkDomain(router.getDomain());
// host details are only available when the router is assigned to a host
if (router.getHostId() != null) {
routerResponse.setHostId(router.getHostId());
routerResponse.setHostName(ApiDBUtils.findHostById(router.getHostId()).getName());
}
Account accountTemp = ApiDBUtils.findAccountById(router.getAccountId());
if (accountTemp != null) {
routerResponse.setAccountName(accountTemp.getAccountName());
routerResponse.setDomainId(accountTemp.getDomainId());
routerResponse.setDomainName(ApiDBUtils.findDomainById(accountTemp.getDomainId()).getName());
}
// Map each nic onto the response fields for its network's traffic type:
// Public -> public ip/mac/netmask, Control -> private, Guest -> guest.
List<? extends Nic> nics = ApiDBUtils.getNics(router);
for (Nic singleNic : nics) {
Network network = ApiDBUtils.findNetworkById(singleNic.getNetworkId());
if (network != null) {
if (network.getTrafficType() == TrafficType.Public) {
routerResponse.setPublicIp(singleNic.getIp4Address());
routerResponse.setPublicMacAddress(singleNic.getMacAddress());
routerResponse.setPublicNetmask(singleNic.getNetmask());
} else if (network.getTrafficType() == TrafficType.Control) {
routerResponse.setPrivateIp(singleNic.getIp4Address());
routerResponse.setPrivateMacAddress(singleNic.getMacAddress());
routerResponse.setPrivateNetmask(singleNic.getNetmask());
} else if (network.getTrafficType() == TrafficType.Guest) {
routerResponse.setGuestIpAddress(singleNic.getIp4Address());
routerResponse.setGuestMacAddress(singleNic.getMacAddress());
routerResponse.setGuestNetmask(singleNic.getNetmask());
}
}
}
// zone DNS settings are surfaced alongside the router for convenience
DataCenter zone = ApiDBUtils.findZoneById(router.getDataCenterId());
if (zone != null) {
routerResponse.setZoneName(zone.getName());
routerResponse.setDns1(zone.getDns1());
routerResponse.setDns2(zone.getDns2());
}
routerResponse.setObjectName("domainrouter");
return routerResponse;
}
/**
 * Builds the API response for a system VM (console proxy or secondary
 * storage VM). All fields are populated only when the argument is actually
 * a {@link SystemVm}; otherwise just the object name is set.
 *
 * @param systemVM the system VM entity to render
 * @return the populated SystemVmResponse
 */
@Override
public SystemVmResponse createSystemVmResponse(VirtualMachine systemVM) {
SystemVmResponse vmResponse = new SystemVmResponse();
if (systemVM instanceof SystemVm) {
SystemVm vm = (SystemVm) systemVM;
vmResponse.setId(vm.getId());
vmResponse.setSystemVmType(vm.getType().toString().toLowerCase());
vmResponse.setZoneId(vm.getDataCenterId());
vmResponse.setNetworkDomain(vm.getDomain());
vmResponse.setName(vm.getName());
vmResponse.setPodId(vm.getPodId());
vmResponse.setTemplateId(vm.getTemplateId());
vmResponse.setCreated(vm.getCreated());
// host info only exists when the VM is assigned to a host
if (vm.getHostId() != null) {
vmResponse.setHostId(vm.getHostId());
vmResponse.setHostName(ApiDBUtils.findHostById(vm.getHostId()).getName());
}
if (vm.getState() != null) {
vmResponse.setState(vm.getState().toString());
}
// NOTE(review): instanceType is computed here but never used in this
// method - confirm whether it should be set on the response.
String instanceType = "console_proxy";
if (systemVM instanceof SecondaryStorageVmVO) {
instanceType = "sec_storage_vm"; // FIXME: this should be a constant so that
// async jobs get updated with the correct instance type; they are using
// different instance types at the moment
}
// for console proxies, add the active sessions
if (systemVM instanceof ConsoleProxyVO) {
ConsoleProxyVO proxy = (ConsoleProxyVO) systemVM;
vmResponse.setActiveViewerSessions(proxy.getActiveSession());
}
// surface the zone name and DNS servers for the VM's zone
DataCenter zone = ApiDBUtils.findZoneById(vm.getDataCenterId());
if (zone != null) {
vmResponse.setZoneName(zone.getName());
vmResponse.setDns1(zone.getDns1());
vmResponse.setDns2(zone.getDns2());
}
// Map nics onto public/private addressing by their network traffic type.
List<? extends Nic> nics = ApiDBUtils.getNics(systemVM);
for (Nic singleNic : nics) {
Network network = ApiDBUtils.findNetworkById(singleNic.getNetworkId());
if (network != null) {
if (network.getTrafficType() == TrafficType.Public) {
vmResponse.setPublicIp(singleNic.getIp4Address());
vmResponse.setPublicMacAddress(singleNic.getMacAddress());
vmResponse.setPublicNetmask(singleNic.getNetmask());
} else if (network.getTrafficType() == TrafficType.Control) {
vmResponse.setPrivateIp(singleNic.getIp4Address());
vmResponse.setPrivateMacAddress(singleNic.getMacAddress());
vmResponse.setPrivateNetmask(singleNic.getNetmask());
}
}
}
}
vmResponse.setObjectName("systemvm");
return vmResponse;
}
/**
 * Delegates async-command/object synchronization to {@link ApiDBUtils}.
 */
@Override
public void synchronizeCommand(Object job, String syncObjType, Long syncObjId) {
ApiDBUtils.synchronizeCommand(job, syncObjType, syncObjId);
}
/**
 * Looks up a user by id via {@link ApiDBUtils}; may return null when absent.
 */
@Override
public User findUserById(Long userId) {
return ApiDBUtils.findUserById(userId);
}
/**
 * Looks up a user VM by id via {@link ApiDBUtils}; may return null when absent.
 */
@Override
public UserVm findUserVmById(Long vmId) {
return ApiDBUtils.findUserVmById(vmId);
}
/**
 * Looks up a volume by id via {@link ApiDBUtils}; may return null when absent.
 */
@Override
public VolumeVO findVolumeById(Long volumeId) {
return ApiDBUtils.findVolumeById(volumeId);
}
/**
 * Looks up an account by (name, domain) via {@link ApiDBUtils}.
 */
@Override
public Account findAccountByNameDomain(String accountName, Long domainId) {
return ApiDBUtils.findAccountByNameDomain(accountName, domainId);
}
/**
 * Looks up a template by id via {@link ApiDBUtils}; may return null when absent.
 */
@Override
public VirtualMachineTemplate findTemplateById(Long templateId) {
return ApiDBUtils.findTemplateById(templateId);
}
/**
 * Maps a {@link VpnUser} entity onto its API response object, resolving the
 * owning account (when it still exists) for the domain id and domain name.
 *
 * @param vpnUser the VPN user to render
 * @return the populated VpnUsersResponse
 */
@Override
public VpnUsersResponse createVpnUserResponse(VpnUser vpnUser) {
    VpnUsersResponse response = new VpnUsersResponse();
    response.setId(vpnUser.getId());
    response.setUserName(vpnUser.getUsername());
    response.setAccountName(vpnUser.getAccountName());

    Account owner = ApiDBUtils.findAccountById(vpnUser.getAccountId());
    if (owner != null) {
        // Domain details come from the owning account, not the VPN user row.
        response.setDomainId(owner.getDomainId());
        response.setDomainName(ApiDBUtils.findDomainById(owner.getDomainId()).getName());
    }

    response.setObjectName("vpnuser");
    return response;
}
/**
 * Maps a {@link RemoteAccessVpn} entity onto its API response object,
 * resolving the owning account (when it still exists) for domain details.
 *
 * @param vpn the remote-access VPN to render
 * @return the populated RemoteAccessVpnResponse
 */
@Override
public RemoteAccessVpnResponse createRemoteAccessVpnResponse(RemoteAccessVpn vpn) {
    RemoteAccessVpnResponse response = new RemoteAccessVpnResponse();
    response.setId(vpn.getId());
    response.setPublicIp(vpn.getVpnServerAddress());
    response.setIpRange(vpn.getIpRange());
    response.setPresharedKey(vpn.getIpsecPresharedKey());
    response.setAccountName(vpn.getAccountName());

    Account owner = ApiDBUtils.findAccountById(vpn.getAccountId());
    if (owner != null) {
        // Domain details come from the owning account record.
        response.setDomainId(owner.getDomainId());
        response.setDomainName(ApiDBUtils.findDomainById(owner.getDomainId()).getName());
    }

    response.setObjectName("remoteaccessvpn");
    return response;
}
/**
 * Builds an "iso" API response for the given template/ISO entity.
 *
 * Non-ISO formats carry a hypervisor; ISOs carry an explicit bootable flag
 * (templates are always bootable).
 *
 * @param result the template/ISO to render
 * @return the populated TemplateResponse tagged as "iso"
 */
@Override
public TemplateResponse createIsoResponse(VirtualMachineTemplate result) {
    TemplateResponse isoResponse = new TemplateResponse();
    isoResponse.setId(result.getId());
    isoResponse.setName(result.getName());
    isoResponse.setDisplayText(result.getDisplayText());
    isoResponse.setPublic(result.isPublicTemplate());
    isoResponse.setCreated(result.getCreated());
    isoResponse.setFormat(result.getFormat());
    isoResponse.setOsTypeId(result.getGuestOSId());
    isoResponse.setOsTypeName(ApiDBUtils.findGuestOSById(result.getGuestOSId()).getDisplayName());

    if (result.getFormat() != ImageFormat.ISO) {
        // hypervisors are associated with templates
        isoResponse.setHypervisor(result.getHypervisorType().toString());
    } else {
        // Templates are always bootable; ISOs carry an explicit flag.
        isoResponse.setBootable(result.isBootable());
    }

    // add account ID and name
    Account owner = ApiDBUtils.findAccountById(result.getAccountId());
    if (owner != null) {
        isoResponse.setAccount(owner.getAccountName());
        isoResponse.setDomainId(owner.getDomainId());
        isoResponse.setDomainName(ApiDBUtils.findDomainById(owner.getDomainId()).getName());
    }

    isoResponse.setObjectName("iso");
    return isoResponse;
}
/**
 * Appends one TemplateResponse per host-resident copy of the given
 * (template id, zone id) pair to {@code responses}.
 *
 * Download status is exposed only to admins or the template owner. The zone
 * shown for each entry is derived from the host that stores that copy.
 *
 * @param responses        output list the new entries are appended to
 * @param templateZonePair (template id, zone id) to enumerate copies for
 * @param isAdmin          whether the caller has admin visibility
 * @param account          the caller's account (used for the owner check)
 */
@Override
public void createTemplateResponse(List<TemplateResponse> responses, Pair<Long,Long> templateZonePair, boolean isAdmin, Account account) {
    List<VMTemplateHostVO> templateHostRefsForTemplate = ApiDBUtils.listTemplateHostBy(templateZonePair.first(), templateZonePair.second());
    VMTemplateVO template = ApiDBUtils.findTemplateById(templateZonePair.first());
    for (VMTemplateHostVO templateHostRef : templateHostRefsForTemplate) {
        TemplateResponse templateResponse = new TemplateResponse();
        templateResponse.setId(template.getId());
        templateResponse.setName(template.getName());
        templateResponse.setDisplayText(template.getDisplayText());
        templateResponse.setPublic(template.isPublicTemplate());
        templateResponse.setCreated(templateHostRef.getCreated());
        templateResponse.setReady(templateHostRef.getDownloadState() == Status.DOWNLOADED);
        templateResponse.setFeatured(template.isFeatured());
        templateResponse.setPasswordEnabled(template.getEnablePassword());
        templateResponse.setCrossZones(template.isCrossZones());
        templateResponse.setFormat(template.getFormat());
        if (template.getTemplateType() != null) {
            templateResponse.setTemplateType(template.getTemplateType().toString());
        }
        templateResponse.setHypervisor(template.getHypervisorType().toString());
        GuestOS os = ApiDBUtils.findGuestOSById(template.getGuestOSId());
        if (os != null) {
            templateResponse.setOsTypeId(os.getId());
            templateResponse.setOsTypeName(os.getDisplayName());
        } else {
            templateResponse.setOsTypeId(-1L);
            templateResponse.setOsTypeName("");
        }
        // add account ID and name
        Account owner = ApiDBUtils.findAccountById(template.getAccountId());
        if (owner != null) {
            templateResponse.setAccount(owner.getAccountName());
            templateResponse.setDomainId(owner.getDomainId());
            templateResponse.setDomainName(ApiDBUtils.findDomainById(owner.getDomainId()).getName());
        }
        // The zone shown is that of the host holding this copy.
        // NOTE(review): host/datacenter lookups are assumed non-null - confirm.
        HostVO host = ApiDBUtils.findHostById(templateHostRef.getHostId());
        DataCenterVO datacenter = ApiDBUtils.findZoneById(host.getDataCenterId());
        templateResponse.setZoneId(host.getDataCenterId());
        templateResponse.setZoneName(datacenter.getName());
        // If the user is an admin (or owns the template), add the download status
        if (isAdmin || account.getId() == template.getAccountId()) {
            if (templateHostRef.getDownloadState() != Status.DOWNLOADED) {
                String templateStatus = "Processing";
                if (templateHostRef.getDownloadState() == VMTemplateHostVO.Status.DOWNLOAD_IN_PROGRESS) {
                    if (templateHostRef.getDownloadPercent() == 100) {
                        templateStatus = "Installing Template";
                    } else {
                        templateStatus = templateHostRef.getDownloadPercent() + "% Downloaded";
                    }
                } else {
                    templateStatus = templateHostRef.getErrorString();
                }
                templateResponse.setStatus(templateStatus);
            } else if (templateHostRef.getDownloadState() == VMTemplateHostVO.Status.DOWNLOADED) {
                templateResponse.setStatus("Download Complete");
            } else {
                // NOTE(review): unreachable given the branches above; kept for parity
                templateResponse.setStatus("Successfully Installed");
            }
        }
        // BUG FIX: getSize() returns a boxed Long; guard against null before
        // unboxing in the comparison (previously a latent NullPointerException).
        Long templateSize = templateHostRef.getSize();
        if (templateSize != null && templateSize > 0) {
            templateResponse.setSize(templateSize);
        }
        templateResponse.setObjectName("template");
        responses.add(templateResponse);
    }
}
/**
 * Builds one TemplateResponse per zone for the given template. When zoneId
 * is supplied (and not -1) only that zone is rendered; otherwise all zones.
 *
 * @param template the template to render
 * @param zoneId   optional zone filter; null or -1 means "all zones"
 * @return a ListResponse containing one entry per zone
 */
@Override
public ListResponse<TemplateResponse> createTemplateResponse2(VirtualMachineTemplate template, Long zoneId) {
ListResponse<TemplateResponse> response = new ListResponse<TemplateResponse>();
List<TemplateResponse> responses = new ArrayList<TemplateResponse>();
List<DataCenterVO> zones = null;
// NOTE(review): findZoneById(zoneId) is not null-checked here; a bogus
// zoneId would put null into the list - confirm callers validate it.
if ((zoneId != null) && (zoneId != -1)) {
zones = new ArrayList<DataCenterVO>();
zones.add(ApiDBUtils.findZoneById(zoneId));
} else {
zones = ApiDBUtils.listZones();
}
for (DataCenterVO zone : zones) {
TemplateResponse templateResponse = new TemplateResponse();
templateResponse.setId(template.getId());
templateResponse.setName(template.getName());
templateResponse.setDisplayText(template.getDisplayText());
templateResponse.setPublic(template.isPublicTemplate());
templateResponse.setExtractable(template.isExtractable());
templateResponse.setCrossZones(template.isCrossZones());
// created/ready only known when the template has a host ref in this zone
VMTemplateHostVO isoHostRef = ApiDBUtils.findTemplateHostRef(template.getId(), zone.getId());
if (isoHostRef != null) {
templateResponse.setCreated(isoHostRef.getCreated());
templateResponse.setReady(isoHostRef.getDownloadState() == Status.DOWNLOADED);
}
templateResponse.setFeatured(template.isFeatured());
templateResponse.setPasswordEnabled(template.getEnablePassword());
templateResponse.setFormat(template.getFormat());
templateResponse.setStatus("Processing");
GuestOS os = ApiDBUtils.findGuestOSById(template.getGuestOSId());
if (os != null) {
templateResponse.setOsTypeId(os.getId());
templateResponse.setOsTypeName(os.getDisplayName());
} else {
// sentinel for "guest OS unknown"
templateResponse.setOsTypeId(-1L);
templateResponse.setOsTypeName("");
}
Account owner = ApiDBUtils.findAccountById(template.getAccountId());
if (owner != null) {
templateResponse.setAccountId(owner.getId());
templateResponse.setAccount(owner.getAccountName());
templateResponse.setDomainId(owner.getDomainId());
templateResponse.setDomainName(ApiDBUtils.findDomainById(owner.getDomainId()).getName());
}
templateResponse.setZoneId(zone.getId());
templateResponse.setZoneName(zone.getName());
templateResponse.setHypervisor(template.getHypervisorType().toString());
templateResponse.setObjectName("template");
responses.add(templateResponse);
}
response.setResponses(responses);
return response;
}
/**
 * Builds one "iso" TemplateResponse per zone for the given ISO. When zoneId
 * is supplied (and not -1) only that zone is rendered; otherwise all zones.
 *
 * @param template the ISO to render
 * @param zoneId   optional zone filter; null or -1 means "all zones"
 * @return a ListResponse containing one entry per zone
 */
@Override
public ListResponse<TemplateResponse> createIsoResponses(VirtualMachineTemplate template, Long zoneId) {
ListResponse<TemplateResponse> response = new ListResponse<TemplateResponse>();
List<TemplateResponse> responses = new ArrayList<TemplateResponse>();
List<DataCenterVO> zones = null;
// NOTE(review): findZoneById(zoneId) is not null-checked here - confirm
// callers validate the zone id before calling.
if ((zoneId != null) && (zoneId != -1)) {
zones = new ArrayList<DataCenterVO>();
zones.add(ApiDBUtils.findZoneById(zoneId));
} else {
zones = ApiDBUtils.listZones();
}
for (DataCenterVO zone : zones) {
TemplateResponse templateResponse = new TemplateResponse();
templateResponse.setId(template.getId());
templateResponse.setName(template.getName());
templateResponse.setDisplayText(template.getDisplayText());
templateResponse.setPublic(template.isPublicTemplate());
// created/ready only known when the ISO has a host ref in this zone
VMTemplateHostVO isoHostRef = ApiDBUtils.findTemplateHostRef(template.getId(), zone.getId());
if (isoHostRef != null) {
templateResponse.setCreated(isoHostRef.getCreated());
templateResponse.setReady(isoHostRef.getDownloadState() == Status.DOWNLOADED);
}
templateResponse.setFeatured(template.isFeatured());
templateResponse.setBootable(template.isBootable());
templateResponse.setOsTypeId(template.getGuestOSId());
templateResponse.setOsTypeName(ApiDBUtils.findGuestOSById(template.getGuestOSId()).getDisplayName());
Account owner = ApiDBUtils.findAccountById(template.getAccountId());
if (owner != null) {
templateResponse.setAccountId(owner.getId());
templateResponse.setAccount(owner.getAccountName());
templateResponse.setDomainId(owner.getDomainId());
templateResponse.setDomainName(ApiDBUtils.findDomainById(owner.getDomainId()).getName());
}
templateResponse.setZoneId(zone.getId());
templateResponse.setZoneName(zone.getName());
templateResponse.setObjectName("iso");
responses.add(templateResponse);
}
response.setResponses(responses);
return response;
}
/**
 * Builds "securitygroup" responses for a list of network group rules,
 * first transposing the flat rule rows into per-group result objects,
 * then rendering each group with its ingress rules.
 *
 * @param networkGroups flat rule rows to transpose and render
 * @return a ListResponse of per-group responses
 */
@Override
public ListResponse<NetworkGroupResponse> createNetworkGroupResponses(List<? extends NetworkGroupRules> networkGroups) {
List<NetworkGroupResultObject> groupResultObjs = NetworkGroupResultObject.transposeNetworkGroups(networkGroups);
ListResponse<NetworkGroupResponse> response = new ListResponse<NetworkGroupResponse>();
List<NetworkGroupResponse> netGrpResponses = new ArrayList<NetworkGroupResponse>();
for (NetworkGroupResultObject networkGroup : groupResultObjs) {
NetworkGroupResponse netGrpResponse = new NetworkGroupResponse();
netGrpResponse.setId(networkGroup.getId());
netGrpResponse.setName(networkGroup.getName());
netGrpResponse.setDescription(networkGroup.getDescription());
netGrpResponse.setAccountName(networkGroup.getAccountName());
netGrpResponse.setDomainId(networkGroup.getDomainId());
netGrpResponse.setDomainName(ApiDBUtils.findDomainById(networkGroup.getDomainId()).getName());
List<IngressRuleResultObject> ingressRules = networkGroup.getIngressRules();
if ((ingressRules != null) && !ingressRules.isEmpty()) {
List<IngressRuleResponse> ingressRulesResponse = new ArrayList<IngressRuleResponse>();
for (IngressRuleResultObject ingressRule : ingressRules) {
IngressRuleResponse ingressData = new IngressRuleResponse();
ingressData.setRuleId(ingressRule.getId());
ingressData.setProtocol(ingressRule.getProtocol());
// For ICMP rules the start/end port columns hold the ICMP
// type and code rather than a port range.
if ("icmp".equalsIgnoreCase(ingressRule.getProtocol())) {
ingressData.setIcmpType(ingressRule.getStartPort());
ingressData.setIcmpCode(ingressRule.getEndPort());
} else {
ingressData.setStartPort(ingressRule.getStartPort());
ingressData.setEndPort(ingressRule.getEndPort());
}
// A rule allows either another network group or a CIDR, not both.
if (ingressRule.getAllowedNetworkGroup() != null) {
ingressData.setNetworkGroupName(ingressRule.getAllowedNetworkGroup());
ingressData.setAccountName(ingressRule.getAllowedNetGroupAcct());
} else {
ingressData.setCidr(ingressRule.getAllowedSourceIpCidr());
}
ingressData.setObjectName("ingressrule");
ingressRulesResponse.add(ingressData);
}
netGrpResponse.setIngressRules(ingressRulesResponse);
}
netGrpResponse.setObjectName("securitygroup");
netGrpResponses.add(netGrpResponse);
}
response.setResponses(netGrpResponses);
return response;
}
/**
 * Maps a single {@link NetworkGroup} entity onto its "securitygroup" API
 * response, resolving the domain name from the group's domain id.
 *
 * @param group the network group to render
 * @return the populated NetworkGroupResponse
 */
@Override
public NetworkGroupResponse createNetworkGroupResponse(NetworkGroup group) {
    NetworkGroupResponse groupResponse = new NetworkGroupResponse();
    groupResponse.setId(group.getId());
    groupResponse.setName(group.getName());
    groupResponse.setDescription(group.getDescription());
    groupResponse.setAccountName(group.getAccountName());
    groupResponse.setDomainId(group.getDomainId());
    groupResponse.setDomainName(ApiDBUtils.findDomainById(group.getDomainId()).getName());
    groupResponse.setObjectName("securitygroup");
    return groupResponse;
}
/**
 * Builds the response for a template extract (download) operation, pulling
 * the upload state from the upload record and URL-encoding the slashes in
 * the upload URL.
 *
 * @param uploadId  id of the upload record tracking the extraction
 * @param id        id of the template being extracted
 * @param zoneId    zone the extraction runs in
 * @param accountId owning account id
 * @param mode      extraction mode string
 * @return the populated ExtractResponse
 */
@Override
public ExtractResponse createExtractResponse(Long uploadId, Long id, Long zoneId, Long accountId, String mode) {
    UploadVO upload = ApiDBUtils.findUploadById(uploadId);

    ExtractResponse extractResponse = new ExtractResponse();
    extractResponse.setObjectName("template");
    extractResponse.setId(id);
    extractResponse.setName(ApiDBUtils.findTemplateById(id).getName());
    extractResponse.setZoneId(zoneId);
    extractResponse.setZoneName(ApiDBUtils.findZoneById(zoneId).getName());
    extractResponse.setMode(mode);
    extractResponse.setUploadId(uploadId);
    extractResponse.setState(upload.getUploadState().toString());
    extractResponse.setAccountId(accountId);
    // FIX ME - set the raw url once the gson jar is upgraded; it currently
    // throws an error, so slashes are escaped as %2F instead.
    extractResponse.setUrl(upload.getUploadUrl().replaceAll("/", "%2F"));
    return extractResponse;
}
/**
 * Builds a TemplateResponse for a template copied to {@code destZoneId}.
 *
 * Detailed download status is shown only to admins (including a null caller
 * account) or the template owner, and only when a host ref exists in the
 * destination zone.
 *
 * @param template   the copied template; must not be null
 * @param destZoneId the destination zone
 * @return the populated TemplateResponse
 * @throws ServerApiException when template is null (the copy failed)
 */
@Override
public TemplateResponse createTemplateResponse(VirtualMachineTemplate template, Long destZoneId) {
    TemplateResponse templateResponse = new TemplateResponse();
    if (template != null) {
        templateResponse.setId(template.getId());
        templateResponse.setName(template.getName());
        templateResponse.setDisplayText(template.getDisplayText());
        templateResponse.setPublic(template.isPublicTemplate());
        templateResponse.setBootable(template.isBootable());
        templateResponse.setFeatured(template.isFeatured());
        templateResponse.setCrossZones(template.isCrossZones());
        templateResponse.setCreated(template.getCreated());
        templateResponse.setFormat(template.getFormat());
        templateResponse.setPasswordEnabled(template.getEnablePassword());
        templateResponse.setZoneId(destZoneId);
        templateResponse.setZoneName(ApiDBUtils.findZoneById(destZoneId).getName());
        GuestOS os = ApiDBUtils.findGuestOSById(template.getGuestOSId());
        if (os != null) {
            templateResponse.setOsTypeId(os.getId());
            templateResponse.setOsTypeName(os.getDisplayName());
        } else {
            templateResponse.setOsTypeId(-1L);
            templateResponse.setOsTypeName("");
        }
        // add account ID and name
        Account owner = ApiDBUtils.findAccountById(template.getAccountId());
        if (owner != null) {
            templateResponse.setAccount(owner.getAccountName());
            templateResponse.setDomainId(owner.getDomainId());
            templateResponse.setDomainName(ApiDBUtils.findDomainById(owner.getDomainId()).getName());
        }
        // set status: a null caller account is treated as admin
        Account account = UserContext.current().getAccount();
        boolean isAdmin = false;
        if ((account == null) || (account.getType() == Account.ACCOUNT_TYPE_ADMIN) || (account.getType() == Account.ACCOUNT_TYPE_DOMAIN_ADMIN)) {
            isAdmin = true;
        }
        VMTemplateHostVO templateHostRef = ApiDBUtils.findTemplateHostRef(template.getId(), destZoneId);
        // BUG FIX: templateHostRef was null-checked for setReady() below but
        // dereferenced unconditionally here, which could NPE when no host ref
        // exists yet. The status block is now guarded as well. (The old final
        // "Successfully Installed" else-branch was unreachable and is folded
        // into the else.)
        if (templateHostRef != null && (isAdmin || template.getAccountId() == account.getId())) {
            if (templateHostRef.getDownloadState() != Status.DOWNLOADED) {
                String templateStatus = "Processing";
                if (templateHostRef.getDownloadState() == VMTemplateHostVO.Status.DOWNLOAD_IN_PROGRESS) {
                    if (templateHostRef.getDownloadPercent() == 100) {
                        templateStatus = "Installing Template";
                    } else {
                        templateStatus = templateHostRef.getDownloadPercent() + "% Downloaded";
                    }
                } else {
                    templateStatus = templateHostRef.getErrorString();
                }
                templateResponse.setStatus(templateStatus);
            } else {
                templateResponse.setStatus("Download Complete");
            }
        }
        templateResponse.setReady(templateHostRef != null && templateHostRef.getDownloadState() == VMTemplateHostVO.Status.DOWNLOADED);
    } else {
        throw new ServerApiException(BaseCmd.INTERNAL_ERROR, "Failed to copy template");
    }
    templateResponse.setObjectName("template");
    return templateResponse;
}
/**
 * Builds an "iso" TemplateResponse for an ISO copied to {@code destZoneId}.
 *
 * Mirrors createTemplateResponse(template, destZoneId): detailed download
 * status is shown only to admins (including a null caller account) or the
 * ISO owner, and only when a host ref exists in the destination zone.
 *
 * @param iso        the copied ISO; must not be null
 * @param destZoneId the destination zone
 * @return the populated TemplateResponse tagged as "iso"
 * @throws ServerApiException when iso is null (the copy failed)
 */
@Override
public TemplateResponse createIsoResponse3(VirtualMachineTemplate iso, Long destZoneId) {
    TemplateResponse isoResponse = new TemplateResponse();
    if (iso != null) {
        isoResponse.setId(iso.getId());
        isoResponse.setName(iso.getName());
        isoResponse.setDisplayText(iso.getDisplayText());
        isoResponse.setPublic(iso.isPublicTemplate());
        isoResponse.setBootable(iso.isBootable());
        isoResponse.setFeatured(iso.isFeatured());
        isoResponse.setCrossZones(iso.isCrossZones());
        isoResponse.setCreated(iso.getCreated());
        isoResponse.setZoneId(destZoneId);
        isoResponse.setZoneName(ApiDBUtils.findZoneById(destZoneId).getName());
        GuestOS os = ApiDBUtils.findGuestOSById(iso.getGuestOSId());
        if (os != null) {
            isoResponse.setOsTypeId(os.getId());
            isoResponse.setOsTypeName(os.getDisplayName());
        } else {
            isoResponse.setOsTypeId(-1L);
            isoResponse.setOsTypeName("");
        }
        // add account ID and name
        Account owner = ApiDBUtils.findAccountById(iso.getAccountId());
        if (owner != null) {
            isoResponse.setAccount(owner.getAccountName());
            isoResponse.setDomainId(owner.getDomainId());
            isoResponse.setDomainName(ApiDBUtils.findDomainById(owner.getDomainId()).getName());
        }
        // set status: a null caller account is treated as admin
        Account account = UserContext.current().getAccount();
        boolean isAdmin = false;
        if ((account == null) || (account.getType() == Account.ACCOUNT_TYPE_ADMIN) || (account.getType() == Account.ACCOUNT_TYPE_DOMAIN_ADMIN)) {
            isAdmin = true;
        }
        VMTemplateHostVO templateHostRef = ApiDBUtils.findTemplateHostRef(iso.getId(), destZoneId);
        // BUG FIX: templateHostRef was dereferenced without a null check both
        // in the status block and in setReady() (unlike the sibling template
        // method, which at least guarded setReady). Both are now guarded.
        // (The old final "Successfully Installed" else-branch was unreachable
        // and is folded into the else.)
        if (templateHostRef != null && (isAdmin || iso.getAccountId() == account.getId())) {
            if (templateHostRef.getDownloadState() != Status.DOWNLOADED) {
                String templateStatus = "Processing";
                if (templateHostRef.getDownloadState() == VMTemplateHostVO.Status.DOWNLOAD_IN_PROGRESS) {
                    if (templateHostRef.getDownloadPercent() == 100) {
                        templateStatus = "Installing Template";
                    } else {
                        templateStatus = templateHostRef.getDownloadPercent() + "% Downloaded";
                    }
                } else {
                    templateStatus = templateHostRef.getErrorString();
                }
                isoResponse.setStatus(templateStatus);
            } else {
                isoResponse.setStatus("Download Complete");
            }
        }
        isoResponse.setReady(templateHostRef != null && templateHostRef.getDownloadState() == VMTemplateHostVO.Status.DOWNLOADED);
    } else {
        throw new ServerApiException(BaseCmd.INTERNAL_ERROR, "Failed to copy iso");
    }
    isoResponse.setObjectName("iso");
    return isoResponse;
}
/**
 * Serializes a create-command response via {@link ApiResponseSerializer}.
 */
@Override
public String toSerializedString(CreateCmdResponse response, String responseType) {
return ApiResponseSerializer.toSerializedString(response, responseType);
}
/**
 * Maps an {@link AsyncJob} entity onto its API response, deserializing the
 * stored job result back into a ResponseObject.
 *
 * @param job the async job to render
 * @return the populated AsyncJobResponse
 */
@Override
public AsyncJobResponse createAsyncJobResponse(AsyncJob job) {
    AsyncJobResponse response = new AsyncJobResponse();
    // identity and ownership
    response.setId(job.getId());
    response.setAccountId(job.getAccountId());
    response.setUserId(job.getUserId());
    // command and target instance
    response.setCmd(job.getCmd());
    response.setCreated(job.getCreated());
    response.setJobInstanceId(job.getInstanceId());
    response.setJobInstanceType(job.getInstanceType().toString());
    // progress and outcome; the persisted result string is deserialized back
    // into a ResponseObject for the caller
    response.setJobProcStatus(job.getProcessStatus());
    response.setJobResult((ResponseObject) ApiSerializerHelper.fromSerializedString(job.getResult()));
    response.setJobResultCode(job.getResultCode());
    response.setJobStatus(job.getStatus());
    response.setObjectName("asyncjobs");
    return response;
}
/**
 * Builds a TemplateResponse for a template created from either a snapshot
 * or a volume; the source volume determines which zone's host ref is used
 * for the created/ready fields.
 *
 * @param template   the newly created template
 * @param snapshotId source snapshot id, or null when created from a volume
 * @param volumeId   source volume id (used when snapshotId is null)
 * @return the populated TemplateResponse
 */
@Override
public TemplateResponse createTemplateResponse(VirtualMachineTemplate template, Long snapshotId, Long volumeId) {
    TemplateResponse response = new TemplateResponse();
    response.setId(template.getId());
    response.setName(template.getName());
    response.setDisplayText(template.getDisplayText());
    response.setPublic(template.isPublicTemplate());
    response.setPasswordEnabled(template.getEnablePassword());
    response.setCrossZones(template.isCrossZones());
    // Resolve the source volume (directly, or via the snapshot's volume) so
    // we know which zone the template lives in.
    VolumeVO volume = null;
    if (snapshotId != null) {
        Snapshot snapshot = ApiDBUtils.findSnapshotById(snapshotId);
        volume = findVolumeById(snapshot.getVolumeId());
    } else {
        volume = findVolumeById(volumeId);
    }
    VMTemplateHostVO templateHostRef = ApiDBUtils.findTemplateHostRef(template.getId(), volume.getDataCenterId());
    // BUG FIX: getCreated() was dereferenced one line before the null check
    // that setReady() performed, which could NPE when no host ref exists
    // yet. Both accesses are now guarded by a single null check.
    if (templateHostRef != null) {
        response.setCreated(templateHostRef.getCreated());
        response.setReady(templateHostRef.getDownloadState() == Status.DOWNLOADED);
    } else {
        response.setReady(false);
    }
    GuestOS os = ApiDBUtils.findGuestOSById(template.getGuestOSId());
    if (os != null) {
        response.setOsTypeId(os.getId());
        response.setOsTypeName(os.getDisplayName());
    } else {
        response.setOsTypeId(-1L);
        response.setOsTypeName("");
    }
    Account owner = ApiDBUtils.findAccountById(template.getAccountId());
    if (owner != null) {
        response.setAccount(owner.getAccountName());
        response.setDomainId(owner.getDomainId());
        response.setDomainName(ApiDBUtils.findDomainById(owner.getDomainId()).getName());
    }
    DataCenter zone = ApiDBUtils.findZoneById(volume.getDataCenterId());
    if (zone != null) {
        response.setZoneId(zone.getId());
        response.setZoneName(zone.getName());
    }
    response.setObjectName("template");
    return response;
}
/**
 * Maps an {@link Event} entity onto its API response, resolving the domain
 * name from the event's domain id and the username from its user id.
 *
 * @param event the event to render
 * @return the populated EventResponse
 */
@Override
public EventResponse createEventResponse(Event event) {
    EventResponse eventResponse = new EventResponse();
    eventResponse.setId(event.getId());
    eventResponse.setEventType(event.getType());
    eventResponse.setDescription(event.getDescription());
    eventResponse.setLevel(event.getLevel());
    eventResponse.setState(event.getState());
    eventResponse.setParentId(event.getStartId());
    eventResponse.setCreated(event.getCreateDate());
    eventResponse.setAccountName(event.getAccountName());
    eventResponse.setDomainId(event.getDomainId());
    eventResponse.setDomainName(ApiDBUtils.findDomainById(event.getDomainId()).getName());
    // username is best-effort: the user row may no longer exist
    User eventUser = ApiDBUtils.findUserById(event.getUserId());
    if (eventUser != null) {
        eventResponse.setUsername(eventUser.getUsername());
    }
    eventResponse.setObjectName("event");
    return eventResponse;
}
/**
 * Builds "iso" responses for a set of (iso id, zone id) pairs.
 *
 * PERHOST ISOs are rendered once as always-ready entries; all other ISOs
 * get one entry per host-resident copy, with download status exposed to
 * admins or the ISO owner.
 *
 * @param isoZonePairSet (iso id, zone id) pairs to render
 * @param isAdmin        whether the caller has admin visibility
 * @param account        the caller's account (used for the owner check)
 * @return a ListResponse of "iso" entries
 */
@Override
public ListResponse<TemplateResponse> createIsoResponse(Set<Pair<Long,Long>> isoZonePairSet, boolean isAdmin, Account account) {
    ListResponse<TemplateResponse> response = new ListResponse<TemplateResponse>();
    List<TemplateResponse> isoResponses = new ArrayList<TemplateResponse>();
    for (Pair<Long,Long> isoZonePair : isoZonePairSet) {
        VMTemplateVO iso = ApiDBUtils.findTemplateById(isoZonePair.first());
        // PERHOST ISOs live on every host; render a single, always-ready entry
        if (iso.getTemplateType() == TemplateType.PERHOST) {
            TemplateResponse isoResponse = new TemplateResponse();
            isoResponse.setId(iso.getId());
            isoResponse.setName(iso.getName());
            isoResponse.setDisplayText(iso.getDisplayText());
            isoResponse.setPublic(iso.isPublicTemplate());
            isoResponse.setReady(true);
            isoResponse.setBootable(iso.isBootable());
            isoResponse.setFeatured(iso.isFeatured());
            isoResponse.setCrossZones(iso.isCrossZones());
            isoResponse.setPublic(iso.isPublicTemplate());
            isoResponse.setObjectName("iso");
            isoResponses.add(isoResponse);
            response.setResponses(isoResponses);
            continue;
        }
        // one entry per host copy of this ISO in the requested zone
        List<VMTemplateHostVO> isoHosts = ApiDBUtils.listTemplateHostBy(iso.getId(), isoZonePair.second());
        for (VMTemplateHostVO isoHost : isoHosts) {
            TemplateResponse isoResponse = new TemplateResponse();
            isoResponse.setId(iso.getId());
            isoResponse.setName(iso.getName());
            isoResponse.setDisplayText(iso.getDisplayText());
            isoResponse.setPublic(iso.isPublicTemplate());
            isoResponse.setCreated(isoHost.getCreated());
            isoResponse.setReady(isoHost.getDownloadState() == Status.DOWNLOADED);
            isoResponse.setBootable(iso.isBootable());
            isoResponse.setFeatured(iso.isFeatured());
            isoResponse.setCrossZones(iso.isCrossZones());
            isoResponse.setPublic(iso.isPublicTemplate());
            GuestOS os = ApiDBUtils.findGuestOSById(iso.getGuestOSId());
            if (os != null) {
                isoResponse.setOsTypeId(os.getId());
                isoResponse.setOsTypeName(os.getDisplayName());
            } else {
                isoResponse.setOsTypeId(-1L);
                isoResponse.setOsTypeName("");
            }
            // add account ID and name
            Account owner = ApiDBUtils.findAccountById(iso.getAccountId());
            if (owner != null) {
                isoResponse.setAccount(owner.getAccountName());
                isoResponse.setDomainId(owner.getDomainId());
                isoResponse.setDomainName(ApiDBUtils.findDomainById(owner.getDomainId()).getName());
            }
            // the zone shown is that of the host holding this copy
            HostVO host = ApiDBUtils.findHostById(isoHost.getHostId());
            DataCenterVO datacenter = ApiDBUtils.findZoneById(host.getDataCenterId());
            isoResponse.setZoneId(host.getDataCenterId());
            isoResponse.setZoneName(datacenter.getName());
            // If the user is an admin (or owns the ISO), add the download status
            if (isAdmin || account.getId() == iso.getAccountId()) {
                if (isoHost.getDownloadState() != Status.DOWNLOADED) {
                    String isoStatus = "Processing";
                    // NOTE(review): the DOWNLOADED check below is unreachable
                    // inside this branch; kept for parity with the original.
                    if (isoHost.getDownloadState() == VMTemplateHostVO.Status.DOWNLOADED) {
                        isoStatus = "Download Complete";
                    } else if (isoHost.getDownloadState() == VMTemplateHostVO.Status.DOWNLOAD_IN_PROGRESS) {
                        if (isoHost.getDownloadPercent() == 100) {
                            isoStatus = "Installing ISO";
                        } else {
                            isoStatus = isoHost.getDownloadPercent() + "% Downloaded";
                        }
                    } else {
                        isoStatus = isoHost.getErrorString();
                    }
                    isoResponse.setStatus(isoStatus);
                } else {
                    isoResponse.setStatus("Successfully Installed");
                }
            }
            // BUG FIX: getSize() returns a boxed Long; guard against null
            // before unboxing (previously a latent NullPointerException).
            Long isoSize = isoHost.getSize();
            if (isoSize != null && isoSize > 0) {
                isoResponse.setSize(isoSize);
            }
            isoResponse.setObjectName("iso");
            isoResponses.add(isoResponse);
        }
    }
    response.setResponses(isoResponses);
    return response;
}
/**
 * Collapses raw per-host/per-pool capacity rows into one CapacityVO per
 * "capacityType_zone[_pod]" key. Pod-scoped rows are additionally rolled up
 * into a zone-wide pseudo-pod bucket keyed with pod id -1. Storage pools
 * that are neither NFS nor iSCSI are excluded from the sums.
 */
private List<CapacityVO> sumCapacities(List<? extends Capacity> hostCapacities) {
    Map<String, Long> totalCapacityMap = new HashMap<String, Long>();
    Map<String, Long> usedCapacityMap = new HashMap<String, Long>();

    // Identify storage pools whose capacity should be ignored.
    Set<Long> poolIdsToIgnore = new HashSet<Long>();
    Criteria c = new Criteria();
    List<? extends StoragePoolVO> allStoragePools = ApiDBUtils.searchForStoragePools(c);
    for (StoragePoolVO pool : allStoragePools) {
        StoragePoolType poolType = pool.getPoolType();
        if (!(poolType.equals(StoragePoolType.NetworkFilesystem) || poolType.equals(StoragePoolType.IscsiLUN))) {
            poolIdsToIgnore.add(pool.getId());
        }
    }

    // Accumulate total/used per key (and per zone-wide pseudo-pod for pod rows).
    for (Capacity capacity : hostCapacities) {
        if (poolIdsToIgnore.contains(capacity.getHostOrPoolId())) {
            continue;
        }
        String key = capacity.getCapacityType() + "_" + capacity.getDataCenterId();
        String keyForPodTotal = key + "_-1";
        boolean sumPodCapacity = false;
        if (capacity.getPodId() != null) {
            key += "_" + capacity.getPodId();
            sumPodCapacity = true;
        }
        accumulateCapacity(totalCapacityMap, key, capacity.getTotalCapacity());
        accumulateCapacity(usedCapacityMap, key, capacity.getUsedCapacity());
        if (sumPodCapacity) {
            accumulateCapacity(totalCapacityMap, keyForPodTotal, capacity.getTotalCapacity());
            accumulateCapacity(usedCapacityMap, keyForPodTotal, capacity.getUsedCapacity());
        }
    }

    // Rebuild CapacityVOs by parsing the "type_zone[_pod]" keys back apart.
    List<CapacityVO> summedCapacities = new ArrayList<CapacityVO>();
    for (String key : totalCapacityMap.keySet()) {
        CapacityVO summedCapacity = new CapacityVO();
        StringTokenizer st = new StringTokenizer(key, "_");
        summedCapacity.setCapacityType(Short.parseShort(st.nextToken()));
        summedCapacity.setDataCenterId(Long.parseLong(st.nextToken()));
        if (st.hasMoreTokens()) {
            summedCapacity.setPodId(Long.parseLong(st.nextToken()));
        }
        summedCapacity.setTotalCapacity(totalCapacityMap.get(key));
        summedCapacity.setUsedCapacity(usedCapacityMap.get(key));
        summedCapacities.add(summedCapacity);
    }
    return summedCapacities;
}

/**
 * Adds delta to the running sum stored under key, treating an absent entry
 * as zero. Replaces the duplicated get/null-check/put blocks of the
 * original and uses Long.valueOf instead of the Long constructor.
 */
private static void accumulateCapacity(Map<String, Long> map, String key, long delta) {
    Long current = map.get(key);
    map.put(key, (current == null) ? Long.valueOf(delta) : Long.valueOf(current.longValue() + delta));
}
@Override
public List<CapacityResponse> createCapacityResponse(List<? extends Capacity> result, DecimalFormat format) {
    // Sum the raw capacity rows, then map each aggregate into a response object.
    List<CapacityResponse> capacityResponses = new ArrayList<CapacityResponse>();
    for (CapacityVO summed : sumCapacities(result)) {
        CapacityResponse cr = new CapacityResponse();
        cr.setCapacityType(summed.getCapacityType());
        cr.setCapacityTotal(summed.getTotalCapacity());
        cr.setCapacityUsed(summed.getUsedCapacity());
        Long podId = summed.getPodId();
        if (podId != null) {
            cr.setPodId(podId);
            // Pod id <= 0 is the zone-wide pseudo-pod produced by sumCapacities.
            if (podId > 0) {
                cr.setPodName(ApiDBUtils.findPodById(podId).getName());
            } else {
                cr.setPodName("All");
            }
        }
        cr.setZoneId(summed.getDataCenterId());
        cr.setZoneName(ApiDBUtils.findZoneById(summed.getDataCenterId()).getName());
        // Percent used, formatted by the caller-supplied DecimalFormat;
        // guard against division by zero for empty totals.
        if (summed.getTotalCapacity() == 0) {
            cr.setPercentUsed(format.format(0L));
        } else {
            float percent = (float) summed.getUsedCapacity() / (float) summed.getTotalCapacity() * 100f;
            cr.setPercentUsed(format.format(percent));
        }
        cr.setObjectName("capacity");
        capacityResponses.add(cr);
    }
    return capacityResponses;
}
@Override
public TemplatePermissionsResponse createTemplatePermissionsResponse(List<String> accountNames, Long id, boolean isAdmin) {
    // Resolve the template and, for admins only, the domain of its owning account
    // (we only have the template id here, so the owner must be looked up).
    VirtualMachineTemplate template = ApiDBUtils.findTemplateById(id);
    Long ownerDomainId = null;
    if (isAdmin) {
        Account templateOwner = ApiDBUtils.findAccountById(template.getAccountId());
        if (templateOwner != null) {
            ownerDomainId = templateOwner.getDomainId();
        }
    }

    TemplatePermissionsResponse response = new TemplatePermissionsResponse();
    response.setId(template.getId());
    response.setPublicTemplate(template.isPublicTemplate());
    if (isAdmin && (ownerDomainId != null)) {
        response.setDomainId(ownerDomainId);
    }
    response.setAccountNames(accountNames);
    response.setObjectName("templatepermission");
    return response;
}
@Override
public AsyncJobResponse queryJobResult(QueryAsyncJobResultCmd cmd) throws InvalidParameterValueException {
    // Fetch the async job's current state and translate it into a response.
    AsyncJobResult result = ApiDBUtils._asyncMgr.queryAsyncJobResult(cmd);

    AsyncJobResponse response = new AsyncJobResponse();
    response.setId(result.getJobId());
    response.setJobStatus(result.getJobStatus());
    response.setJobProcStatus(result.getProcessStatus());
    response.setJobResultCode(result.getResultCode());
    response.setJobResult((ResponseObject) ApiSerializerHelper.fromSerializedString(result.getResult()));

    // Classify the result payload as plain text (primitives, numbers, strings,
    // dates) versus a structured object, so clients know how to render it.
    Object resultObject = result.getResultObject();
    if (resultObject != null) {
        Class<?> clz = resultObject.getClass();
        boolean textual = clz.isPrimitive() || clz.getSuperclass() == Number.class
                || clz == String.class || clz == Date.class;
        response.setJobResultType(textual ? "text" : "object");
    }
    return response;
}
@Override
public NetworkGroupResponse createNetworkGroupResponseFromIngressRule(List<? extends IngressRule> ingressRules) {
    NetworkGroupResponse response = new NetworkGroupResponse();
    // No rules means nothing to describe; return the bare response unchanged.
    if (ingressRules == null || ingressRules.isEmpty()) {
        return response;
    }

    // All rules in the list belong to the same network group, so the group
    // metadata is taken from the first rule.
    NetworkGroup group = ApiDBUtils.findNetworkGroupById(ingressRules.get(0).getNetworkGroupId());
    response.setId(group.getId());
    response.setName(group.getName());
    response.setDescription(group.getDescription());
    response.setAccountName(group.getAccountName());
    response.setDomainId(group.getDomainId());
    response.setDomainName(ApiDBUtils.findDomainById(group.getDomainId()).getName());

    List<IngressRuleResponse> ruleResponses = new ArrayList<IngressRuleResponse>();
    for (IngressRule rule : ingressRules) {
        IngressRuleResponse ruleData = new IngressRuleResponse();
        ruleData.setRuleId(rule.getId());
        String protocol = rule.getProtocol();
        ruleData.setProtocol(protocol);
        // For ICMP the start/end "ports" actually carry the ICMP type and code.
        if ("icmp".equalsIgnoreCase(protocol)) {
            ruleData.setIcmpType(rule.getStartPort());
            ruleData.setIcmpCode(rule.getEndPort());
        } else {
            ruleData.setStartPort(rule.getStartPort());
            ruleData.setEndPort(rule.getEndPort());
        }
        // A rule allows traffic either from another network group or a CIDR.
        if (rule.getAllowedNetworkGroup() != null) {
            ruleData.setNetworkGroupName(rule.getAllowedNetworkGroup());
            ruleData.setAccountName(rule.getAllowedNetGrpAcct());
        } else {
            ruleData.setCidr(rule.getAllowedSourceIpCidr());
        }
        ruleData.setObjectName("ingressrule");
        ruleResponses.add(ruleData);
    }
    response.setIngressRules(ruleResponses);
    response.setObjectName("networkgroup");
    return response;
}
@Override
public NetworkOfferingResponse createNetworkOfferingResponse(NetworkOffering offering) {
    // Field-for-field mapping of a network offering to its API response form.
    NetworkOfferingResponse resp = new NetworkOfferingResponse();
    resp.setId(offering.getId());
    resp.setName(offering.getName());
    resp.setDisplayText(offering.getDisplayText());
    resp.setTags(offering.getTags());
    resp.setTrafficType(offering.getTrafficType().toString());
    // Guest IP type is optional on the offering.
    if (offering.getGuestIpType() != null) {
        resp.setType(offering.getGuestIpType().toString());
    }
    resp.setMaxconnections(offering.getConcurrentConnections());
    resp.setIsDefault(offering.isDefault());
    resp.setSpecifyVlan(offering.getSpecifyVlan());
    resp.setAvailability(offering.getAvailability().toString());
    resp.setObjectName("networkoffering");
    return resp;
}
@Override
public NetworkResponse createNetworkResponse(Network network) {
    NetworkResponse response = new NetworkResponse();
    response.setId(network.getId());
    response.setName(network.getName());
    response.setDisplaytext(network.getDisplayText());
    if (network.getBroadcastDomainType() != null) {
        response.setBroadcastDomainType(network.getBroadcastDomainType().toString());
    }
    if (network.getBroadcastUri() != null) {
        response.setBroadcastUri(network.getBroadcastUri().toString());
    }
    if (network.getTrafficType() != null) {
        response.setTrafficType(network.getTrafficType().name());
    }
    if (network.getGuestType() != null) {
        response.setType(network.getGuestType().name());
    }

    // Derive start/end IP, gateway, netmask and VLAN id from the network's
    // first associated VLAN, if any.
    List<? extends Vlan> vlan = ApiDBUtils.listVlanByNetworkId(network.getId());
    if (vlan != null && !vlan.isEmpty()) {
        Vlan singleVlan = vlan.get(0);
        String ipRange = singleVlan.getIpRange();
        // Guard added: the original indexed range[1] unconditionally and threw
        // ArrayIndexOutOfBoundsException when the range had no "-" separator
        // (and NPE when it was null entirely).
        if (ipRange != null) {
            String[] range = ipRange.split("-");
            response.setStartIp(range[0]);
            if (range.length > 1) {
                response.setEndIp(range[1]);
            }
        }
        response.setGateway(singleVlan.getVlanGateway());
        response.setNetmask(singleVlan.getVlanNetmask());
        response.setVlan(singleVlan.getVlanId());
    }
    response.setZoneId(network.getDataCenterId());

    // Populate network offering information.
    NetworkOffering networkOffering = ApiDBUtils.findNetworkOfferingById(network.getNetworkOfferingId());
    if (networkOffering != null) {
        response.setNetworkOfferingId(networkOffering.getId());
        response.setNetworkOfferingName(networkOffering.getName());
        response.setNetworkOfferingDisplayText(networkOffering.getDisplayText());
        response.setIsSystem(networkOffering.isSystemOnly());
        response.setNetworkOfferingAvailability(networkOffering.getAvailability().toString());
    }

    response.setIsShared(network.isShared());
    response.setState(network.getState().toString());
    response.setRelated(network.getRelated());
    response.setDns1(network.getDns1());
    response.setDns2(network.getDns2());

    // Owning account and its domain.
    Account account = ApiDBUtils.findAccountById(network.getAccountId());
    if (account != null) {
        response.setAccountName(account.getAccountName());
        Domain domain = ApiDBUtils.findDomainById(account.getDomainId());
        response.setDomainId(domain.getId());
        response.setDomain(domain.getName());
    }
    response.setObjectName("network");
    return response;
}
}
|
server/src/com/cloud/api/ApiResponseHelper.java
|
/**
* Copyright (C) 2010 Cloud.com, Inc. All rights reserved.
*
* This software is licensed under the GNU General Public License v3 or later.
*
* It is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or any later version.
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package com.cloud.api;
import java.text.DecimalFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.StringTokenizer;
import org.GNOME.Accessibility._AccessibleStub;
import org.apache.log4j.Logger;
import com.cloud.api.commands.QueryAsyncJobResultCmd;
import com.cloud.api.response.AccountResponse;
import com.cloud.api.response.ApiResponseSerializer;
import com.cloud.api.response.AsyncJobResponse;
import com.cloud.api.response.CapacityResponse;
import com.cloud.api.response.ClusterResponse;
import com.cloud.api.response.ConfigurationResponse;
import com.cloud.api.response.CreateCmdResponse;
import com.cloud.api.response.DiskOfferingResponse;
import com.cloud.api.response.DomainResponse;
import com.cloud.api.response.DomainRouterResponse;
import com.cloud.api.response.EventResponse;
import com.cloud.api.response.ExtractResponse;
import com.cloud.api.response.FirewallRuleResponse;
import com.cloud.api.response.HostResponse;
import com.cloud.api.response.IPAddressResponse;
import com.cloud.api.response.IngressRuleResponse;
import com.cloud.api.response.InstanceGroupResponse;
import com.cloud.api.response.IpForwardingRuleResponse;
import com.cloud.api.response.ListResponse;
import com.cloud.api.response.LoadBalancerResponse;
import com.cloud.api.response.NetworkGroupResponse;
import com.cloud.api.response.NetworkOfferingResponse;
import com.cloud.api.response.NetworkResponse;
import com.cloud.api.response.NicResponse;
import com.cloud.api.response.PodResponse;
import com.cloud.api.response.PreallocatedLunResponse;
import com.cloud.api.response.RemoteAccessVpnResponse;
import com.cloud.api.response.ResourceLimitResponse;
import com.cloud.api.response.ServiceOfferingResponse;
import com.cloud.api.response.SnapshotPolicyResponse;
import com.cloud.api.response.SnapshotResponse;
import com.cloud.api.response.StoragePoolResponse;
import com.cloud.api.response.SystemVmResponse;
import com.cloud.api.response.TemplatePermissionsResponse;
import com.cloud.api.response.TemplateResponse;
import com.cloud.api.response.UserResponse;
import com.cloud.api.response.UserVmResponse;
import com.cloud.api.response.VlanIpRangeResponse;
import com.cloud.api.response.VolumeResponse;
import com.cloud.api.response.VpnUsersResponse;
import com.cloud.api.response.ZoneResponse;
import com.cloud.async.AsyncJob;
import com.cloud.async.AsyncJobResult;
import com.cloud.async.executor.IngressRuleResultObject;
import com.cloud.async.executor.NetworkGroupResultObject;
import com.cloud.capacity.Capacity;
import com.cloud.capacity.CapacityVO;
import com.cloud.configuration.Configuration;
import com.cloud.configuration.ResourceCount.ResourceType;
import com.cloud.configuration.ResourceLimit;
import com.cloud.dc.ClusterVO;
import com.cloud.dc.DataCenter;
import com.cloud.dc.DataCenterVO;
import com.cloud.dc.HostPodVO;
import com.cloud.dc.Pod;
import com.cloud.dc.Vlan;
import com.cloud.dc.Vlan.VlanType;
import com.cloud.dc.VlanVO;
import com.cloud.domain.Domain;
import com.cloud.event.Event;
import com.cloud.exception.InvalidParameterValueException;
import com.cloud.host.Host;
import com.cloud.host.HostStats;
import com.cloud.host.HostVO;
import com.cloud.network.IpAddress;
import com.cloud.network.Network;
import com.cloud.network.RemoteAccessVpn;
import com.cloud.network.VpnUser;
import com.cloud.network.Networks.TrafficType;
import com.cloud.network.router.VirtualRouter;
import com.cloud.network.rules.FirewallRule;
import com.cloud.network.rules.LoadBalancer;
import com.cloud.network.rules.PortForwardingRule;
import com.cloud.network.security.IngressRule;
import com.cloud.network.security.NetworkGroup;
import com.cloud.network.security.NetworkGroupRules;
import com.cloud.offering.DiskOffering;
import com.cloud.offering.NetworkOffering;
import com.cloud.offering.NetworkOffering.GuestIpType;
import com.cloud.offering.ServiceOffering;
import com.cloud.org.Cluster;
import com.cloud.server.Criteria;
import com.cloud.storage.DiskOfferingVO;
import com.cloud.storage.GuestOS;
import com.cloud.storage.GuestOSCategoryVO;
import com.cloud.storage.Snapshot;
import com.cloud.storage.Snapshot.Type;
import com.cloud.storage.Storage.ImageFormat;
import com.cloud.storage.Storage.StoragePoolType;
import com.cloud.storage.Storage.TemplateType;
import com.cloud.storage.StoragePool;
import com.cloud.storage.StoragePoolVO;
import com.cloud.storage.StorageStats;
import com.cloud.storage.UploadVO;
import com.cloud.storage.VMTemplateHostVO;
import com.cloud.storage.VMTemplateStorageResourceAssoc.Status;
import com.cloud.storage.VMTemplateVO;
import com.cloud.storage.Volume;
import com.cloud.storage.VolumeVO;
import com.cloud.storage.preallocatedlun.PreallocatedLunVO;
import com.cloud.storage.snapshot.SnapshotPolicy;
import com.cloud.template.VirtualMachineTemplate;
import com.cloud.test.PodZoneConfig;
import com.cloud.user.Account;
import com.cloud.user.AccountVO;
import com.cloud.user.User;
import com.cloud.user.UserAccount;
import com.cloud.user.UserContext;
import com.cloud.user.UserStatisticsVO;
import com.cloud.uservm.UserVm;
import com.cloud.utils.Pair;
import com.cloud.utils.net.NetUtils;
import com.cloud.vm.ConsoleProxyVO;
import com.cloud.vm.InstanceGroup;
import com.cloud.vm.InstanceGroupVO;
import com.cloud.vm.Nic;
import com.cloud.vm.SecondaryStorageVmVO;
import com.cloud.vm.State;
import com.cloud.vm.SystemVm;
import com.cloud.vm.UserVmVO;
import com.cloud.vm.VMInstanceVO;
import com.cloud.vm.VirtualMachine;
import com.cloud.vm.VmStats;
public class ApiResponseHelper implements ResponseGenerator {
public final Logger s_logger = Logger.getLogger(ApiResponseHelper.class);
@Override
public UserResponse createUserResponse(UserAccount user) {
    // Builds the API representation of a user from a UserAccount, which
    // already carries the account-level fields (name, type, domain id), so
    // no separate Account lookup is needed -- contrast createUserResponse(User).
    UserResponse userResponse = new UserResponse();
    userResponse.setAccountName(user.getAccountName());
    userResponse.setAccountType(user.getType());
    userResponse.setCreated(user.getCreated());
    userResponse.setDomainId(user.getDomainId());
    userResponse.setDomainName(ApiDBUtils.findDomainById(user.getDomainId()).getName());
    userResponse.setEmail(user.getEmail());
    userResponse.setFirstname(user.getFirstname());
    userResponse.setId(user.getId());
    userResponse.setLastname(user.getLastname());
    userResponse.setState(user.getState());
    userResponse.setTimezone(user.getTimezone());
    userResponse.setUsername(user.getUsername());
    // NOTE(review): the API key and secret key are copied into the response;
    // confirm every caller of this method is entitled to see these credentials.
    userResponse.setApiKey(user.getApiKey());
    userResponse.setSecretKey(user.getSecretKey());
    userResponse.setObjectName("user");
    return userResponse;
}
@Override
public UserResponse createUserResponse(User user) {
    // A bare User record lacks account-level fields, so resolve its Account first.
    Account owningAccount = ApiDBUtils.findAccountById(user.getAccountId());

    UserResponse userResponse = new UserResponse();
    userResponse.setId(user.getId());
    userResponse.setUsername(user.getUsername());
    userResponse.setFirstname(user.getFirstname());
    userResponse.setLastname(user.getLastname());
    userResponse.setEmail(user.getEmail());
    userResponse.setCreated(user.getCreated());
    userResponse.setState(user.getState());
    userResponse.setTimezone(user.getTimezone());
    userResponse.setApiKey(user.getApiKey());
    userResponse.setSecretKey(user.getSecretKey());
    // Account/domain fields come from the resolved owning account.
    userResponse.setAccountName(owningAccount.getAccountName());
    userResponse.setAccountType(owningAccount.getType());
    userResponse.setDomainId(owningAccount.getDomainId());
    userResponse.setDomainName(ApiDBUtils.findDomainById(owningAccount.getDomainId()).getName());
    userResponse.setObjectName("user");
    return userResponse;
}
//this method is used for response generation via createAccount (which creates an account + user)
@Override
public UserResponse createUserAccountResponse(UserAccount user) {
    // Same field mapping as createUserResponse(UserAccount); only the
    // serialized object name differs ("account" instead of "user"), because
    // the createAccount API reports the combined account+user as an account.
    UserResponse userResponse = new UserResponse();
    userResponse.setAccountName(user.getAccountName());
    userResponse.setAccountType(user.getType());
    userResponse.setCreated(user.getCreated());
    userResponse.setDomainId(user.getDomainId());
    userResponse.setDomainName(ApiDBUtils.findDomainById(user.getDomainId()).getName());
    userResponse.setEmail(user.getEmail());
    userResponse.setFirstname(user.getFirstname());
    userResponse.setId(user.getId());
    userResponse.setLastname(user.getLastname());
    userResponse.setState(user.getState());
    userResponse.setTimezone(user.getTimezone());
    userResponse.setUsername(user.getUsername());
    // NOTE(review): includes the API/secret keys; verify callers may see them.
    userResponse.setApiKey(user.getApiKey());
    userResponse.setSecretKey(user.getSecretKey());
    userResponse.setObjectName("account");
    return userResponse;
}
@Override
public AccountResponse createAccountResponse(Account account) {
    boolean accountIsAdmin = (account.getType() == Account.ACCOUNT_TYPE_ADMIN);
    AccountResponse accountResponse = new AccountResponse();
    accountResponse.setId(account.getId());
    accountResponse.setName(account.getAccountName());
    accountResponse.setAccountType(account.getType());
    accountResponse.setDomainId(account.getDomainId());
    accountResponse.setDomainName(ApiDBUtils.findDomainById(account.getDomainId()).getName());
    accountResponse.setState(account.getState());

    // Aggregate network traffic across all of the account's statistics rows.
    List<UserStatisticsVO> stats = ApiDBUtils.listUserStatsBy(account.getId());
    if (stats == null) {
        throw new ServerApiException(BaseCmd.INTERNAL_ERROR, "Internal error searching for user stats");
    }
    long bytesSent = 0L;
    long bytesReceived = 0L;
    for (UserStatisticsVO stat : stats) {
        bytesReceived += stat.getNetBytesReceived() + stat.getCurrentBytesReceived();
        bytesSent += stat.getNetBytesSent() + stat.getCurrentBytesSent();
    }
    accountResponse.setBytesReceived(bytesReceived);
    accountResponse.setBytesSent(bytesSent);

    // Resource limits and counts. The five resource types share identical
    // display logic, extracted into limitToDisplay()/availToDisplay() below
    // (the original repeated the same three lines per resource type).
    Long vmLimit = ApiDBUtils.findCorrectResourceLimit(ResourceType.user_vm, account.getId());
    Long vmTotal = ApiDBUtils.getResourceCount(ResourceType.user_vm, account.getId());
    accountResponse.setVmLimit(limitToDisplay(accountIsAdmin, vmLimit));
    accountResponse.setVmTotal(vmTotal);
    accountResponse.setVmAvailable(availToDisplay(accountIsAdmin, vmLimit, vmTotal));

    Long ipLimit = ApiDBUtils.findCorrectResourceLimit(ResourceType.public_ip, account.getId());
    Long ipTotal = ApiDBUtils.getResourceCount(ResourceType.public_ip, account.getId());
    accountResponse.setIpLimit(limitToDisplay(accountIsAdmin, ipLimit));
    accountResponse.setIpTotal(ipTotal);
    accountResponse.setIpAvailable(availToDisplay(accountIsAdmin, ipLimit, ipTotal));

    Long volumeLimit = ApiDBUtils.findCorrectResourceLimit(ResourceType.volume, account.getId());
    Long volumeTotal = ApiDBUtils.getResourceCount(ResourceType.volume, account.getId());
    accountResponse.setVolumeLimit(limitToDisplay(accountIsAdmin, volumeLimit));
    accountResponse.setVolumeTotal(volumeTotal);
    accountResponse.setVolumeAvailable(availToDisplay(accountIsAdmin, volumeLimit, volumeTotal));

    Long snapshotLimit = ApiDBUtils.findCorrectResourceLimit(ResourceType.snapshot, account.getId());
    Long snapshotTotal = ApiDBUtils.getResourceCount(ResourceType.snapshot, account.getId());
    accountResponse.setSnapshotLimit(limitToDisplay(accountIsAdmin, snapshotLimit));
    accountResponse.setSnapshotTotal(snapshotTotal);
    accountResponse.setSnapshotAvailable(availToDisplay(accountIsAdmin, snapshotLimit, snapshotTotal));

    Long templateLimit = ApiDBUtils.findCorrectResourceLimit(ResourceType.template, account.getId());
    Long templateTotal = ApiDBUtils.getResourceCount(ResourceType.template, account.getId());
    accountResponse.setTemplateLimit(limitToDisplay(accountIsAdmin, templateLimit));
    accountResponse.setTemplateTotal(templateTotal);
    accountResponse.setTemplateAvailable(availToDisplay(accountIsAdmin, templateLimit, templateTotal));

    // Count this account's stopped and running VMs.
    int vmStopped = 0;
    int vmRunning = 0;
    Long[] accountIds = new Long[] { account.getId() };
    Criteria c1 = new Criteria();
    c1.addCriteria(Criteria.ACCOUNTID, accountIds);
    List<? extends UserVm> virtualMachines = ApiDBUtils.searchForUserVMs(c1);
    for (UserVm vm : virtualMachines) {
        if (vm.getState() == State.Stopped) {
            vmStopped++;
        } else if (vm.getState() == State.Running) {
            vmRunning++;
        }
    }
    accountResponse.setVmStopped(vmStopped);
    accountResponse.setVmRunning(vmRunning);
    accountResponse.setObjectName("account");
    return accountResponse;
}

/** "Unlimited" for admin accounts or a -1 (unlimited) limit; the limit otherwise. */
private static String limitToDisplay(boolean accountIsAdmin, long limit) {
    return (accountIsAdmin || limit == -1) ? "Unlimited" : String.valueOf(limit);
}

/** "Unlimited" for admin accounts or a -1 limit; remaining headroom otherwise. */
private static String availToDisplay(boolean accountIsAdmin, long limit, long total) {
    return (accountIsAdmin || limit == -1) ? "Unlimited" : String.valueOf(limit - total);
}
@Override
public DomainResponse createDomainResponse(Domain domain) {
    // Maps a domain to its API response, including parent linkage and a
    // has-children flag for tree rendering.
    DomainResponse resp = new DomainResponse();
    resp.setId(domain.getId());
    resp.setDomainName(domain.getName());
    resp.setLevel(domain.getLevel());
    Long parentId = domain.getParent();
    resp.setParentDomainId(parentId);
    if (parentId != null) {
        resp.setParentDomainName(ApiDBUtils.findDomainById(parentId).getName());
    }
    if (domain.getChildCount() > 0) {
        resp.setHasChild(true);
    }
    resp.setObjectName("domain");
    return resp;
}
@Override
public DiskOfferingResponse createDiskOfferingResponse(DiskOffering offering) {
    // Maps a disk offering to its API response form.
    DiskOfferingResponse diskOfferingResponse = new DiskOfferingResponse();
    diskOfferingResponse.setId(offering.getId());
    diskOfferingResponse.setName(offering.getName());
    diskOfferingResponse.setDisplayText(offering.getDisplayText());
    diskOfferingResponse.setCreated(offering.getCreated());
    diskOfferingResponse.setDiskSize(offering.getDiskSize());
    // Domain fields only apply to domain-scoped offerings (null domain id = global).
    if (offering.getDomainId() != null) {
        diskOfferingResponse.setDomain(ApiDBUtils.findDomainById(offering.getDomainId()).getName());
        diskOfferingResponse.setDomainId(offering.getDomainId());
    }
    diskOfferingResponse.setTags(offering.getTags());
    diskOfferingResponse.setCustomized(offering.isCustomized());
    diskOfferingResponse.setObjectName("diskoffering");
    return diskOfferingResponse;
}
@Override
public ResourceLimitResponse createResourceLimitResponse(ResourceLimit limit) {
    ResourceLimitResponse resourceLimitResponse = new ResourceLimitResponse();
    // Domain-scoped limit: report the domain directly.
    if (limit.getDomainId() != null) {
        resourceLimitResponse.setDomainId(limit.getDomainId());
        resourceLimitResponse.setDomainName(ApiDBUtils.findDomainById(limit.getDomainId()).getName());
    }
    // Account-scoped limit: report the account and its owning domain.
    // If both ids are set, the account's domain overwrites the one above,
    // matching the original precedence.
    if (limit.getAccountId() != null) {
        Account accountTemp = ApiDBUtils.findAccountById(limit.getAccountId());
        if (accountTemp != null) {
            resourceLimitResponse.setAccountName(accountTemp.getAccountName());
            resourceLimitResponse.setDomainId(accountTemp.getDomainId());
            resourceLimitResponse.setDomainName(ApiDBUtils.findDomainById(accountTemp.getDomainId()).getName());
        }
    }
    // Simplified from Integer.valueOf(...).toString(): same string, no boxing.
    resourceLimitResponse.setResourceType(String.valueOf(limit.getType().ordinal()));
    resourceLimitResponse.setMax(limit.getMax());
    resourceLimitResponse.setObjectName("resourcelimit");
    return resourceLimitResponse;
}
@Override
public ServiceOfferingResponse createServiceOfferingResponse(ServiceOffering offering) {
    // Maps a compute (service) offering to its API response form.
    ServiceOfferingResponse resp = new ServiceOfferingResponse();
    resp.setId(offering.getId());
    resp.setName(offering.getName());
    resp.setDisplayText(offering.getDisplayText());
    resp.setCreated(offering.getCreated());
    resp.setCpuNumber(offering.getCpu());
    resp.setCpuSpeed(offering.getSpeed());
    resp.setMemory(offering.getRamSize());
    resp.setStorageType(offering.getUseLocalStorage() ? "local" : "shared");
    resp.setOfferHa(offering.getOfferHA());
    resp.setUseVirtualNetwork(offering.getGuestIpType().equals(GuestIpType.Virtual));
    resp.setTags(offering.getTags());
    // Domain fields only apply to domain-scoped offerings.
    Long domainId = offering.getDomainId();
    if (domainId != null) {
        resp.setDomainId(domainId);
        resp.setDomain(ApiDBUtils.findDomainById(domainId).getName());
    }
    resp.setObjectName("serviceoffering");
    return resp;
}
@Override
public ConfigurationResponse createConfigurationResponse(Configuration cfg) {
    // Straight field-for-field mapping of a configuration entry.
    ConfigurationResponse resp = new ConfigurationResponse();
    resp.setName(cfg.getName());
    resp.setCategory(cfg.getCategory());
    resp.setValue(cfg.getValue());
    resp.setDescription(cfg.getDescription());
    resp.setObjectName("configuration");
    return resp;
}
@Override
public SnapshotResponse createSnapshotResponse(Snapshot snapshot) {
    SnapshotResponse snapshotResponse = new SnapshotResponse();
    snapshotResponse.setId(snapshot.getId());

    // Owner details. (The explicit Long.valueOf(...) boxing in the original
    // was redundant; autoboxing covers the long -> Long conversion.)
    Account acct = ApiDBUtils.findAccountById(snapshot.getAccountId());
    if (acct != null) {
        snapshotResponse.setAccountName(acct.getAccountName());
        snapshotResponse.setDomainId(acct.getDomainId());
        snapshotResponse.setDomainName(ApiDBUtils.findDomainById(acct.getDomainId()).getName());
    }

    // Source volume details; the lookup may return null, hence the guard.
    VolumeVO volume = findVolumeById(snapshot.getVolumeId());
    snapshotResponse.setVolumeId(snapshot.getVolumeId());
    if (volume != null) {
        snapshotResponse.setVolumeName(volume.getName());
        snapshotResponse.setVolumeType(volume.getVolumeType().name());
    }

    // NOTE(review): indexing Type.values() assumes getSnapshotType() is always
    // a valid ordinal; an out-of-range value would throw
    // ArrayIndexOutOfBoundsException -- confirm the stored values are bounded.
    String snapshotTypeStr = Type.values()[snapshot.getSnapshotType()].name();
    snapshotResponse.setSnapshotType(snapshotTypeStr);

    snapshotResponse.setCreated(snapshot.getCreated());
    snapshotResponse.setName(snapshot.getName());
    snapshotResponse.setIntervalType(ApiDBUtils.getSnapshotIntervalTypes(snapshot.getId()));
    snapshotResponse.setObjectName("snapshot");
    return snapshotResponse;
}
@Override
public SnapshotPolicyResponse createSnapshotPolicyResponse(SnapshotPolicy policy) {
    // Maps a recurring-snapshot policy to its API response form.
    SnapshotPolicyResponse policyResponse = new SnapshotPolicyResponse();
    policyResponse.setId(policy.getId());
    policyResponse.setVolumeId(policy.getVolumeId());
    policyResponse.setSchedule(policy.getSchedule());
    policyResponse.setIntervalType(policy.getInterval());
    // maxSnaps presumably caps retained snapshots per policy -- confirm with SnapshotPolicy.
    policyResponse.setMaxSnaps(policy.getMaxSnaps());
    policyResponse.setTimezone(policy.getTimezone());
    policyResponse.setObjectName("snapshotpolicy");
    return policyResponse;
}
/**
 * Builds the API response for a host: identity, placement, capacity and
 * utilization statistics, plus the state-transition events currently
 * applicable to it.
 *
 * @param host the host entity to translate
 * @return the populated HostResponse
 */
@Override
public HostResponse createHostResponse(Host host) {
    HostResponse hostResponse = new HostResponse();
    hostResponse.setId(host.getId());
    hostResponse.setCapabilities(host.getCapabilities());
    hostResponse.setClusterId(host.getClusterId());
    hostResponse.setCpuNumber(host.getCpus());
    hostResponse.setZoneId(host.getDataCenterId());
    hostResponse.setDisconnectedOn(host.getDisconnectedOn());
    hostResponse.setHypervisor(host.getHypervisorType());
    hostResponse.setHostType(host.getType());
    hostResponse.setLastPinged(new Date(host.getLastPinged()));
    hostResponse.setManagementServerId(host.getManagementServerId());
    hostResponse.setName(host.getName());
    hostResponse.setPodId(host.getPodId());
    hostResponse.setRemoved(host.getRemoved());
    hostResponse.setCpuSpeed(host.getSpeed());
    hostResponse.setState(host.getStatus());
    hostResponse.setIpAddress(host.getPrivateIpAddress());
    hostResponse.setVersion(host.getVersion());
    hostResponse.setCreated(host.getCreated());

    GuestOSCategoryVO guestOSCategory = ApiDBUtils.getHostGuestOSCategory(host.getId());
    if (guestOSCategory != null) {
        hostResponse.setOsCategoryId(guestOSCategory.getId());
        hostResponse.setOsCategoryName(guestOSCategory.getName());
    }
    hostResponse.setZoneName(ApiDBUtils.findZoneById(host.getDataCenterId()).getName());
    if (host.getPodId() != null) {
        hostResponse.setPodName(ApiDBUtils.findPodById(host.getPodId()).getName());
    }

    DecimalFormat decimalFormat = new DecimalFormat("#.##");
    // CPU allocated: sum of (cores * speed) over the user VMs on this host,
    // expressed as a percentage of the host's total CPU capacity.
    if ((host.getCpus() != null) && (host.getSpeed() != null)) {
        int cpu = 0;
        List<UserVmVO> instances = ApiDBUtils.listUserVMsByHostId(host.getId());
        for (UserVmVO vm : instances) {
            ServiceOffering so = ApiDBUtils.findServiceOfferingById(vm.getServiceOfferingId());
            cpu += so.getCpu() * so.getSpeed();
        }
        String cpuAlloc = decimalFormat.format(((float) cpu / (float) (host.getCpus() * host.getSpeed())) * 100f) + "%";
        hostResponse.setCpuAllocated(cpuAlloc);
    }

    // CPU actually utilized, plus load and network throughput figures.
    HostStats hostStats = ApiDBUtils.getHostStatistics(host.getId());
    if (hostStats != null) {
        float cpuUtil = (float) hostStats.getCpuUtilization();
        hostResponse.setCpuUsed(decimalFormat.format(cpuUtil) + "%");
        // BUG FIX: these previously used Double.doubleToLongBits(), which
        // returns the raw IEEE-754 bit pattern of the double (a meaningless
        // huge number), not its numeric value. Truncate to long instead.
        hostResponse.setAverageLoad((long) hostStats.getAverageLoad());
        hostResponse.setNetworkKbsRead((long) hostStats.getNetworkReadKBs());
        hostResponse.setNetworkKbsWrite((long) hostStats.getNetworkWriteKBs());
    }

    if (host.getType() == Host.Type.Routing) {
        hostResponse.setMemoryTotal(host.getTotalMemory());
        // calculate memory allocated by systemVM and userVm
        Long mem = ApiDBUtils.getMemoryUsagebyHost(host.getId());
        hostResponse.setMemoryAllocated(mem);
        hostResponse.setMemoryUsed(mem);
    } else if (host.getType().toString().equals("Storage")) {
        hostResponse.setDiskSizeTotal(host.getTotalSize());
        hostResponse.setDiskSizeAllocated(0L);
    }

    if (host.getClusterId() != null) {
        ClusterVO cluster = ApiDBUtils.findClusterById(host.getClusterId());
        hostResponse.setClusterName(cluster.getName());
    }
    hostResponse.setLocalStorageActive(ApiDBUtils.isLocalStorageActiveOnHost(host));

    // Render the possible state-transition events as a "; "-separated list.
    Set<com.cloud.host.Status.Event> possibleEvents = host.getStatus().getPossibleEvents();
    if ((possibleEvents != null) && !possibleEvents.isEmpty()) {
        StringBuilder events = new StringBuilder();
        for (com.cloud.host.Status.Event event : possibleEvents) {
            if (events.length() > 0) {
                events.append("; ");
            }
            events.append(event.toString());
        }
        hostResponse.setEvents(events.toString());
    }
    hostResponse.setObjectName("host");
    return hostResponse;
}
/**
 * Builds the API response for a VLAN IP range: pod placement,
 * gateway/netmask, the start/end addresses of the range, and the owning
 * account of the associated network when there is one.
 *
 * @param vlan the VLAN entity to translate
 * @return the populated VlanIpRangeResponse
 */
@Override
public VlanIpRangeResponse createVlanIpRangeResponse(Vlan vlan) {
    Long podId = ApiDBUtils.getPodIdForVlan(vlan.getId());
    VlanIpRangeResponse vlanResponse = new VlanIpRangeResponse();
    vlanResponse.setId(vlan.getId());
    vlanResponse.setForVirtualNetwork(vlan.getVlanType().equals(VlanType.VirtualNetwork));
    vlanResponse.setVlan(vlan.getVlanId());
    vlanResponse.setZoneId(vlan.getDataCenterId());
    if (podId != null) {
        HostPodVO pod = ApiDBUtils.findPodById(podId);
        vlanResponse.setPodId(podId);
        vlanResponse.setPodName(pod.getName());
    }
    vlanResponse.setGateway(vlan.getVlanGateway());
    vlanResponse.setNetmask(vlan.getVlanNetmask());
    // get start ip and end ip of corresponding vlan
    String ipRange = vlan.getIpRange();
    String[] range = ipRange.split("-");
    vlanResponse.setStartIp(range[0]);
    // ROBUSTNESS FIX: a range with no "-" separator produces a single-element
    // split result; the previous unconditional range[1] access threw
    // ArrayIndexOutOfBoundsException. Report an empty end IP instead, as
    // createPodResponse already does for the same pattern.
    vlanResponse.setEndIp((range.length > 1) ? range[1] : "");
    Long networkId = vlan.getNetworkId();
    if (networkId != null) {
        vlanResponse.setNetworkId(networkId);
        Network network = ApiDBUtils.findNetworkById(networkId);
        if (network != null) {
            // Set account information when the network has an owner.
            Long accountId = network.getAccountId();
            if (accountId != null) {
                Account account = ApiDBUtils.findAccountById(accountId);
                vlanResponse.setAccountName(account.getAccountName());
                vlanResponse.setDomainId(account.getDomainId());
                vlanResponse.setDomainName(ApiDBUtils.findDomainById(account.getDomainId()).getName());
            }
        }
    }
    vlanResponse.setObjectName("vlan");
    return vlanResponse;
}
/**
 * Maps an allocated public IP address onto its API response. VLAN details
 * are included only for admin callers.
 *
 * @param ipAddress the IP address entity to translate
 * @return the populated IPAddressResponse
 */
@Override
public IPAddressResponse createIPAddressResponse(IpAddress ipAddress) {
    VlanVO vlan = ApiDBUtils.findVlanById(ipAddress.getVlanId());
    boolean isVirtualNetwork = vlan.getVlanType().equals(VlanType.VirtualNetwork);
    IPAddressResponse response = new IPAddressResponse();
    response.setIpAddress(ipAddress.getAddress());
    if (ipAddress.getAllocatedTime() != null) {
        response.setAllocated(ipAddress.getAllocatedTime());
    }
    response.setZoneId(ipAddress.getDataCenterId());
    response.setZoneName(ApiDBUtils.findZoneById(ipAddress.getDataCenterId()).getName());
    response.setSourceNat(ipAddress.isSourceNat());

    // Owning account / domain details.
    Account owner = ApiDBUtils.findAccountById(ipAddress.getAllocatedToAccountId());
    if (owner != null) {
        response.setAccountName(owner.getAccountName());
        response.setDomainId(owner.getDomainId());
        response.setDomainName(ApiDBUtils.findDomainById(owner.getDomainId()).getName());
    }
    response.setForVirtualNetwork(isVirtualNetwork);
    response.setStaticNat(ipAddress.isOneToOneNat());

    // VLAN information is shown to admins only.
    Account caller = UserContext.current().getAccount();
    if ((caller == null) || caller.getType() == Account.ACCOUNT_TYPE_ADMIN) {
        response.setVlanId(ipAddress.getVlanId());
        response.setVlanName(ApiDBUtils.findVlanById(ipAddress.getVlanId()).getVlanId());
    }
    response.setObjectName("ipaddress");
    return response;
}
/**
 * Maps a {@link LoadBalancer} rule onto its API response representation,
 * including the owning account when it can be resolved.
 *
 * @param loadBalancer the load balancer rule to translate
 * @return the populated LoadBalancerResponse
 */
@Override
public LoadBalancerResponse createLoadBalancerResponse(LoadBalancer loadBalancer) {
    LoadBalancerResponse response = new LoadBalancerResponse();
    response.setId(loadBalancer.getId());
    response.setName(loadBalancer.getName());
    response.setDescription(loadBalancer.getDescription());
    response.setPublicIp(loadBalancer.getSourceIpAddress().toString());
    response.setPublicPort(Integer.toString(loadBalancer.getSourcePortStart()));
    response.setPrivatePort(Integer.toString(loadBalancer.getDefaultPortStart()));
    response.setAlgorithm(loadBalancer.getAlgorithm());

    // Owning account / domain details.
    Account owner = ApiDBUtils.findAccountById(loadBalancer.getAccountId());
    if (owner != null) {
        response.setAccountName(owner.getAccountName());
        response.setDomainId(owner.getDomainId());
        response.setDomainName(ApiDBUtils.findDomainById(owner.getDomainId()).getName());
    }
    response.setObjectName("loadbalancer");
    return response;
}
/**
 * Maps a {@link Pod} onto its API response. The pod's IP range is stored in
 * its description as "start-end"; a missing or empty description yields an
 * empty end IP.
 *
 * @param pod the pod to translate
 * @return the populated PodResponse
 */
@Override
public PodResponse createPodResponse(Pod pod) {
    String description = pod.getDescription();
    String[] ipRange;
    if (description != null && description.length() > 0) {
        ipRange = description.split("-");
    } else {
        ipRange = new String[2];
        ipRange[0] = description;
    }
    PodResponse response = new PodResponse();
    response.setId(pod.getId());
    response.setName(pod.getName());
    response.setZoneId(pod.getDataCenterId());
    response.setZoneName(PodZoneConfig.getZoneName(pod.getDataCenterId()));
    response.setNetmask(NetUtils.getCidrNetmask(pod.getCidrSize()));
    response.setStartIp(ipRange[0]);
    response.setEndIp(((ipRange.length > 1) && (ipRange[1] != null)) ? ipRange[1] : "");
    response.setGateway(pod.getGateway());
    response.setObjectName("pod");
    return response;
}
/**
 * Maps a data center (zone) onto its API response. DNS, VLAN and guest
 * CIDR details are exposed only to admin callers.
 *
 * @param dataCenter the zone to translate
 * @return the populated ZoneResponse
 */
@Override
public ZoneResponse createZoneResponse(DataCenter dataCenter) {
    Account caller = UserContext.current().getAccount();
    ZoneResponse response = new ZoneResponse();
    response.setId(dataCenter.getId());
    response.setName(dataCenter.getName());

    // Skip descriptions that are literally the string "null".
    String description = dataCenter.getDescription();
    if ((description != null) && !description.equalsIgnoreCase("null")) {
        response.setDescription(description);
    }

    // Network configuration details are admin-only.
    boolean isAdmin = (caller == null) || (caller.getType() == Account.ACCOUNT_TYPE_ADMIN);
    if (isAdmin) {
        response.setDns1(dataCenter.getDns1());
        response.setDns2(dataCenter.getDns2());
        response.setInternalDns1(dataCenter.getInternalDns1());
        response.setInternalDns2(dataCenter.getInternalDns2());
        response.setVlan(dataCenter.getVnet());
        response.setGuestCidrAddress(dataCenter.getGuestNetworkCidr());
    }
    response.setDomain(dataCenter.getDomain());
    response.setDomainId(dataCenter.getDomainId());
    response.setType(dataCenter.getNetworkType().toString());
    response.setObjectName("zone");
    return response;
}
/**
 * Builds the API response for a volume: identity, zone, attachment,
 * ownership, storage placement and offering details.
 *
 * @param volume the volume entity to translate
 * @return the populated VolumeResponse
 */
@Override
public VolumeResponse createVolumeResponse(Volume volume) {
    VolumeResponse volResponse = new VolumeResponse();
    volResponse.setId(volume.getId());
    if (volume.getName() != null) {
        volResponse.setName(volume.getName());
    } else {
        // Unnamed volumes are reported with an empty name rather than null.
        volResponse.setName("");
    }
    volResponse.setZoneId(volume.getDataCenterId());
    volResponse.setZoneName(ApiDBUtils.findZoneById(volume.getDataCenterId()).getName());
    volResponse.setVolumeType(volume.getVolumeType().toString());
    volResponse.setDeviceId(volume.getDeviceId());
    // VM attachment details, only when the volume is attached to an instance.
    Long instanceId = volume.getInstanceId();
    if (instanceId != null) {
        VMInstanceVO vm = ApiDBUtils.findVMInstanceById(instanceId);
        volResponse.setVirtualMachineId(vm.getId());
        volResponse.setVirtualMachineName(vm.getName());
        // NOTE(review): display name is populated from getName(), not a
        // separate display-name accessor — confirm this is intentional.
        volResponse.setVirtualMachineDisplayName(vm.getName());
        volResponse.setVirtualMachineState(vm.getState().toString());
    }
    // Show the virtual size of the volume
    volResponse.setSize(volume.getSize());
    volResponse.setCreated(volume.getCreated());
    volResponse.setState(volume.getStatus().toString());
    // Owner account / domain details.
    Account accountTemp = ApiDBUtils.findAccountById(volume.getAccountId());
    if (accountTemp != null) {
        volResponse.setAccountName(accountTemp.getAccountName());
        volResponse.setDomainId(accountTemp.getDomainId());
        volResponse.setDomainName(ApiDBUtils.findDomainById(accountTemp.getDomainId()).getName());
    }
    // Derive the storage type: a volume with no pool yet is either still
    // being allocated (reported "shared" so the UI can attach it) or in an
    // unknown state; otherwise ask whether its pool is shared or local.
    String storageType;
    try {
        if (volume.getPoolId() == null) {
            if (volume.getState() == Volume.State.Allocated) {
                /* set it as shared, so the UI can attach it to VM */
                storageType = "shared";
            } else {
                storageType = "unknown";
            }
        } else {
            storageType = ApiDBUtils.volumeIsOnSharedStorage(volume.getId()) ? "shared" : "local";
        }
    } catch (InvalidParameterValueException e) {
        s_logger.error(e.getMessage(), e);
        throw new ServerApiException(BaseCmd.INTERNAL_ERROR, "Volume " + volume.getName() + " does not have a valid ID");
    }
    volResponse.setStorageType(storageType);
    // ROOT volumes report their offering under the service-offering fields;
    // data volumes report it under the disk-offering fields.
    if (volume.getVolumeType().equals(Volume.VolumeType.ROOT)) {
        volResponse.setServiceOfferingId(volume.getDiskOfferingId());
    } else {
        volResponse.setDiskOfferingId(volume.getDiskOfferingId());
    }
    DiskOfferingVO diskOffering = ApiDBUtils.findDiskOfferingById(volume.getDiskOfferingId());
    if (volume.getVolumeType().equals(Volume.VolumeType.ROOT)) {
        volResponse.setServiceOfferingName(diskOffering.getName());
        volResponse.setServiceOfferingDisplayText(diskOffering.getDisplayText());
    } else {
        volResponse.setDiskOfferingName(diskOffering.getName());
        volResponse.setDiskOfferingDisplayText(diskOffering.getDisplayText());
    }
    Long poolId = volume.getPoolId();
    String poolName = (poolId == null) ? "none" : ApiDBUtils.findStoragePoolById(poolId).getName();
    volResponse.setStoragePoolName(poolName);
    volResponse.setSourceId(volume.getSourceId());
    if (volume.getSourceType() != null) {
        volResponse.setSourceType(volume.getSourceType().toString());
    }
    volResponse.setHypervisor(ApiDBUtils.getVolumeHyperType(volume.getId()).toString());
    volResponse.setAttached(volume.getAttached());
    volResponse.setDestroyed(volume.getDestroyed());
    volResponse.setObjectName("volume");
    return volResponse;
}
/**
 * Maps an {@link InstanceGroup} onto its API response representation.
 *
 * @param group the instance group to translate
 * @return the populated InstanceGroupResponse
 */
@Override
public InstanceGroupResponse createInstanceGroupResponse(InstanceGroup group) {
    InstanceGroupResponse response = new InstanceGroupResponse();
    response.setId(group.getId());
    response.setName(group.getName());
    response.setCreated(group.getCreated());

    // Owning account / domain details.
    Account owner = ApiDBUtils.findAccountById(group.getAccountId());
    if (owner != null) {
        response.setAccountName(owner.getAccountName());
        response.setDomainId(owner.getDomainId());
        response.setDomainName(ApiDBUtils.findDomainById(owner.getDomainId()).getName());
    }
    response.setObjectName("instancegroup");
    return response;
}
/**
 * Maps a preallocated LUN record onto its API response representation.
 *
 * @param result a {@code PreallocatedLunVO} instance (passed as Object by
 *               the interface)
 * @return the populated PreallocatedLunResponse
 */
@Override
public PreallocatedLunResponse createPreallocatedLunResponse(Object result) {
    PreallocatedLunVO lun = (PreallocatedLunVO) result;
    PreallocatedLunResponse response = new PreallocatedLunResponse();
    response.setId(lun.getId());
    response.setVolumeId(lun.getVolumeId());
    response.setZoneId(lun.getDataCenterId());
    response.setLun(lun.getLun());
    response.setPortal(lun.getPortal());
    response.setSize(lun.getSize());
    response.setTaken(lun.getTaken());
    response.setTargetIqn(lun.getTargetIqn());
    response.setObjectName("preallocatedlun");
    return response;
}
/**
 * Builds the API response for a storage pool: identity, placement
 * (zone/pod/cluster) and disk capacity/usage figures.
 *
 * @param pool the storage pool to translate
 * @return the populated StoragePoolResponse
 */
@Override
public StoragePoolResponse createStoragePoolResponse(StoragePool pool) {
    StoragePoolResponse poolResponse = new StoragePoolResponse();
    poolResponse.setId(pool.getId());
    poolResponse.setName(pool.getName());
    poolResponse.setState(pool.getStatus());
    poolResponse.setPath(pool.getPath());
    poolResponse.setIpAddress(pool.getHostAddress());
    poolResponse.setZoneId(pool.getDataCenterId());
    poolResponse.setZoneName(ApiDBUtils.findZoneById(pool.getDataCenterId()).getName());
    if (pool.getPoolType() != null) {
        poolResponse.setType(pool.getPoolType().toString());
    }
    if (pool.getPodId() != null) {
        poolResponse.setPodId(pool.getPodId());
        poolResponse.setPodName(ApiDBUtils.findPodById(pool.getPodId()).getName());
    }
    if (pool.getCreated() != null) {
        poolResponse.setCreated(pool.getCreated());
    }
    // Prefer live statistics for the "used" figure; fall back to
    // capacity - available from the pool record itself.
    // NOTE(review): capacity and available are boxed Longs and the
    // subtraction below unboxes them — this would NPE if either is null.
    // Confirm the pool record always carries both values.
    StorageStats stats = ApiDBUtils.getStoragePoolStatistics(pool.getId());
    Long capacity = pool.getCapacityBytes();
    Long available = pool.getAvailableBytes();
    Long used = capacity - available;
    if (stats != null) {
        used = stats.getByteUsed();
        available = capacity - used;
    }
    poolResponse.setDiskSizeTotal(pool.getCapacityBytes());
    poolResponse.setDiskSizeAllocated(used);
    if (pool.getClusterId() != null) {
        ClusterVO cluster = ApiDBUtils.findClusterById(pool.getClusterId());
        poolResponse.setClusterId(cluster.getId());
        poolResponse.setClusterName(cluster.getName());
    }
    poolResponse.setTags(ApiDBUtils.getStoragePoolTags(pool.getId()));
    poolResponse.setObjectName("storagepool");
    return poolResponse;
}
/**
 * Maps a {@link Cluster} onto its API response, resolving the names of its
 * enclosing pod and zone.
 *
 * @param cluster the cluster to translate
 * @return the populated ClusterResponse
 */
@Override
public ClusterResponse createClusterResponse(Cluster cluster) {
    ClusterResponse response = new ClusterResponse();
    response.setId(cluster.getId());
    response.setName(cluster.getName());
    response.setPodId(cluster.getPodId());
    response.setZoneId(cluster.getDataCenterId());
    HostPodVO pod = ApiDBUtils.findPodById(cluster.getPodId());
    response.setPodName(pod.getName());
    DataCenterVO zone = ApiDBUtils.findZoneById(cluster.getDataCenterId());
    response.setZoneName(zone.getName());
    response.setObjectName("cluster");
    return response;
}
/**
 * Builds the API response for a port-forwarding (firewall) rule, including
 * the target VM when the rule's endpoints and VM are resolvable. Rules in
 * the Revoke state are reported as "Deleting".
 *
 * @param fwRule the rule to translate
 * @return the populated FirewallRuleResponse
 */
@Override
public FirewallRuleResponse createFirewallRuleResponse(PortForwardingRule fwRule) {
    FirewallRuleResponse response = new FirewallRuleResponse();
    response.setId(fwRule.getId());
    response.setPrivatePort(Integer.toString(fwRule.getDestinationPortStart()));
    response.setProtocol(fwRule.getProtocol());
    response.setPublicPort(Integer.toString(fwRule.getSourcePortStart()));
    // BUG FIX: the source IP was previously dereferenced unconditionally
    // (getSourceIpAddress().toString()) BEFORE the null check guarding it,
    // so a null source IP would NPE before the guard ever ran. Guard the
    // dereference itself.
    if (fwRule.getSourceIpAddress() != null) {
        response.setPublicIpAddress(fwRule.getSourceIpAddress().toString());
        if (fwRule.getDestinationIpAddress() != null) {
            UserVm vm = ApiDBUtils.findUserVmById(fwRule.getVirtualMachineId());
            if (vm != null) { // VM may have been destroyed
                response.setVirtualMachineId(vm.getId());
                response.setVirtualMachineName(vm.getName());
                response.setVirtualMachineDisplayName(vm.getDisplayName());
            }
        }
    }
    FirewallRule.State state = fwRule.getState();
    String stateToSet = state.toString();
    if (state == FirewallRule.State.Revoke) {
        stateToSet = "Deleting";
    }
    response.setState(stateToSet);
    response.setObjectName("portforwardingrule");
    return response;
}
/**
 * Builds the API response for an IP-forwarding rule, including the target
 * VM when the rule's endpoints and VM are resolvable. Rules in the Revoke
 * state are reported as "Deleting".
 *
 * @param fwRule the rule to translate
 * @return the populated IpForwardingRuleResponse
 */
@Override
public IpForwardingRuleResponse createIpForwardingRuleResponse(PortForwardingRule fwRule) {
    IpForwardingRuleResponse response = new IpForwardingRuleResponse();
    response.setId(fwRule.getId());
    response.setProtocol(fwRule.getProtocol());
    // BUG FIX: the source IP was previously dereferenced unconditionally
    // (getSourceIpAddress().addr()) BEFORE the null check guarding it, so a
    // null source IP would NPE before the guard ever ran. Guard the
    // dereference itself (same fix as createFirewallRuleResponse).
    if (fwRule.getSourceIpAddress() != null) {
        response.setPublicIpAddress(fwRule.getSourceIpAddress().addr());
        if (fwRule.getDestinationIpAddress() != null) {
            UserVm vm = ApiDBUtils.findUserVmById(fwRule.getVirtualMachineId());
            if (vm != null) { // vm might be destroyed
                response.setVirtualMachineId(vm.getId());
                response.setVirtualMachineName(vm.getName());
                response.setVirtualMachineDisplayName(vm.getDisplayName());
            }
        }
    }
    FirewallRule.State state = fwRule.getState();
    String stateToSet = state.toString();
    if (state == FirewallRule.State.Revoke) {
        stateToSet = "Deleting";
    }
    response.setState(stateToSet);
    response.setObjectName("ipforwardingrule");
    return response;
}
/**
 * Builds the API response for a user VM: ownership, placement, template,
 * service offering, root volume, runtime statistics and NIC details.
 *
 * <p>Returns {@code null} when the owning account has been deleted, so the
 * VM is skipped in the response.
 *
 * @param userVm the VM entity to translate
 * @return the populated UserVmResponse, or {@code null} if the owner
 *         account no longer exists
 */
@Override
public UserVmResponse createUserVmResponse(UserVm userVm) {
    UserVmResponse userVmResponse = new UserVmResponse();
    Account acct = ApiDBUtils.findAccountById(Long.valueOf(userVm.getAccountId()));
    // FIXME - this check should be done in searchForUserVm method in
    // ManagementServerImpl; otherwise the number of vms returned is not
    // going to match the pageSize request parameter
    if ((acct != null) && (acct.getRemoved() == null)) {
        userVmResponse.setAccountName(acct.getAccountName());
        userVmResponse.setDomainId(acct.getDomainId());
        userVmResponse.setDomainName(ApiDBUtils.findDomainById(acct.getDomainId()).getName());
    } else {
        return null; // the account has been deleted, skip this VM in the response
    }
    userVmResponse.setId(userVm.getId());
    userVmResponse.setName(userVm.getName());
    userVmResponse.setCreated(userVm.getCreated());
    if (userVm.getState() != null) {
        userVmResponse.setState(userVm.getState().toString());
    }
    userVmResponse.setHaEnable(userVm.isHaEnabled());
    // Fall back to the internal name when no display name was set.
    if (userVm.getDisplayName() != null) {
        userVmResponse.setDisplayName(userVm.getDisplayName());
    } else {
        userVmResponse.setDisplayName(userVm.getName());
    }
    InstanceGroupVO group = ApiDBUtils.findInstanceGroupForVM(userVm.getId());
    if (group != null) {
        userVmResponse.setGroup(group.getName());
        userVmResponse.setGroupId(group.getId());
    }
    // Data Center Info
    userVmResponse.setZoneId(userVm.getDataCenterId());
    userVmResponse.setZoneName(ApiDBUtils.findZoneById(userVm.getDataCenterId()).getName());
    Account account = UserContext.current().getAccount();
    // if user is an admin, display host id
    if (((account == null) || (account.getType() == Account.ACCOUNT_TYPE_ADMIN)) && (userVm.getHostId() != null)) {
        userVmResponse.setHostId(userVm.getHostId());
        userVmResponse.setHostName(ApiDBUtils.findHostById(userVm.getHostId()).getName());
    }
    // Template Info
    VMTemplateVO template = ApiDBUtils.findTemplateById(userVm.getTemplateId());
    if (template != null) {
        userVmResponse.setTemplateId(userVm.getTemplateId());
        userVmResponse.setTemplateName(template.getName());
        userVmResponse.setTemplateDisplayText(template.getDisplayText());
        userVmResponse.setPasswordEnabled(template.getEnablePassword());
    } else {
        // No template on record: the VM was booted from an ISO.
        userVmResponse.setTemplateId(-1L);
        userVmResponse.setTemplateName("ISO Boot");
        userVmResponse.setTemplateDisplayText("ISO Boot");
        userVmResponse.setPasswordEnabled(false);
    }
    if (userVm.getPassword() != null) {
        userVmResponse.setPassword(userVm.getPassword());
    }
    // ISO Info
    if (userVm.getIsoId() != null) {
        VMTemplateVO iso = ApiDBUtils.findTemplateById(userVm.getIsoId());
        if (iso != null) {
            userVmResponse.setIsoId(userVm.getIsoId());
            userVmResponse.setIsoName(iso.getName());
        }
    }
    // Service Offering Info
    ServiceOffering offering = ApiDBUtils.findServiceOfferingById(userVm.getServiceOfferingId());
    userVmResponse.setServiceOfferingId(userVm.getServiceOfferingId());
    userVmResponse.setServiceOfferingName(offering.getName());
    userVmResponse.setCpuNumber(offering.getCpu());
    userVmResponse.setCpuSpeed(offering.getSpeed());
    userVmResponse.setMemory(offering.getRamSize());
    // Root volume device/placement info, when the root volume exists.
    VolumeVO rootVolume = ApiDBUtils.findRootVolume(userVm.getId());
    if (rootVolume != null) {
        userVmResponse.setRootDeviceId(rootVolume.getDeviceId());
        String rootDeviceType = "Not created";
        if (rootVolume.getPoolId() != null) {
            StoragePoolVO storagePool = ApiDBUtils.findStoragePoolById(rootVolume.getPoolId());
            rootDeviceType = storagePool.getPoolType().toString();
        }
        userVmResponse.setRootDeviceType(rootDeviceType);
    }
    // stats calculation
    DecimalFormat decimalFormat = new DecimalFormat("#.##");
    VmStats vmStats = ApiDBUtils.getVmStatistics(userVm.getId());
    if (vmStats != null) {
        String cpuUsed = decimalFormat.format((float) vmStats.getCPUUtilization()) + "%";
        userVmResponse.setCpuUsed(cpuUsed);
        // BUG FIX: these previously used Double.doubleToLongBits(), which
        // returns the raw IEEE-754 bit pattern of the double (a meaningless
        // huge number), not the numeric KB value. Truncate to long instead
        // (same fix as createHostResponse).
        userVmResponse.setNetworkKbsRead((long) vmStats.getNetworkReadKBs());
        userVmResponse.setNetworkKbsWrite((long) vmStats.getNetworkWriteKBs());
    }
    userVmResponse.setGuestOsId(userVm.getGuestOSId());
    // network groups
    userVmResponse.setNetworkGroupList(ApiDBUtils.getNetworkGroupsNamesForVm(userVm.getId()));
    // NIC details for each of the VM's network interfaces.
    List<? extends Nic> nics = ApiDBUtils.getNics(userVm);
    List<NicResponse> nicResponses = new ArrayList<NicResponse>();
    for (Nic singleNic : nics) {
        NicResponse nicResponse = new NicResponse();
        nicResponse.setId(singleNic.getId());
        nicResponse.setIpaddress(singleNic.getIp4Address());
        nicResponse.setGateway(singleNic.getGateway());
        nicResponse.setNetmask(singleNic.getNetmask());
        nicResponse.setNetworkid(singleNic.getNetworkId());
        // Broadcast/isolation URIs are admin-only details.
        if (acct.getType() == Account.ACCOUNT_TYPE_ADMIN) {
            if (singleNic.getBroadcastUri() != null) {
                nicResponse.setBroadcastUri(singleNic.getBroadcastUri().toString());
            }
            if (singleNic.getIsolationUri() != null) {
                nicResponse.setIsolationUri(singleNic.getIsolationUri().toString());
            }
        }
        // Set traffic type
        Network network = ApiDBUtils.findNetworkById(singleNic.getNetworkId());
        nicResponse.setTrafficType(network.getTrafficType().toString());
        // Set type
        NetworkOffering networkOffering = ApiDBUtils.findNetworkOfferingById(network.getNetworkOfferingId());
        if (networkOffering.getGuestIpType() != null) {
            nicResponse.setType(networkOffering.getGuestIpType().toString());
        }
        nicResponse.setObjectName("nic");
        nicResponses.add(nicResponse);
    }
    userVmResponse.setNics(nicResponses);
    userVmResponse.setObjectName("virtualmachine");
    return userVmResponse;
}
/**
 * Builds the API response for a virtual (domain) router, mapping its NIC
 * addresses onto public/private/guest fields according to each NIC's
 * network traffic type.
 *
 * @param router the router entity to translate
 * @return the populated DomainRouterResponse
 */
@Override
public DomainRouterResponse createDomainRouterResponse(VirtualRouter router) {
    DomainRouterResponse routerResponse = new DomainRouterResponse();
    routerResponse.setId(router.getId());
    routerResponse.setZoneId(router.getDataCenterId());
    routerResponse.setName(router.getName());
    routerResponse.setPodId(router.getPodId());
    routerResponse.setTemplateId(router.getTemplateId());
    routerResponse.setCreated(router.getCreated());
    routerResponse.setState(router.getState());
    routerResponse.setNetworkDomain(router.getDomain());
    // Host placement, only when the router is currently on a host.
    if (router.getHostId() != null) {
        routerResponse.setHostId(router.getHostId());
        routerResponse.setHostName(ApiDBUtils.findHostById(router.getHostId()).getName());
    }
    // Owning account / domain details.
    Account accountTemp = ApiDBUtils.findAccountById(router.getAccountId());
    if (accountTemp != null) {
        routerResponse.setAccountName(accountTemp.getAccountName());
        routerResponse.setDomainId(accountTemp.getDomainId());
        routerResponse.setDomainName(ApiDBUtils.findDomainById(accountTemp.getDomainId()).getName());
    }
    // Route each NIC's address details to the response fields matching its
    // network's traffic type (Public / Control / Guest).
    List<? extends Nic> nics = ApiDBUtils.getNics(router);
    for (Nic singleNic : nics) {
        Network network = ApiDBUtils.findNetworkById(singleNic.getNetworkId());
        if (network != null) {
            if (network.getTrafficType() == TrafficType.Public) {
                routerResponse.setPublicIp(singleNic.getIp4Address());
                routerResponse.setPublicMacAddress(singleNic.getMacAddress());
                routerResponse.setPublicNetmask(singleNic.getNetmask());
            } else if (network.getTrafficType() == TrafficType.Control) {
                routerResponse.setPrivateIp(singleNic.getIp4Address());
                routerResponse.setPrivateMacAddress(singleNic.getMacAddress());
                routerResponse.setPrivateNetmask(singleNic.getNetmask());
            } else if (network.getTrafficType() == TrafficType.Guest) {
                routerResponse.setGuestIpAddress(singleNic.getIp4Address());
                routerResponse.setGuestMacAddress(singleNic.getMacAddress());
                routerResponse.setGuestNetmask(singleNic.getNetmask());
            }
        }
    }
    // Zone name and DNS servers, when the zone can be resolved.
    DataCenter zone = ApiDBUtils.findZoneById(router.getDataCenterId());
    if (zone != null) {
        routerResponse.setZoneName(zone.getName());
        routerResponse.setDns1(zone.getDns1());
        routerResponse.setDns2(zone.getDns2());
    }
    routerResponse.setObjectName("domainrouter");
    return routerResponse;
}
/**
 * Builds the API response for a system VM (console proxy or secondary
 * storage VM): identity, placement, zone DNS info and NIC addresses routed
 * by traffic type. Console proxies additionally report their active viewer
 * session count.
 *
 * @param systemVM the system VM entity to translate
 * @return the populated SystemVmResponse
 */
@Override
public SystemVmResponse createSystemVmResponse(VirtualMachine systemVM) {
    SystemVmResponse vmResponse = new SystemVmResponse();
    if (systemVM instanceof SystemVm) {
        SystemVm vm = (SystemVm) systemVM;
        vmResponse.setId(vm.getId());
        vmResponse.setSystemVmType(vm.getType().toString().toLowerCase());
        vmResponse.setZoneId(vm.getDataCenterId());
        vmResponse.setNetworkDomain(vm.getDomain());
        vmResponse.setName(vm.getName());
        vmResponse.setPodId(vm.getPodId());
        vmResponse.setTemplateId(vm.getTemplateId());
        vmResponse.setCreated(vm.getCreated());
        if (vm.getHostId() != null) {
            vmResponse.setHostId(vm.getHostId());
            vmResponse.setHostName(ApiDBUtils.findHostById(vm.getHostId()).getName());
        }
        if (vm.getState() != null) {
            vmResponse.setState(vm.getState().toString());
        }
        // CLEANUP: removed the dead "instanceType" local that was computed
        // here ("console_proxy" / "sec_storage_vm") but never used.
        // for console proxies, add the active sessions
        if (systemVM instanceof ConsoleProxyVO) {
            ConsoleProxyVO proxy = (ConsoleProxyVO) systemVM;
            vmResponse.setActiveViewerSessions(proxy.getActiveSession());
        }
        DataCenter zone = ApiDBUtils.findZoneById(vm.getDataCenterId());
        if (zone != null) {
            vmResponse.setZoneName(zone.getName());
            vmResponse.setDns1(zone.getDns1());
            vmResponse.setDns2(zone.getDns2());
        }
        // Route each NIC's address details to the response fields matching
        // its network's traffic type (Public / Control).
        List<? extends Nic> nics = ApiDBUtils.getNics(systemVM);
        for (Nic singleNic : nics) {
            Network network = ApiDBUtils.findNetworkById(singleNic.getNetworkId());
            if (network != null) {
                if (network.getTrafficType() == TrafficType.Public) {
                    vmResponse.setPublicIp(singleNic.getIp4Address());
                    vmResponse.setPublicMacAddress(singleNic.getMacAddress());
                    vmResponse.setPublicNetmask(singleNic.getNetmask());
                } else if (network.getTrafficType() == TrafficType.Control) {
                    vmResponse.setPrivateIp(singleNic.getIp4Address());
                    vmResponse.setPrivateMacAddress(singleNic.getMacAddress());
                    vmResponse.setPrivateNetmask(singleNic.getNetmask());
                }
            }
        }
    }
    vmResponse.setObjectName("systemvm");
    return vmResponse;
}
/** Delegates async-job command synchronization to {@link ApiDBUtils}. */
@Override
public void synchronizeCommand(Object job, String syncObjType, Long syncObjId) {
    ApiDBUtils.synchronizeCommand(job, syncObjType, syncObjId);
}
/** Looks up a user by id via {@link ApiDBUtils}. */
@Override
public User findUserById(Long userId) {
    return ApiDBUtils.findUserById(userId);
}
/** Looks up a user VM by id via {@link ApiDBUtils}. */
@Override
public UserVm findUserVmById(Long vmId) {
    return ApiDBUtils.findUserVmById(vmId);
}
/** Looks up a volume by id via {@link ApiDBUtils}. */
@Override
public VolumeVO findVolumeById(Long volumeId) {
    return ApiDBUtils.findVolumeById(volumeId);
}
/** Looks up an account by (name, domain) via {@link ApiDBUtils}. */
@Override
public Account findAccountByNameDomain(String accountName, Long domainId) {
    return ApiDBUtils.findAccountByNameDomain(accountName, domainId);
}
/** Looks up a VM template by id via {@link ApiDBUtils}. */
@Override
public VirtualMachineTemplate findTemplateById(Long templateId) {
    return ApiDBUtils.findTemplateById(templateId);
}
/**
 * Maps a {@link VpnUser} onto its API response representation.
 *
 * @param vpnUser the VPN user to translate
 * @return the populated VpnUsersResponse
 */
@Override
public VpnUsersResponse createVpnUserResponse(VpnUser vpnUser) {
    VpnUsersResponse response = new VpnUsersResponse();
    response.setId(vpnUser.getId());
    response.setUserName(vpnUser.getUsername());
    response.setAccountName(vpnUser.getAccountName());

    // Domain details, only when the owning account can be resolved.
    Account owner = ApiDBUtils.findAccountById(vpnUser.getAccountId());
    if (owner != null) {
        response.setDomainId(owner.getDomainId());
        response.setDomainName(ApiDBUtils.findDomainById(owner.getDomainId()).getName());
    }
    response.setObjectName("vpnuser");
    return response;
}
/**
 * Maps a {@link RemoteAccessVpn} onto its API response representation.
 *
 * @param vpn the remote-access VPN to translate
 * @return the populated RemoteAccessVpnResponse
 */
@Override
public RemoteAccessVpnResponse createRemoteAccessVpnResponse(RemoteAccessVpn vpn) {
    RemoteAccessVpnResponse response = new RemoteAccessVpnResponse();
    response.setId(vpn.getId());
    response.setPublicIp(vpn.getVpnServerAddress());
    response.setIpRange(vpn.getIpRange());
    response.setPresharedKey(vpn.getIpsecPresharedKey());
    response.setAccountName(vpn.getAccountName());

    // Domain details, only when the owning account can be resolved.
    Account owner = ApiDBUtils.findAccountById(vpn.getAccountId());
    if (owner != null) {
        response.setDomainId(owner.getDomainId());
        response.setDomainName(ApiDBUtils.findDomainById(owner.getDomainId()).getName());
    }
    response.setObjectName("remoteaccessvpn");
    return response;
}
/**
 * Maps a template/ISO entity onto an ISO response. For ISOs the bootable
 * flag is reported; templates are always bootable, so for them the
 * associated hypervisor is reported instead.
 *
 * @param result the template/ISO entity to translate
 * @return the populated TemplateResponse
 */
@Override
public TemplateResponse createIsoResponse(VirtualMachineTemplate result) {
    TemplateResponse response = new TemplateResponse();
    response.setId(result.getId());
    response.setName(result.getName());
    response.setDisplayText(result.getDisplayText());
    response.setPublic(result.isPublicTemplate());
    response.setCreated(result.getCreated());
    response.setFormat(result.getFormat());
    response.setOsTypeId(result.getGuestOSId());
    response.setOsTypeName(ApiDBUtils.findGuestOSById(result.getGuestOSId()).getDisplayName());
    if (result.getFormat() == ImageFormat.ISO) {
        response.setBootable(result.isBootable());
    } else {
        // hypervisors are associated with templates
        response.setHypervisor(result.getHypervisorType().toString());
    }
    // add account ID and name
    Account owner = ApiDBUtils.findAccountById(result.getAccountId());
    if (owner != null) {
        response.setAccount(owner.getAccountName());
        response.setDomainId(owner.getDomainId());
        response.setDomainName(ApiDBUtils.findDomainById(owner.getDomainId()).getName());
    }
    response.setObjectName("iso");
    return response;
}
/**
 * Appends one TemplateResponse per host the template is stored on for the
 * given (templateId, zoneId) pair. Admins and the template owner also see
 * the download status.
 *
 * @param responses        the list to append to
 * @param templateZonePair (templateId, zoneId) identifying the template copies
 * @param isAdmin          whether the caller is an admin
 * @param account          the calling account
 */
@Override
public void createTemplateResponse(List<TemplateResponse> responses, Pair<Long,Long> templateZonePair, boolean isAdmin, Account account) {
    List<VMTemplateHostVO> templateHostRefsForTemplate = ApiDBUtils.listTemplateHostBy(templateZonePair.first(), templateZonePair.second());
    VMTemplateVO template = ApiDBUtils.findTemplateById(templateZonePair.first());
    // One response per host on which a copy of the template resides.
    for (VMTemplateHostVO templateHostRef : templateHostRefsForTemplate) {
        TemplateResponse templateResponse = new TemplateResponse();
        templateResponse.setId(template.getId());
        templateResponse.setName(template.getName());
        templateResponse.setDisplayText(template.getDisplayText());
        templateResponse.setPublic(template.isPublicTemplate());
        templateResponse.setCreated(templateHostRef.getCreated());
        templateResponse.setReady(templateHostRef.getDownloadState()==Status.DOWNLOADED);
        templateResponse.setFeatured(template.isFeatured());
        templateResponse.setPasswordEnabled(template.getEnablePassword());
        templateResponse.setCrossZones(template.isCrossZones());
        templateResponse.setFormat(template.getFormat());
        if (template.getTemplateType() != null) {
            templateResponse.setTemplateType(template.getTemplateType().toString());
        }
        templateResponse.setHypervisor(template.getHypervisorType().toString());
        // Guest OS details; fall back to sentinel values when unknown.
        GuestOS os = ApiDBUtils.findGuestOSById(template.getGuestOSId());
        if (os != null) {
            templateResponse.setOsTypeId(os.getId());
            templateResponse.setOsTypeName(os.getDisplayName());
        } else {
            templateResponse.setOsTypeId(-1L);
            templateResponse.setOsTypeName("");
        }
        // add account ID and name
        Account owner = ApiDBUtils.findAccountById(template.getAccountId());
        if (owner != null) {
            templateResponse.setAccount(owner.getAccountName());
            templateResponse.setDomainId(owner.getDomainId());
            templateResponse.setDomainName(ApiDBUtils.findDomainById(owner.getDomainId()).getName());
        }
        HostVO host = ApiDBUtils.findHostById(templateHostRef.getHostId());
        DataCenterVO datacenter = ApiDBUtils.findZoneById(host.getDataCenterId());
        // Add the zone ID
        templateResponse.setZoneId(host.getDataCenterId());
        templateResponse.setZoneName(datacenter.getName());
        // If the user is an admin, add the template download status
        if (isAdmin || account.getId() == template.getAccountId()) {
            // add download status
            if (templateHostRef.getDownloadState()!=Status.DOWNLOADED) {
                String templateStatus = "Processing";
                if (templateHostRef.getDownloadState() == VMTemplateHostVO.Status.DOWNLOAD_IN_PROGRESS) {
                    if (templateHostRef.getDownloadPercent() == 100) {
                        templateStatus = "Installing Template";
                    } else {
                        templateStatus = templateHostRef.getDownloadPercent() + "% Downloaded";
                    }
                } else {
                    templateStatus = templateHostRef.getErrorString();
                }
                templateResponse.setStatus(templateStatus);
            } else if (templateHostRef.getDownloadState() == VMTemplateHostVO.Status.DOWNLOADED) {
                templateResponse.setStatus("Download Complete");
            } else {
                templateResponse.setStatus("Successfully Installed");
            }
        }
        // NOTE(review): if getSize() can return a null Long, the comparison
        // below unboxes it and would NPE — confirm the host ref always has
        // a size recorded.
        Long templateSize = templateHostRef.getSize();
        if (templateSize > 0) {
            templateResponse.setSize(templateSize);
        }
        templateResponse.setObjectName("template");
        responses.add(templateResponse);
    }
}
/**
 * Builds one TemplateResponse per zone for the given template and wraps them
 * in a ListResponse. When zoneId is supplied (and not -1) only that zone is
 * covered, otherwise every zone is listed.
 *
 * @param template the template being described
 * @param zoneId   target zone id, or null / -1 for all zones
 * @return list response containing one entry per covered zone
 */
@Override
public ListResponse<TemplateResponse> createTemplateResponse2(VirtualMachineTemplate template, Long zoneId) {
ListResponse<TemplateResponse> response = new ListResponse<TemplateResponse>();
List<TemplateResponse> responses = new ArrayList<TemplateResponse>();
List<DataCenterVO> zones = null;
// -1 is treated the same as a null zoneId: cover all zones
if ((zoneId != null) && (zoneId != -1)) {
zones = new ArrayList<DataCenterVO>();
zones.add(ApiDBUtils.findZoneById(zoneId));
} else {
zones = ApiDBUtils.listZones();
}
for (DataCenterVO zone : zones) {
TemplateResponse templateResponse = new TemplateResponse();
templateResponse.setId(template.getId());
templateResponse.setName(template.getName());
templateResponse.setDisplayText(template.getDisplayText());
templateResponse.setPublic(template.isPublicTemplate());
templateResponse.setExtractable(template.isExtractable());
templateResponse.setCrossZones(template.isCrossZones());
// creation time and readiness come from the per-zone host ref, if present
VMTemplateHostVO isoHostRef = ApiDBUtils.findTemplateHostRef(template.getId(), zone.getId());
if (isoHostRef != null) {
templateResponse.setCreated(isoHostRef.getCreated());
templateResponse.setReady(isoHostRef.getDownloadState() == Status.DOWNLOADED);
}
templateResponse.setFeatured(template.isFeatured());
templateResponse.setPasswordEnabled(template.getEnablePassword());
templateResponse.setFormat(template.getFormat());
// NOTE(review): status is always reported as "Processing" here, unlike the
// sibling builders which inspect the download state — confirm intentional.
templateResponse.setStatus("Processing");
GuestOS os = ApiDBUtils.findGuestOSById(template.getGuestOSId());
if (os != null) {
templateResponse.setOsTypeId(os.getId());
templateResponse.setOsTypeName(os.getDisplayName());
} else {
// missing guest-OS record: sentinel id and empty name
templateResponse.setOsTypeId(-1L);
templateResponse.setOsTypeName("");
}
Account owner = ApiDBUtils.findAccountById(template.getAccountId());
if (owner != null) {
templateResponse.setAccountId(owner.getId());
templateResponse.setAccount(owner.getAccountName());
templateResponse.setDomainId(owner.getDomainId());
templateResponse.setDomainName(ApiDBUtils.findDomainById(owner.getDomainId()).getName());
}
templateResponse.setZoneId(zone.getId());
templateResponse.setZoneName(zone.getName());
templateResponse.setHypervisor(template.getHypervisorType().toString());
templateResponse.setObjectName("template");
responses.add(templateResponse);
}
response.setResponses(responses);
return response;
}
/**
 * Builds one ISO TemplateResponse per zone for the given ISO template.
 * When zoneId is supplied (and not -1) only that zone is covered, otherwise
 * every zone is listed.
 *
 * @param template the ISO template being described
 * @param zoneId   target zone id, or null / -1 for all zones
 * @return list response containing one entry per covered zone
 */
@Override
public ListResponse<TemplateResponse> createIsoResponses(VirtualMachineTemplate template, Long zoneId) {
ListResponse<TemplateResponse> response = new ListResponse<TemplateResponse>();
List<TemplateResponse> responses = new ArrayList<TemplateResponse>();
List<DataCenterVO> zones = null;
// -1 is treated the same as a null zoneId: cover all zones
if ((zoneId != null) && (zoneId != -1)) {
zones = new ArrayList<DataCenterVO>();
zones.add(ApiDBUtils.findZoneById(zoneId));
} else {
zones = ApiDBUtils.listZones();
}
for (DataCenterVO zone : zones) {
TemplateResponse templateResponse = new TemplateResponse();
templateResponse.setId(template.getId());
templateResponse.setName(template.getName());
templateResponse.setDisplayText(template.getDisplayText());
templateResponse.setPublic(template.isPublicTemplate());
// creation time and readiness come from the per-zone host ref, if present
VMTemplateHostVO isoHostRef = ApiDBUtils.findTemplateHostRef(template.getId(), zone.getId());
if (isoHostRef != null) {
templateResponse.setCreated(isoHostRef.getCreated());
templateResponse.setReady(isoHostRef.getDownloadState() == Status.DOWNLOADED);
}
templateResponse.setFeatured(template.isFeatured());
templateResponse.setBootable(template.isBootable());
// NOTE(review): unlike sibling builders there is no null check on the
// guest-OS lookup here — findGuestOSById returning null would NPE; confirm.
templateResponse.setOsTypeId(template.getGuestOSId());
templateResponse.setOsTypeName(ApiDBUtils.findGuestOSById(template.getGuestOSId()).getDisplayName());
Account owner = ApiDBUtils.findAccountById(template.getAccountId());
if (owner != null) {
templateResponse.setAccountId(owner.getId());
templateResponse.setAccount(owner.getAccountName());
templateResponse.setDomainId(owner.getDomainId());
templateResponse.setDomainName(ApiDBUtils.findDomainById(owner.getDomainId()).getName());
}
templateResponse.setZoneId(zone.getId());
templateResponse.setZoneName(zone.getName());
templateResponse.setObjectName("iso");
responses.add(templateResponse);
}
response.setResponses(responses);
return response;
}
/**
 * Transposes raw network-group rule rows into grouped result objects and
 * renders each group, together with its ingress rules, as a
 * NetworkGroupResponse.
 *
 * @param networkGroups flat list of network group rules to render
 * @return list response of security groups, each carrying its ingress rules
 */
@Override
public ListResponse<NetworkGroupResponse> createNetworkGroupResponses(List<? extends NetworkGroupRules> networkGroups) {
    ListResponse<NetworkGroupResponse> listResponse = new ListResponse<NetworkGroupResponse>();
    List<NetworkGroupResponse> groupResponses = new ArrayList<NetworkGroupResponse>();
    for (NetworkGroupResultObject group : NetworkGroupResultObject.transposeNetworkGroups(networkGroups)) {
        NetworkGroupResponse groupResponse = new NetworkGroupResponse();
        groupResponse.setId(group.getId());
        groupResponse.setName(group.getName());
        groupResponse.setDescription(group.getDescription());
        groupResponse.setAccountName(group.getAccountName());
        groupResponse.setDomainId(group.getDomainId());
        groupResponse.setDomainName(ApiDBUtils.findDomainById(group.getDomainId()).getName());
        List<IngressRuleResultObject> rules = group.getIngressRules();
        if ((rules != null) && !rules.isEmpty()) {
            List<IngressRuleResponse> ruleResponses = new ArrayList<IngressRuleResponse>();
            for (IngressRuleResultObject rule : rules) {
                ruleResponses.add(buildIngressRuleResponse(rule));
            }
            groupResponse.setIngressRules(ruleResponses);
        }
        groupResponse.setObjectName("securitygroup");
        groupResponses.add(groupResponse);
    }
    listResponse.setResponses(groupResponses);
    return listResponse;
}

/** Renders a single transposed ingress rule as an IngressRuleResponse. */
private IngressRuleResponse buildIngressRuleResponse(IngressRuleResultObject rule) {
    IngressRuleResponse ruleResponse = new IngressRuleResponse();
    ruleResponse.setRuleId(rule.getId());
    ruleResponse.setProtocol(rule.getProtocol());
    if ("icmp".equalsIgnoreCase(rule.getProtocol())) {
        // For ICMP the start/end port fields carry the ICMP type/code.
        ruleResponse.setIcmpType(rule.getStartPort());
        ruleResponse.setIcmpCode(rule.getEndPort());
    } else {
        ruleResponse.setStartPort(rule.getStartPort());
        ruleResponse.setEndPort(rule.getEndPort());
    }
    if (rule.getAllowedNetworkGroup() != null) {
        // rule allows traffic from another network group
        ruleResponse.setNetworkGroupName(rule.getAllowedNetworkGroup());
        ruleResponse.setAccountName(rule.getAllowedNetGroupAcct());
    } else {
        // rule allows traffic from a CIDR
        ruleResponse.setCidr(rule.getAllowedSourceIpCidr());
    }
    ruleResponse.setObjectName("ingressrule");
    return ruleResponse;
}
/**
 * Renders a single NetworkGroup as a security-group response (no ingress
 * rules are included here).
 *
 * @param group the network group to render
 * @return the populated response
 */
@Override
public NetworkGroupResponse createNetworkGroupResponse(NetworkGroup group) {
    NetworkGroupResponse groupResponse = new NetworkGroupResponse();
    groupResponse.setId(group.getId());
    groupResponse.setName(group.getName());
    groupResponse.setDescription(group.getDescription());
    groupResponse.setAccountName(group.getAccountName());
    groupResponse.setDomainId(group.getDomainId());
    groupResponse.setDomainName(ApiDBUtils.findDomainById(group.getDomainId()).getName());
    groupResponse.setObjectName("securitygroup");
    return groupResponse;
}
/**
 * Builds the response describing an extract (download/upload) job for a
 * template.
 *
 * @param uploadId  id of the upload record tracking the extraction
 * @param id        template id
 * @param zoneId    zone the template is extracted in
 * @param accountId account that requested the extraction
 * @param mode      extraction mode string, passed through to the response
 * @return the populated extract response
 */
@Override
public ExtractResponse createExtractResponse(Long uploadId, Long id, Long zoneId, Long accountId, String mode) {
    UploadVO uploadInfo = ApiDBUtils.findUploadById(uploadId);
    ExtractResponse response = new ExtractResponse();
    response.setObjectName("template");
    response.setId(id);
    response.setName(ApiDBUtils.findTemplateById(id).getName());
    response.setZoneId(zoneId);
    response.setZoneName(ApiDBUtils.findZoneById(zoneId).getName());
    response.setMode(mode);
    response.setUploadId(uploadId);
    response.setState(uploadInfo.getUploadState().toString());
    response.setAccountId(accountId);
    // FIXME: set the raw URL once the gson jar is upgraded; it currently
    // throws, so slashes are percent-encoded as a workaround.
    // FIX: use replace() instead of replaceAll() — "/" is a literal, not a
    // regex, so the regex engine is unnecessary (same result, less overhead).
    response.setUrl(uploadInfo.getUploadUrl().replace("/", "%2F"));
    return response;
}
/**
 * Builds the response for a template that was copied to the given destination
 * zone. Download status is included only for admins and the template owner.
 *
 * @param template   the copied template; must not be null
 * @param destZoneId destination zone id
 * @return the populated template response
 * @throws ServerApiException if template is null (the copy failed)
 */
@Override
public TemplateResponse createTemplateResponse(VirtualMachineTemplate template, Long destZoneId) {
    TemplateResponse templateResponse = new TemplateResponse();
    if (template != null) {
        templateResponse.setId(template.getId());
        templateResponse.setName(template.getName());
        templateResponse.setDisplayText(template.getDisplayText());
        templateResponse.setPublic(template.isPublicTemplate());
        templateResponse.setBootable(template.isBootable());
        templateResponse.setFeatured(template.isFeatured());
        templateResponse.setCrossZones(template.isCrossZones());
        templateResponse.setCreated(template.getCreated());
        templateResponse.setFormat(template.getFormat());
        templateResponse.setPasswordEnabled(template.getEnablePassword());
        templateResponse.setZoneId(destZoneId);
        templateResponse.setZoneName(ApiDBUtils.findZoneById(destZoneId).getName());
        GuestOS os = ApiDBUtils.findGuestOSById(template.getGuestOSId());
        if (os != null) {
            templateResponse.setOsTypeId(os.getId());
            templateResponse.setOsTypeName(os.getDisplayName());
        } else {
            // Missing guest-OS record: sentinel id and empty name.
            templateResponse.setOsTypeId(-1L);
            templateResponse.setOsTypeName("");
        }
        // add account ID and name
        Account owner = ApiDBUtils.findAccountById(template.getAccountId());
        if (owner != null) {
            templateResponse.setAccount(owner.getAccountName());
            templateResponse.setDomainId(owner.getDomainId());
            templateResponse.setDomainName(ApiDBUtils.findDomainById(owner.getDomainId()).getName());
        }
        // set status
        Account account = UserContext.current().getAccount();
        boolean isAdmin = false;
        // A null account is treated as admin, so 'account' is never dereferenced
        // below when null (the || short-circuits on isAdmin).
        if ((account == null) || (account.getType() == Account.ACCOUNT_TYPE_ADMIN) || (account.getType() == Account.ACCOUNT_TYPE_DOMAIN_ADMIN)) {
            isAdmin = true;
        }
        // Return download status for admin users and the template owner.
        VMTemplateHostVO templateHostRef = ApiDBUtils.findTemplateHostRef(template.getId(), destZoneId);
        // FIX: templateHostRef was dereferenced here before the null check that
        // guarded setReady() below; guard the whole status block instead.
        if (templateHostRef != null && (isAdmin || template.getAccountId() == account.getId())) {
            if (templateHostRef.getDownloadState() != Status.DOWNLOADED) {
                String templateStatus = "Processing";
                if (templateHostRef.getDownloadState() == VMTemplateHostVO.Status.DOWNLOAD_IN_PROGRESS) {
                    if (templateHostRef.getDownloadPercent() == 100) {
                        templateStatus = "Installing Template";
                    } else {
                        templateStatus = templateHostRef.getDownloadPercent() + "% Downloaded";
                    }
                } else {
                    templateStatus = templateHostRef.getErrorString();
                }
                templateResponse.setStatus(templateStatus);
            } else {
                // FIX: "else if (state == DOWNLOADED)" made the trailing
                // "Successfully Installed" branch unreachable; this else already
                // implies DOWNLOADED.
                templateResponse.setStatus("Download Complete");
            }
        }
        templateResponse.setReady(templateHostRef != null && templateHostRef.getDownloadState() == VMTemplateHostVO.Status.DOWNLOADED);
    } else {
        throw new ServerApiException(BaseCmd.INTERNAL_ERROR, "Failed to copy template");
    }
    templateResponse.setObjectName("template");
    return templateResponse;
}
/**
 * Builds the response for an ISO that was copied to the given destination
 * zone. Download status is included only for admins and the ISO owner.
 *
 * @param iso        the copied ISO; must not be null
 * @param destZoneId destination zone id
 * @return the populated ISO response
 * @throws ServerApiException if iso is null (the copy failed)
 */
@Override
public TemplateResponse createIsoResponse3(VirtualMachineTemplate iso, Long destZoneId) {
    TemplateResponse isoResponse = new TemplateResponse();
    if (iso != null) {
        isoResponse.setId(iso.getId());
        isoResponse.setName(iso.getName());
        isoResponse.setDisplayText(iso.getDisplayText());
        isoResponse.setPublic(iso.isPublicTemplate());
        isoResponse.setBootable(iso.isBootable());
        isoResponse.setFeatured(iso.isFeatured());
        isoResponse.setCrossZones(iso.isCrossZones());
        isoResponse.setCreated(iso.getCreated());
        isoResponse.setZoneId(destZoneId);
        isoResponse.setZoneName(ApiDBUtils.findZoneById(destZoneId).getName());
        GuestOS os = ApiDBUtils.findGuestOSById(iso.getGuestOSId());
        if (os != null) {
            isoResponse.setOsTypeId(os.getId());
            isoResponse.setOsTypeName(os.getDisplayName());
        } else {
            // Missing guest-OS record: sentinel id and empty name.
            isoResponse.setOsTypeId(-1L);
            isoResponse.setOsTypeName("");
        }
        // add account ID and name
        Account owner = ApiDBUtils.findAccountById(iso.getAccountId());
        if (owner != null) {
            isoResponse.setAccount(owner.getAccountName());
            isoResponse.setDomainId(owner.getDomainId());
            isoResponse.setDomainName(ApiDBUtils.findDomainById(owner.getDomainId()).getName());
        }
        // set status
        Account account = UserContext.current().getAccount();
        boolean isAdmin = false;
        // A null account is treated as admin; the || below short-circuits so
        // 'account' is never dereferenced when null.
        if ((account == null) || (account.getType() == Account.ACCOUNT_TYPE_ADMIN) || (account.getType() == Account.ACCOUNT_TYPE_DOMAIN_ADMIN)) {
            isAdmin = true;
        }
        // Return download status for admin users and the ISO owner.
        VMTemplateHostVO templateHostRef = ApiDBUtils.findTemplateHostRef(iso.getId(), destZoneId);
        // FIX: templateHostRef was dereferenced with no null check at all; a
        // missing host ref would NPE. Guard the status block and setReady().
        if (templateHostRef != null && (isAdmin || iso.getAccountId() == account.getId())) {
            if (templateHostRef.getDownloadState() != Status.DOWNLOADED) {
                String templateStatus = "Processing";
                if (templateHostRef.getDownloadState() == VMTemplateHostVO.Status.DOWNLOAD_IN_PROGRESS) {
                    if (templateHostRef.getDownloadPercent() == 100) {
                        templateStatus = "Installing Template";
                    } else {
                        templateStatus = templateHostRef.getDownloadPercent() + "% Downloaded";
                    }
                } else {
                    templateStatus = templateHostRef.getErrorString();
                }
                isoResponse.setStatus(templateStatus);
            } else {
                // FIX: the old trailing "Successfully Installed" branch was
                // unreachable; this else already implies DOWNLOADED.
                isoResponse.setStatus("Download Complete");
            }
        }
        isoResponse.setReady(templateHostRef != null && templateHostRef.getDownloadState() == VMTemplateHostVO.Status.DOWNLOADED);
    } else {
        throw new ServerApiException(BaseCmd.INTERNAL_ERROR, "Failed to copy iso");
    }
    isoResponse.setObjectName("iso");
    return isoResponse;
}
/**
 * Serializes a create-command response into the requested wire format by
 * delegating to the shared {@code ApiResponseSerializer}.
 *
 * @param response     the response object to serialize
 * @param responseType the requested output format (e.g. xml/json)
 * @return the serialized response body
 */
@Override
public String toSerializedString(CreateCmdResponse response, String responseType) {
    return ApiResponseSerializer.toSerializedString(response, responseType);
}
/**
 * Renders an async job record as an API response, deserializing the stored
 * job result back into a ResponseObject.
 *
 * @param job the persisted async job
 * @return the populated async-job response
 */
@Override
public AsyncJobResponse createAsyncJobResponse(AsyncJob job) {
    AsyncJobResponse response = new AsyncJobResponse();
    // identity and ownership
    response.setId(job.getId());
    response.setAccountId(job.getAccountId());
    response.setUserId(job.getUserId());
    response.setCmd(job.getCmd());
    response.setCreated(job.getCreated());
    // instance the job operates on
    response.setJobInstanceId(job.getInstanceId());
    response.setJobInstanceType(job.getInstanceType().toString());
    // progress / outcome
    response.setJobProcStatus(job.getProcessStatus());
    response.setJobStatus(job.getStatus());
    response.setJobResultCode(job.getResultCode());
    // the result is stored serialized; rehydrate it for the response
    response.setJobResult((ResponseObject)ApiSerializerHelper.fromSerializedString(job.getResult()));
    response.setObjectName("asyncjobs");
    return response;
}
/**
 * Builds the response for a template created from a snapshot or a volume.
 * Exactly one of snapshotId / volumeId is expected; when snapshotId is set,
 * the volume is resolved through the snapshot.
 *
 * @param template   the newly created template
 * @param snapshotId source snapshot id, or null
 * @param volumeId   source volume id, used when snapshotId is null
 * @return the populated template response
 */
@Override
public TemplateResponse createTemplateResponse(VirtualMachineTemplate template, Long snapshotId, Long volumeId) {
    TemplateResponse response = new TemplateResponse();
    response.setId(template.getId());
    response.setName(template.getName());
    response.setDisplayText(template.getDisplayText());
    response.setPublic(template.isPublicTemplate());
    response.setPasswordEnabled(template.getEnablePassword());
    response.setCrossZones(template.isCrossZones());
    // Resolve the source volume (directly, or through the snapshot) to learn
    // which data center the template lives in.
    VolumeVO volume = null;
    if (snapshotId != null) {
        Snapshot snapshot = ApiDBUtils.findSnapshotById(snapshotId);
        volume = findVolumeById(snapshot.getVolumeId());
    } else {
        volume = findVolumeById(volumeId);
    }
    VMTemplateHostVO templateHostRef = ApiDBUtils.findTemplateHostRef(template.getId(), volume.getDataCenterId());
    // FIX: getCreated() was dereferenced before the null check that guarded
    // setReady() on the next line; guard the dereference too.
    if (templateHostRef != null) {
        response.setCreated(templateHostRef.getCreated());
    }
    response.setReady(templateHostRef != null && templateHostRef.getDownloadState() == Status.DOWNLOADED);
    GuestOS os = ApiDBUtils.findGuestOSById(template.getGuestOSId());
    if (os != null) {
        response.setOsTypeId(os.getId());
        response.setOsTypeName(os.getDisplayName());
    } else {
        // Missing guest-OS record: sentinel id and empty name.
        response.setOsTypeId(-1L);
        response.setOsTypeName("");
    }
    Account owner = ApiDBUtils.findAccountById(template.getAccountId());
    if (owner != null) {
        response.setAccount(owner.getAccountName());
        response.setDomainId(owner.getDomainId());
        response.setDomainName(ApiDBUtils.findDomainById(owner.getDomainId()).getName());
    }
    DataCenter zone = ApiDBUtils.findZoneById(volume.getDataCenterId());
    if (zone != null) {
        response.setZoneId(zone.getId());
        response.setZoneName(zone.getName());
    }
    response.setObjectName("template");
    return response;
}
/**
 * Renders a persisted event as an API response, resolving the domain name
 * and (when available) the triggering user's name.
 *
 * @param event the event to render
 * @return the populated event response
 */
@Override
public EventResponse createEventResponse(Event event) {
    EventResponse eventResponse = new EventResponse();
    eventResponse.setId(event.getId());
    eventResponse.setEventType(event.getType());
    eventResponse.setLevel(event.getLevel());
    eventResponse.setState(event.getState());
    eventResponse.setDescription(event.getDescription());
    eventResponse.setCreated(event.getCreateDate());
    // a "start" event id links follow-up events back to their initiator
    eventResponse.setParentId(event.getStartId());
    eventResponse.setAccountName(event.getAccountName());
    eventResponse.setDomainId(event.getDomainId());
    eventResponse.setDomainName(ApiDBUtils.findDomainById(event.getDomainId()).getName());
    User user = ApiDBUtils.findUserById(event.getUserId());
    if (user != null) {
        eventResponse.setUsername(user.getUsername());
    }
    eventResponse.setObjectName("event");
    return eventResponse;
}
/**
 * Builds ISO responses for a set of (iso id, zone id) pairs. PERHOST ISOs get
 * a single always-ready entry; other ISOs get one entry per host that stores
 * them, including download status for admins and owners.
 *
 * @param isoZonePairSet pairs of (iso id, zone id) to render
 * @param isAdmin        whether the caller is an admin (admins see download status)
 * @param account        the calling account; ISO owners also see download status
 * @return list response of ISO entries
 */
@Override
public ListResponse<TemplateResponse> createIsoResponse(Set<Pair<Long,Long>> isoZonePairSet, boolean isAdmin, Account account) {
    ListResponse<TemplateResponse> response = new ListResponse<TemplateResponse>();
    List<TemplateResponse> isoResponses = new ArrayList<TemplateResponse>();
    for (Pair<Long,Long> isoZonePair : isoZonePairSet) {
        VMTemplateVO iso = ApiDBUtils.findTemplateById(isoZonePair.first());
        if ( iso.getTemplateType() == TemplateType.PERHOST ) {
            // PERHOST ISOs have no per-host download state; report them as ready.
            TemplateResponse isoResponse = new TemplateResponse();
            isoResponse.setId(iso.getId());
            isoResponse.setName(iso.getName());
            isoResponse.setDisplayText(iso.getDisplayText());
            isoResponse.setPublic(iso.isPublicTemplate());
            isoResponse.setReady(true);
            isoResponse.setBootable(iso.isBootable());
            isoResponse.setFeatured(iso.isFeatured());
            isoResponse.setCrossZones(iso.isCrossZones());
            isoResponse.setObjectName("iso");
            isoResponses.add(isoResponse);
            // FIX: the redundant setResponses() call here was dropped; the final
            // setResponses() below covers every entry. A duplicate setPublic()
            // was also removed.
            continue;
        }
        List<VMTemplateHostVO> isoHosts = ApiDBUtils.listTemplateHostBy(iso.getId(), isoZonePair.second());
        for (VMTemplateHostVO isoHost : isoHosts) {
            TemplateResponse isoResponse = new TemplateResponse();
            isoResponse.setId(iso.getId());
            isoResponse.setName(iso.getName());
            isoResponse.setDisplayText(iso.getDisplayText());
            isoResponse.setPublic(iso.isPublicTemplate());
            isoResponse.setCreated(isoHost.getCreated());
            isoResponse.setReady(isoHost.getDownloadState() == Status.DOWNLOADED);
            isoResponse.setBootable(iso.isBootable());
            isoResponse.setFeatured(iso.isFeatured());
            isoResponse.setCrossZones(iso.isCrossZones());
            GuestOS os = ApiDBUtils.findGuestOSById(iso.getGuestOSId());
            if (os != null) {
                isoResponse.setOsTypeId(os.getId());
                isoResponse.setOsTypeName(os.getDisplayName());
            } else {
                // Missing guest-OS record: sentinel id and empty name.
                isoResponse.setOsTypeId(-1L);
                isoResponse.setOsTypeName("");
            }
            // add account ID and name
            Account owner = ApiDBUtils.findAccountById(iso.getAccountId());
            if (owner != null) {
                isoResponse.setAccount(owner.getAccountName());
                isoResponse.setDomainId(owner.getDomainId());
                isoResponse.setDomainName(ApiDBUtils.findDomainById(owner.getDomainId()).getName());
            }
            // Add the zone ID
            HostVO host = ApiDBUtils.findHostById(isoHost.getHostId());
            DataCenterVO datacenter = ApiDBUtils.findZoneById(host.getDataCenterId());
            isoResponse.setZoneId(host.getDataCenterId());
            isoResponse.setZoneName(datacenter.getName());
            // If the user is an admin (or the owner), add the download status.
            if (isAdmin || account.getId() == iso.getAccountId()) {
                if (isoHost.getDownloadState() != Status.DOWNLOADED) {
                    String isoStatus = "Processing";
                    // FIX: the "== DOWNLOADED -> Download Complete" branch that
                    // used to sit here was unreachable inside this != DOWNLOADED
                    // block and has been removed.
                    if (isoHost.getDownloadState() == VMTemplateHostVO.Status.DOWNLOAD_IN_PROGRESS) {
                        if (isoHost.getDownloadPercent() == 100) {
                            isoStatus = "Installing ISO";
                        } else {
                            isoStatus = isoHost.getDownloadPercent() + "% Downloaded";
                        }
                    } else {
                        isoStatus = isoHost.getErrorString();
                    }
                    isoResponse.setStatus(isoStatus);
                } else {
                    isoResponse.setStatus("Successfully Installed");
                }
            }
            // FIX: guard against a null size — "isoSize > 0" would NPE on
            // auto-unboxing if getSize() returns null.
            Long isoSize = isoHost.getSize();
            if (isoSize != null && isoSize > 0) {
                isoResponse.setSize(isoSize);
            }
            isoResponse.setObjectName("iso");
            isoResponses.add(isoResponse);
        }
    }
    response.setResponses(isoResponses);
    return response;
}
/**
 * Aggregates per-host/per-pool capacity rows into one CapacityVO per
 * (capacityType, zone) key and, when a pod id is present, also per
 * (capacityType, zone, pod) plus a zone-wide pod total keyed with pod id -1.
 * Rows belonging to storage pools that are neither NFS nor iSCSI are ignored.
 *
 * @param hostCapacities raw capacity rows to aggregate
 * @return one summed CapacityVO per aggregation key
 */
private List<CapacityVO> sumCapacities(List<? extends Capacity> hostCapacities) {
    Map<String, Long> totalCapacityMap = new HashMap<String, Long>();
    Map<String, Long> usedCapacityMap = new HashMap<String, Long>();
    // Collect ids of storage pools whose type we do not count.
    Set<Long> poolIdsToIgnore = new HashSet<Long>();
    Criteria c = new Criteria();
    List<? extends StoragePoolVO> allStoragePools = ApiDBUtils.searchForStoragePools(c);
    for (StoragePoolVO pool : allStoragePools) {
        StoragePoolType poolType = pool.getPoolType();
        if (!(poolType.equals(StoragePoolType.NetworkFilesystem) || poolType.equals(StoragePoolType.IscsiLUN))) {
            poolIdsToIgnore.add(pool.getId());
        }
    }
    // collect all the capacity types, sum allocated/used and sum total...get one capacity number for each
    for (Capacity capacity : hostCapacities) {
        if (poolIdsToIgnore.contains(capacity.getHostOrPoolId())) {
            continue;
        }
        String key = capacity.getCapacityType() + "_" + capacity.getDataCenterId();
        // "-1" pod suffix accumulates a zone-wide total across all pods
        String keyForPodTotal = key + "_-1";
        boolean sumPodCapacity = false;
        if (capacity.getPodId() != null) {
            key += "_" + capacity.getPodId();
            sumPodCapacity = true;
        }
        // FIX: the repeated "new Long(...)" null-check dance (deprecated boxing
        // constructor) is replaced by a single accumulate helper.
        accumulate(totalCapacityMap, key, capacity.getTotalCapacity());
        accumulate(usedCapacityMap, key, capacity.getUsedCapacity());
        if (sumPodCapacity) {
            accumulate(totalCapacityMap, keyForPodTotal, capacity.getTotalCapacity());
            accumulate(usedCapacityMap, keyForPodTotal, capacity.getUsedCapacity());
        }
    }
    // Decode each "type_zone[_pod]" key back into a CapacityVO.
    List<CapacityVO> summedCapacities = new ArrayList<CapacityVO>();
    for (Map.Entry<String, Long> entry : totalCapacityMap.entrySet()) {
        String key = entry.getKey();
        CapacityVO summedCapacity = new CapacityVO();
        StringTokenizer st = new StringTokenizer(key, "_");
        summedCapacity.setCapacityType(Short.parseShort(st.nextToken()));
        summedCapacity.setDataCenterId(Long.parseLong(st.nextToken()));
        if (st.hasMoreTokens()) {
            summedCapacity.setPodId(Long.parseLong(st.nextToken()));
        }
        summedCapacity.setTotalCapacity(entry.getValue());
        summedCapacity.setUsedCapacity(usedCapacityMap.get(key));
        summedCapacities.add(summedCapacity);
    }
    return summedCapacities;
}

/** Adds {@code delta} to the value stored under {@code key}, treating a missing entry as 0. */
private static void accumulate(Map<String, Long> map, String key, long delta) {
    Long current = map.get(key);
    map.put(key, (current == null) ? delta : current + delta);
}
/**
 * Converts raw capacity rows into API responses. Rows are first summed per
 * zone/pod via sumCapacities(); percent-used is rendered with the supplied
 * decimal format.
 *
 * @param result raw capacity rows
 * @param format formatter for the percent-used figure
 * @return one CapacityResponse per summed capacity
 */
@Override
public List<CapacityResponse> createCapacityResponse(List<? extends Capacity> result, DecimalFormat format) {
    List<CapacityResponse> responses = new ArrayList<CapacityResponse>();
    for (CapacityVO capacity : sumCapacities(result)) {
        CapacityResponse capacityResponse = new CapacityResponse();
        capacityResponse.setCapacityType(capacity.getCapacityType());
        capacityResponse.setCapacityTotal(capacity.getTotalCapacity());
        capacityResponse.setCapacityUsed(capacity.getUsedCapacity());
        Long podId = capacity.getPodId();
        if (podId != null) {
            capacityResponse.setPodId(podId);
            // a non-positive pod id marks the synthetic "all pods" aggregate
            capacityResponse.setPodName(podId > 0 ? ApiDBUtils.findPodById(podId).getName() : "All");
        }
        capacityResponse.setZoneId(capacity.getDataCenterId());
        capacityResponse.setZoneName(ApiDBUtils.findZoneById(capacity.getDataCenterId()).getName());
        if (capacity.getTotalCapacity() != 0) {
            float percentUsed = (float) capacity.getUsedCapacity() / (float) capacity.getTotalCapacity() * 100f;
            capacityResponse.setPercentUsed(format.format(percentUsed));
        } else {
            // avoid division by zero; report 0% used
            capacityResponse.setPercentUsed(format.format(0L));
        }
        capacityResponse.setObjectName("capacity");
        responses.add(capacityResponse);
    }
    return responses;
}
/**
 * Renders the launch-permission state of a template. For admin callers the
 * owning account's domain id is resolved and included.
 *
 * @param accountNames accounts the template is shared with
 * @param id           template id
 * @param isAdmin      whether the caller is an admin
 * @return the populated permissions response
 */
@Override
public TemplatePermissionsResponse createTemplatePermissionsResponse(List<String> accountNames, Long id, boolean isAdmin) {
    VirtualMachineTemplate template = ApiDBUtils.findTemplateById(id);
    Long templateOwnerDomain = null;
    if (isAdmin) {
        // FIXME: we have just template id and need to get template owner from that
        Account templateOwner = ApiDBUtils.findAccountById(template.getAccountId());
        if (templateOwner != null) {
            templateOwnerDomain = templateOwner.getDomainId();
        }
    }
    TemplatePermissionsResponse permissionsResponse = new TemplatePermissionsResponse();
    permissionsResponse.setId(template.getId());
    permissionsResponse.setPublicTemplate(template.isPublicTemplate());
    // domain id is exposed to admins only (and only when the owner resolved)
    if (isAdmin && (templateOwnerDomain != null)) {
        permissionsResponse.setDomainId(templateOwnerDomain);
    }
    permissionsResponse.setAccountNames(accountNames);
    permissionsResponse.setObjectName("templatepermission");
    return permissionsResponse;
}
/**
 * Looks up the state/result of an async job and renders it. The result type
 * is reported as "text" for primitives, numbers, strings and dates, and as
 * "object" for everything else.
 *
 * @param cmd the query command identifying the job
 * @return the populated async-job response
 * @throws InvalidParameterValueException if the job query is invalid
 */
@Override
public AsyncJobResponse queryJobResult(QueryAsyncJobResultCmd cmd) throws InvalidParameterValueException{
    AsyncJobResult result = ApiDBUtils._asyncMgr.queryAsyncJobResult(cmd);
    AsyncJobResponse response = new AsyncJobResponse();
    response.setId(result.getJobId());
    response.setJobStatus(result.getJobStatus());
    response.setJobProcStatus(result.getProcessStatus());
    response.setJobResultCode(result.getResultCode());
    response.setJobResult((ResponseObject)ApiSerializerHelper.fromSerializedString(result.getResult()));
    Object resultObject = result.getResultObject();
    if (resultObject != null) {
        response.setJobResultType(isTextResult(resultObject.getClass()) ? "text" : "object");
    }
    return response;
}

/** True for classes whose values are rendered as plain text in job results. */
private static boolean isTextResult(Class<?> clz) {
    return clz.isPrimitive() || clz.getSuperclass() == Number.class || clz == String.class || clz == Date.class;
}
/**
 * Renders a network group from a list of its ingress rules. The group itself
 * is resolved from the first rule's group id; an empty/null rule list yields
 * an empty response.
 *
 * @param ingressRules the rules belonging to one network group
 * @return the populated response (empty if no rules were given)
 */
@Override
public NetworkGroupResponse createNetworkGroupResponseFromIngressRule(List<? extends IngressRule> ingressRules) {
    NetworkGroupResponse groupResponse = new NetworkGroupResponse();
    if ((ingressRules != null) && !ingressRules.isEmpty()) {
        // every rule in the list belongs to the same group
        NetworkGroup networkGroup = ApiDBUtils.findNetworkGroupById(ingressRules.get(0).getNetworkGroupId());
        groupResponse.setId(networkGroup.getId());
        groupResponse.setName(networkGroup.getName());
        groupResponse.setDescription(networkGroup.getDescription());
        groupResponse.setAccountName(networkGroup.getAccountName());
        groupResponse.setDomainId(networkGroup.getDomainId());
        groupResponse.setDomainName(ApiDBUtils.findDomainById(networkGroup.getDomainId()).getName());
        List<IngressRuleResponse> ruleResponses = new ArrayList<IngressRuleResponse>();
        for (IngressRule rule : ingressRules) {
            IngressRuleResponse ruleResponse = new IngressRuleResponse();
            ruleResponse.setRuleId(rule.getId());
            ruleResponse.setProtocol(rule.getProtocol());
            if ("icmp".equalsIgnoreCase(rule.getProtocol())) {
                // for ICMP the start/end port fields carry the ICMP type/code
                ruleResponse.setIcmpType(rule.getStartPort());
                ruleResponse.setIcmpCode(rule.getEndPort());
            } else {
                ruleResponse.setStartPort(rule.getStartPort());
                ruleResponse.setEndPort(rule.getEndPort());
            }
            if (rule.getAllowedNetworkGroup() != null) {
                // rule allows traffic from another network group
                ruleResponse.setNetworkGroupName(rule.getAllowedNetworkGroup());
                ruleResponse.setAccountName(rule.getAllowedNetGrpAcct());
            } else {
                // rule allows traffic from a CIDR
                ruleResponse.setCidr(rule.getAllowedSourceIpCidr());
            }
            ruleResponse.setObjectName("ingressrule");
            ruleResponses.add(ruleResponse);
        }
        groupResponse.setIngressRules(ruleResponses);
        groupResponse.setObjectName("networkgroup");
    }
    return groupResponse;
}
/**
 * Renders a network offering as an API response.
 *
 * @param offering the offering to render
 * @return the populated response
 */
@Override
public NetworkOfferingResponse createNetworkOfferingResponse(NetworkOffering offering) {
    NetworkOfferingResponse offeringResponse = new NetworkOfferingResponse();
    offeringResponse.setId(offering.getId());
    offeringResponse.setName(offering.getName());
    offeringResponse.setDisplayText(offering.getDisplayText());
    offeringResponse.setTags(offering.getTags());
    offeringResponse.setTrafficType(offering.getTrafficType().toString());
    // guest IP type is optional on the offering
    if (offering.getGuestIpType() != null) {
        offeringResponse.setType(offering.getGuestIpType().toString());
    }
    offeringResponse.setMaxconnections(offering.getConcurrentConnections());
    offeringResponse.setIsDefault(offering.isDefault());
    offeringResponse.setSpecifyVlan(offering.getSpecifyVlan());
    offeringResponse.setAvailability(offering.getAvailability().toString());
    offeringResponse.setObjectName("networkoffering");
    return offeringResponse;
}
/**
 * Renders a network as an API response, enriching it with its VLAN IP range,
 * the associated network offering, and the owning account/domain.
 *
 * @param network the network to render
 * @return the populated network response
 */
@Override
public NetworkResponse createNetworkResponse(Network network) {
NetworkResponse response = new NetworkResponse();
response.setId(network.getId());
response.setName(network.getName());
response.setDisplaytext(network.getDisplayText());
// the broadcast domain/uri, traffic type and guest type are all optional
if (network.getBroadcastDomainType() != null) {
response.setBroadcastDomainType(network.getBroadcastDomainType().toString());
}
if (network.getBroadcastUri() != null) {
response.setBroadcastUri(network.getBroadcastUri().toString());
}
if (network.getTrafficType() != null) {
response.setTrafficType(network.getTrafficType().name());
}
if (network.getGuestType() != null) {
response.setType(network.getGuestType().name());
}
//get start ip and end ip of corresponding vlan
List<? extends Vlan> vlan= ApiDBUtils.listVlanByNetworkId(network.getId());
if (vlan != null && !vlan.isEmpty()) {
// only the first VLAN is reported
Vlan singleVlan = vlan.get(0);
String ipRange = singleVlan.getIpRange();
// assumes the range is formatted "start-end" — a malformed range would
// throw ArrayIndexOutOfBounds on range[1]; TODO confirm upstream format
String[] range = ipRange.split("-");
response.setStartIp(range[0]);
response.setEndIp(range[1]);
response.setGateway(singleVlan.getVlanGateway());
response.setNetmask(singleVlan.getVlanNetmask());
response.setVlan(singleVlan.getVlanId());
}
response.setZoneId(network.getDataCenterId());
//populate network offering information
NetworkOffering networkOffering = ApiDBUtils.findNetworkOfferingById(network.getNetworkOfferingId());
if (networkOffering != null) {
response.setNetworkOfferingId(networkOffering.getId());
response.setNetworkOfferingName(networkOffering.getName());
response.setNetworkOfferingDisplayText(networkOffering.getDisplayText());
response.setIsSystem(networkOffering.isSystemOnly());
response.setNetworkOfferingAvailability(networkOffering.getAvailability().toString());
}
response.setIsShared(network.isShared());
response.setState(network.getState().toString());
response.setRelated(network.getRelated());
response.setDns1(network.getDns1());
response.setDns2(network.getDns2());
// owning account and its domain, when the account still exists
Account account = ApiDBUtils.findAccountById(network.getAccountId());
if (account != null) {
response.setAccountName(account.getAccountName());
Domain domain = ApiDBUtils.findDomainById(account.getDomainId());
response.setDomainId(domain.getId());
response.setDomain(domain.getName());
}
response.setObjectName("network");
return response;
}
}
|
Fixed the build failure caused by including a wrong class.
|
server/src/com/cloud/api/ApiResponseHelper.java
|
Fixed the build failure caused by including a wrong class.
|
|
Java
|
apache-2.0
|
f62d78d1cd45629d9ad9ea661add5303cdc93230
| 0
|
itfsw/mybatis-generator-plugin
|
/*
* Copyright (c) 2017.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.itfsw.mybatis.generator.plugins;
import com.itfsw.mybatis.generator.plugins.utils.BasePlugin;
import com.itfsw.mybatis.generator.plugins.utils.PluginTools;
import com.itfsw.mybatis.generator.plugins.utils.XmlElementGeneratorTools;
import org.mybatis.generator.api.IntrospectedColumn;
import org.mybatis.generator.api.IntrospectedTable;
import org.mybatis.generator.api.dom.java.*;
import org.mybatis.generator.api.dom.xml.*;
import org.mybatis.generator.codegen.mybatis3.MyBatis3FormattingUtilities;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
 * ---------------------------------------------------------------------------
 * Selective enhancement plugin: adds a column-selection API to generated
 * model classes and rewrites the generated selective insert/update/upsert
 * SQL maps so that only the columns explicitly chosen via selective(...)
 * participate in the statement.
 * ---------------------------------------------------------------------------
 * @author: hewei
 * @time:2017/4/20 15:39
 * ---------------------------------------------------------------------------
 */
public class SelectiveEnhancedPlugin extends BasePlugin {
    /**
     * {@inheritDoc}
     */
    @Override
    public boolean validate(List<String> warnings) {
        // Precondition: this plugin only works together with ModelColumnPlugin.
        if (!PluginTools.checkDependencyPlugin(getContext(), ModelColumnPlugin.class)) {
            logger.error("itfsw:插件" + this.getClass().getTypeName() + "插件需配合com.itfsw.mybatis.generator.plugins.ModelColumnPlugin插件使用!");
            return false;
        }
        // Plugin ordering: must be configured after UpsertPlugin.
        PluginTools.shouldAfterPlugins(getContext(), this.getClass(), UpsertPlugin.class);
        return super.validate(warnings);
    }
    /**
     * Model method generation: adds a selectiveColumns map plus the
     * isSelective()/isSelective(column)/selective(columns...) helpers to the
     * generated base record class.
     * Execution order: http://www.mybatis.org/generator/reference/pluggingIn.html
     * @param topLevelClass generated model class being augmented
     * @param introspectedTable table the model was generated from
     * @return always true (never vetoes generation)
     */
    @Override
    public boolean modelBaseRecordClassGenerated(TopLevelClass topLevelClass, IntrospectedTable introspectedTable) {
        // import
        topLevelClass.addImportedType(FullyQualifiedJavaType.getNewMapInstance());
        topLevelClass.addImportedType(FullyQualifiedJavaType.getNewHashMapInstance());
        // field: column name -> selected flag
        Field selectiveColumnsField = new Field("selectiveColumns", new FullyQualifiedJavaType("Map<String, Boolean>"));
        commentGenerator.addFieldComment(selectiveColumnsField, introspectedTable);
        selectiveColumnsField.setVisibility(JavaVisibility.PRIVATE);
        selectiveColumnsField.setInitializationString("new HashMap<String, Boolean>()");
        topLevelClass.addField(selectiveColumnsField);
        // Method isSelective(): true when any column has been selected
        Method mIsSelective = new Method("isSelective");
        commentGenerator.addGeneralMethodComment(mIsSelective, introspectedTable);
        mIsSelective.setVisibility(JavaVisibility.PUBLIC);
        mIsSelective.setReturnType(FullyQualifiedJavaType.getBooleanPrimitiveInstance());
        mIsSelective.addBodyLine("return this.selectiveColumns.size() > 0;");
        topLevelClass.addMethod(mIsSelective);
        // Method isSelective(column): true when the given column has been selected
        Method mIsSelective1 = new Method("isSelective");
        commentGenerator.addGeneralMethodComment(mIsSelective1, introspectedTable);
        mIsSelective1.setVisibility(JavaVisibility.PUBLIC);
        mIsSelective1.setReturnType(FullyQualifiedJavaType.getBooleanPrimitiveInstance());
        mIsSelective1.addParameter(new Parameter(FullyQualifiedJavaType.getStringInstance(), "column"));
        mIsSelective1.addBodyLine("return this.selectiveColumns.get(column) != null;");
        topLevelClass.addMethod(mIsSelective1);
        // Method selective(columns...): fluent setter replacing the selection
        Method mSelective = new Method("selective");
        commentGenerator.addGeneralMethodComment(mSelective, introspectedTable);
        mSelective.setVisibility(JavaVisibility.PUBLIC);
        mSelective.setReturnType(topLevelClass.getType());
        mSelective.addParameter(new Parameter(new FullyQualifiedJavaType(ModelColumnPlugin.ENUM_NAME), "columns", true));
        mSelective.addBodyLine("this.selectiveColumns.clear();");
        mSelective.addBodyLine("if (columns != null) {");
        mSelective.addBodyLine("for (" + ModelColumnPlugin.ENUM_NAME + " column : columns) {");
        mSelective.addBodyLine("this.selectiveColumns.put(column.value(), true);");
        mSelective.addBodyLine("}");
        mSelective.addBodyLine("}");
        mSelective.addBodyLine("return this;");
        topLevelClass.addMethod(mSelective);
        return true;
    }
    /**
     * SQL map generation: rewrites the selective statements so each column
     * fragment is guarded by isSelective('column').
     * Execution order: http://www.mybatis.org/generator/reference/pluggingIn.html
     * @param document generated mapper XML document
     * @param introspectedTable table the mapper was generated from
     * @return always true (never vetoes generation)
     */
    @Override
    public boolean sqlMapDocumentGenerated(Document document, IntrospectedTable introspectedTable) {
        List<Element> rootElements = document.getRootElement().getElements();
        for (Element rootElement : rootElements) {
            if (rootElement instanceof XmlElement) {
                XmlElement xmlElement = (XmlElement) rootElement;
                List<Attribute> attributes = xmlElement.getAttributes();
                // find the statement id
                String id = "";
                for (Attribute attribute : attributes) {
                    if (attribute.getName().equals("id")) {
                        id = attribute.getValue();
                    }
                }
                // ====================================== 1. insertSelective ======================================
                if ("insertSelective".equals(id)) {
                    List<XmlElement> eles = XmlElementGeneratorTools.findXmlElements(xmlElement, "trim");
                    for (XmlElement ele : eles) {
                        this.replaceEle(ele, "_parameter.", introspectedTable);
                    }
                }
                // ====================================== 2. updateByExampleSelective ======================================
                if ("updateByExampleSelective".equals(id)) {
                    List<XmlElement> eles = XmlElementGeneratorTools.findXmlElements(xmlElement, "set");
                    for (XmlElement ele : eles) {
                        this.replaceEle(ele, "record.", introspectedTable);
                    }
                }
                // ====================================== 3. updateByPrimaryKeySelective ======================================
                if ("updateByPrimaryKeySelective".equals(id)) {
                    List<XmlElement> eles = XmlElementGeneratorTools.findXmlElements(xmlElement, "set");
                    for (XmlElement ele : eles) {
                        this.replaceEle(ele, "_parameter.", introspectedTable);
                    }
                }
                // ====================================== 4. upsertSelective ======================================
                if ("upsertSelective".equals(id)) {
                    List<XmlElement> eles = XmlElementGeneratorTools.findXmlElements(xmlElement, "trim");
                    for (XmlElement ele : eles) {
                        this.replaceEle(ele, "_parameter.", introspectedTable);
                    }
                }
                // ====================================== 5. upsertByExampleSelective ======================================
                if ("upsertByExampleSelective".equals(id)) {
                    List<XmlElement> eles = XmlElementGeneratorTools.findXmlElements(xmlElement, "trim");
                    this.replaceEle(eles.get(0), "record.", introspectedTable);
                    // the second trim of upsertByExampleSelective is special and is handled separately
                    this.replaceEleForUpsertByExampleSelective(eles.get(1), "record.", introspectedTable, !introspectedTable.getRules().generateRecordWithBLOBsClass());
                    List<XmlElement> eles1 = XmlElementGeneratorTools.findXmlElements(xmlElement, "set");
                    for (XmlElement ele : eles1) {
                        this.replaceEle(ele, "record.", introspectedTable);
                    }
                }
                // ====================================== 6. upsertSelectiveWithBLOBs ======================================
                if ("upsertSelectiveWithBLOBs".equals(id)) {
                    List<XmlElement> eles = XmlElementGeneratorTools.findXmlElements(xmlElement, "trim");
                    for (XmlElement ele : eles) {
                        this.replaceEle(ele, "_parameter.", introspectedTable);
                    }
                }
                // ====================================== 7. upsertByExampleSelectiveWithBLOBs ======================================
                if ("upsertByExampleSelectiveWithBLOBs".equals(id)) {
                    List<XmlElement> eles = XmlElementGeneratorTools.findXmlElements(xmlElement, "trim");
                    this.replaceEle(eles.get(0), "record.", introspectedTable);
                    // the second trim of upsertByExampleSelective is special and is handled separately
                    this.replaceEleForUpsertByExampleSelective(eles.get(1), "record.", introspectedTable, true);
                    List<XmlElement> eles1 = XmlElementGeneratorTools.findXmlElements(xmlElement, "set");
                    for (XmlElement ele : eles1) {
                        this.replaceEle(ele, "record.", introspectedTable);
                    }
                }
            }
        }
        return true;
    }
    /**
     * Wraps the element's children in a choose node: when isSelective() is
     * true each column fragment is guarded by isSelective('column'),
     * otherwise the original fragments are emitted unchanged.
     * @param element the trim/set element whose children are rewritten in place
     * @param prefix parameter access prefix ("_parameter." or "record.")
     * @param introspectedTable table used to resolve column names
     */
    private void replaceEle(XmlElement element, String prefix, IntrospectedTable introspectedTable) {
        // choose
        XmlElement chooseEle = new XmlElement("choose");
        // when
        XmlElement whenEle = new XmlElement("when");
        whenEle.addAttribute(new Attribute("test", prefix + "isSelective()"));
        for (Element ele : element.getElements()) {
            // string primary keys are emitted without an if wrapper node
            if (ele instanceof XmlElement){
                // text node inside the if element
                XmlElement xmlElement = (XmlElement) ele;
                // work out the column name
                String text = ((TextElement) xmlElement.getElements().get(0)).getContent();
                String columnName = "";
                if (text.matches("#\\{.*\\},?")) {
                    Pattern pattern = Pattern.compile("#\\{(.*?),.*\\},?");
                    Matcher matcher = pattern.matcher(text);
                    if (matcher.find()){
                        String field = matcher.group(1);
                        // the #{...} expression names the Java property; map it back to its column
                        for (IntrospectedColumn column : introspectedTable.getAllColumns()) {
                            if (column.getJavaProperty().equals(field)) {
                                columnName = column.getActualColumnName();
                            }
                        }
                    }
                } else {
                    if (text.matches(".*=.*")){
                        columnName = text.split("=")[0];
                    } else {
                        columnName = text.replaceAll(",", "");
                    }
                    // bug fixed: strip the delimiters that autoDelimitKeywords adds around the column name
                    columnName = columnName.trim().replaceAll("`", "").replaceAll("\"", "").replaceAll("'", "");
                }
                XmlElement ifEle = new XmlElement("if");
                ifEle.addAttribute(new Attribute("test", prefix + "isSelective(\'" + MyBatis3FormattingUtilities.getEscapedColumnName(introspectedTable.getColumn(columnName)) + "\')"));
                for (Element ifChild : xmlElement.getElements()){
                    ifEle.addElement(ifChild);
                }
                whenEle.addElement(ifEle);
            } else {
                whenEle.addElement(ele);
            }
        }
        // otherwise: emit the original fragments unchanged
        XmlElement otherwiseEle = new XmlElement("otherwise");
        for (Element ele : element.getElements()) {
            otherwiseEle.addElement(ele);
        }
        chooseEle.addElement(whenEle);
        chooseEle.addElement(otherwiseEle);
        // clear the original children and install the new choose element
        element.getElements().clear();
        element.addElement(chooseEle);
    }
    /**
     * Rewrites the special second trim of upsertByExampleSelective: the
     * selective branch is rebuilt from the table's columns rather than from
     * the existing child fragments.
     * @param element the trim element rewritten in place
     * @param prefix parameter access prefix ("record.")
     * @param introspectedTable table used to enumerate columns
     * @param allColumns true to include BLOB columns, false for non-BLOB only
     */
    private void replaceEleForUpsertByExampleSelective(XmlElement element, String prefix, IntrospectedTable introspectedTable, boolean allColumns) {
        // choose
        XmlElement chooseEle = new XmlElement("choose");
        // when
        XmlElement whenEle = new XmlElement("when");
        whenEle.addAttribute(new Attribute("test", prefix + "isSelective()"));
        for (IntrospectedColumn introspectedColumn : (allColumns ? introspectedTable.getAllColumns() : introspectedTable.getNonBLOBColumns())) {
            XmlElement eleIf = new XmlElement("if");
            eleIf.addAttribute(new Attribute("test", prefix + "isSelective(\'" + introspectedColumn.getActualColumnName() + "\')"));
            eleIf.addElement(new TextElement(MyBatis3FormattingUtilities.getParameterClause(introspectedColumn, prefix) + ","));
            whenEle.addElement(eleIf);
        }
        // otherwise: emit the original fragments unchanged
        XmlElement otherwiseEle = new XmlElement("otherwise");
        for (Element ele : element.getElements()) {
            otherwiseEle.addElement(ele);
        }
        chooseEle.addElement(whenEle);
        chooseEle.addElement(otherwiseEle);
        // clear the original children and install the new choose element
        element.getElements().clear();
        element.addElement(chooseEle);
    }
}
|
src/main/java/com/itfsw/mybatis/generator/plugins/SelectiveEnhancedPlugin.java
|
/*
* Copyright (c) 2017.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.itfsw.mybatis.generator.plugins;
import com.itfsw.mybatis.generator.plugins.utils.BasePlugin;
import com.itfsw.mybatis.generator.plugins.utils.PluginTools;
import com.itfsw.mybatis.generator.plugins.utils.XmlElementGeneratorTools;
import org.mybatis.generator.api.IntrospectedColumn;
import org.mybatis.generator.api.IntrospectedTable;
import org.mybatis.generator.api.dom.java.*;
import org.mybatis.generator.api.dom.xml.*;
import org.mybatis.generator.codegen.mybatis3.MyBatis3FormattingUtilities;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
 * ---------------------------------------------------------------------------
 * Selective enhancement plugin: adds a column-selection API to generated
 * model classes and rewrites the generated selective insert/update/upsert
 * SQL maps so that only the columns explicitly chosen via selective(...)
 * participate in the statement.
 * ---------------------------------------------------------------------------
 * @author: hewei
 * @time:2017/4/20 15:39
 * ---------------------------------------------------------------------------
 */
public class SelectiveEnhancedPlugin extends BasePlugin {
    /**
     * {@inheritDoc}
     */
    @Override
    public boolean validate(List<String> warnings) {
        // Precondition: this plugin only works together with ModelColumnPlugin.
        if (!PluginTools.checkDependencyPlugin(getContext(), ModelColumnPlugin.class)) {
            logger.error("itfsw:插件" + this.getClass().getTypeName() + "插件需配合com.itfsw.mybatis.generator.plugins.ModelColumnPlugin插件使用!");
            return false;
        }
        // Plugin ordering: must be configured after UpsertPlugin.
        PluginTools.shouldAfterPlugins(getContext(), this.getClass(), UpsertPlugin.class);
        return super.validate(warnings);
    }
    /**
     * Model method generation: adds a selectiveColumns map plus the
     * isSelective()/isSelective(column)/selective(columns...) helpers to the
     * generated base record class.
     * Execution order: http://www.mybatis.org/generator/reference/pluggingIn.html
     * @param topLevelClass generated model class being augmented
     * @param introspectedTable table the model was generated from
     * @return always true (never vetoes generation)
     */
    @Override
    public boolean modelBaseRecordClassGenerated(TopLevelClass topLevelClass, IntrospectedTable introspectedTable) {
        // import
        topLevelClass.addImportedType(FullyQualifiedJavaType.getNewMapInstance());
        topLevelClass.addImportedType(FullyQualifiedJavaType.getNewHashMapInstance());
        // field: column name -> selected flag
        Field selectiveColumnsField = new Field("selectiveColumns", new FullyQualifiedJavaType("Map<String, Boolean>"));
        commentGenerator.addFieldComment(selectiveColumnsField, introspectedTable);
        selectiveColumnsField.setVisibility(JavaVisibility.PRIVATE);
        selectiveColumnsField.setInitializationString("new HashMap<String, Boolean>()");
        topLevelClass.addField(selectiveColumnsField);
        // Method isSelective(): true when any column has been selected
        Method mIsSelective = new Method("isSelective");
        commentGenerator.addGeneralMethodComment(mIsSelective, introspectedTable);
        mIsSelective.setVisibility(JavaVisibility.PUBLIC);
        mIsSelective.setReturnType(FullyQualifiedJavaType.getBooleanPrimitiveInstance());
        mIsSelective.addBodyLine("return this.selectiveColumns.size() > 0;");
        topLevelClass.addMethod(mIsSelective);
        // Method isSelective(column): true when the given column has been selected
        Method mIsSelective1 = new Method("isSelective");
        commentGenerator.addGeneralMethodComment(mIsSelective1, introspectedTable);
        mIsSelective1.setVisibility(JavaVisibility.PUBLIC);
        mIsSelective1.setReturnType(FullyQualifiedJavaType.getBooleanPrimitiveInstance());
        mIsSelective1.addParameter(new Parameter(FullyQualifiedJavaType.getStringInstance(), "column"));
        mIsSelective1.addBodyLine("return this.selectiveColumns.get(column) != null;");
        topLevelClass.addMethod(mIsSelective1);
        // Method selective(columns...): fluent setter replacing the selection
        Method mSelective = new Method("selective");
        commentGenerator.addGeneralMethodComment(mSelective, introspectedTable);
        mSelective.setVisibility(JavaVisibility.PUBLIC);
        mSelective.setReturnType(topLevelClass.getType());
        mSelective.addParameter(new Parameter(new FullyQualifiedJavaType(ModelColumnPlugin.ENUM_NAME), "columns", true));
        mSelective.addBodyLine("this.selectiveColumns.clear();");
        mSelective.addBodyLine("if (columns != null) {");
        mSelective.addBodyLine("for (" + ModelColumnPlugin.ENUM_NAME + " column : columns) {");
        mSelective.addBodyLine("this.selectiveColumns.put(column.value(), true);");
        mSelective.addBodyLine("}");
        mSelective.addBodyLine("}");
        mSelective.addBodyLine("return this;");
        topLevelClass.addMethod(mSelective);
        return true;
    }
    /**
     * SQL map generation: rewrites the selective statements so each column
     * fragment is guarded by isSelective('column').
     * Execution order: http://www.mybatis.org/generator/reference/pluggingIn.html
     * @param document generated mapper XML document
     * @param introspectedTable table the mapper was generated from
     * @return always true (never vetoes generation)
     */
    @Override
    public boolean sqlMapDocumentGenerated(Document document, IntrospectedTable introspectedTable) {
        List<Element> rootElements = document.getRootElement().getElements();
        for (Element rootElement : rootElements) {
            if (rootElement instanceof XmlElement) {
                XmlElement xmlElement = (XmlElement) rootElement;
                List<Attribute> attributes = xmlElement.getAttributes();
                // find the statement id
                String id = "";
                for (Attribute attribute : attributes) {
                    if (attribute.getName().equals("id")) {
                        id = attribute.getValue();
                    }
                }
                // ====================================== 1. insertSelective ======================================
                if ("insertSelective".equals(id)) {
                    List<XmlElement> eles = XmlElementGeneratorTools.findXmlElements(xmlElement, "trim");
                    for (XmlElement ele : eles) {
                        this.replaceEle(ele, "_parameter.", introspectedTable);
                    }
                }
                // ====================================== 2. updateByExampleSelective ======================================
                if ("updateByExampleSelective".equals(id)) {
                    List<XmlElement> eles = XmlElementGeneratorTools.findXmlElements(xmlElement, "set");
                    for (XmlElement ele : eles) {
                        this.replaceEle(ele, "record.", introspectedTable);
                    }
                }
                // ====================================== 3. updateByPrimaryKeySelective ======================================
                if ("updateByPrimaryKeySelective".equals(id)) {
                    List<XmlElement> eles = XmlElementGeneratorTools.findXmlElements(xmlElement, "set");
                    for (XmlElement ele : eles) {
                        this.replaceEle(ele, "_parameter.", introspectedTable);
                    }
                }
                // ====================================== 4. upsertSelective ======================================
                if ("upsertSelective".equals(id)) {
                    List<XmlElement> eles = XmlElementGeneratorTools.findXmlElements(xmlElement, "trim");
                    for (XmlElement ele : eles) {
                        this.replaceEle(ele, "_parameter.", introspectedTable);
                    }
                }
                // ====================================== 5. upsertByExampleSelective ======================================
                if ("upsertByExampleSelective".equals(id)) {
                    List<XmlElement> eles = XmlElementGeneratorTools.findXmlElements(xmlElement, "trim");
                    this.replaceEle(eles.get(0), "record.", introspectedTable);
                    // the second trim of upsertByExampleSelective is special and is handled separately
                    this.replaceEleForUpsertByExampleSelective(eles.get(1), "record.", introspectedTable, !introspectedTable.getRules().generateRecordWithBLOBsClass());
                    List<XmlElement> eles1 = XmlElementGeneratorTools.findXmlElements(xmlElement, "set");
                    for (XmlElement ele : eles1) {
                        this.replaceEle(ele, "record.", introspectedTable);
                    }
                }
                // ====================================== 6. upsertSelectiveWithBLOBs ======================================
                if ("upsertSelectiveWithBLOBs".equals(id)) {
                    List<XmlElement> eles = XmlElementGeneratorTools.findXmlElements(xmlElement, "trim");
                    for (XmlElement ele : eles) {
                        this.replaceEle(ele, "_parameter.", introspectedTable);
                    }
                }
                // ====================================== 7. upsertByExampleSelectiveWithBLOBs ======================================
                if ("upsertByExampleSelectiveWithBLOBs".equals(id)) {
                    List<XmlElement> eles = XmlElementGeneratorTools.findXmlElements(xmlElement, "trim");
                    this.replaceEle(eles.get(0), "record.", introspectedTable);
                    // the second trim of upsertByExampleSelective is special and is handled separately
                    this.replaceEleForUpsertByExampleSelective(eles.get(1), "record.", introspectedTable, true);
                    List<XmlElement> eles1 = XmlElementGeneratorTools.findXmlElements(xmlElement, "set");
                    for (XmlElement ele : eles1) {
                        this.replaceEle(ele, "record.", introspectedTable);
                    }
                }
            }
        }
        return true;
    }
    /**
     * Wraps the element's children in a choose node: when isSelective() is
     * true each column fragment is guarded by isSelective('name'),
     * otherwise the original fragments are emitted unchanged.
     * @param element the trim/set element whose children are rewritten in place
     * @param prefix parameter access prefix ("_parameter." or "record.")
     * @param introspectedTable table used to resolve column names
     */
    private void replaceEle(XmlElement element, String prefix, IntrospectedTable introspectedTable) {
        // choose
        XmlElement chooseEle = new XmlElement("choose");
        // when
        XmlElement whenEle = new XmlElement("when");
        whenEle.addAttribute(new Attribute("test", prefix + "isSelective()"));
        for (Element ele : element.getElements()) {
            // string primary keys are emitted without an if wrapper node
            if (ele instanceof XmlElement){
                // text node inside the if element
                XmlElement xmlElement = (XmlElement) ele;
                // work out the field/column name
                String text = ((TextElement) xmlElement.getElements().get(0)).getContent();
                String field = "";
                if (text.matches("#\\{.*\\},?")) {
                    Pattern pattern = Pattern.compile("#\\{(.*?),.*\\},?");
                    Matcher matcher = pattern.matcher(text);
                    if (matcher.find()){
                        // NOTE(review): matcher.group(1) yields the model's Java
                        // property name, not the DB column name, while the other
                        // branch produces an escaped column name — verify that
                        // the generated isSelective(...) key matches what the
                        // model's selective(...) stores.
                        field = matcher.group(1);
                    }
                } else {
                    String columnName;
                    if (text.matches(".*=.*")){
                        columnName = text.split("=")[0];
                    } else {
                        columnName = text.replaceAll(",", "");
                    }
                    // bug fixed: strip the delimiters that autoDelimitKeywords adds around the column name
                    columnName = columnName.trim().replaceAll("`", "").replaceAll("\"", "").replaceAll("'", "");
                    IntrospectedColumn column = introspectedTable.getColumn(columnName);
                    field = MyBatis3FormattingUtilities.getEscapedColumnName(column);
                }
                XmlElement ifEle = new XmlElement("if");
                ifEle.addAttribute(new Attribute("test", prefix + "isSelective(\'" + field + "\')"));
                for (Element ifChild : xmlElement.getElements()){
                    ifEle.addElement(ifChild);
                }
                whenEle.addElement(ifEle);
            } else {
                whenEle.addElement(ele);
            }
        }
        // otherwise: emit the original fragments unchanged
        XmlElement otherwiseEle = new XmlElement("otherwise");
        for (Element ele : element.getElements()) {
            otherwiseEle.addElement(ele);
        }
        chooseEle.addElement(whenEle);
        chooseEle.addElement(otherwiseEle);
        // clear the original children and install the new choose element
        element.getElements().clear();
        element.addElement(chooseEle);
    }
    /**
     * Rewrites the special second trim of upsertByExampleSelective: the
     * selective branch is rebuilt from the table's columns rather than from
     * the existing child fragments.
     * @param element the trim element rewritten in place
     * @param prefix parameter access prefix ("record.")
     * @param introspectedTable table used to enumerate columns
     * @param allColumns true to include BLOB columns, false for non-BLOB only
     */
    private void replaceEleForUpsertByExampleSelective(XmlElement element, String prefix, IntrospectedTable introspectedTable, boolean allColumns) {
        // choose
        XmlElement chooseEle = new XmlElement("choose");
        // when
        XmlElement whenEle = new XmlElement("when");
        whenEle.addAttribute(new Attribute("test", prefix + "isSelective()"));
        for (IntrospectedColumn introspectedColumn : (allColumns ? introspectedTable.getAllColumns() : introspectedTable.getNonBLOBColumns())) {
            XmlElement eleIf = new XmlElement("if");
            eleIf.addAttribute(new Attribute("test", prefix + "isSelective(\'" + introspectedColumn.getActualColumnName() + "\')"));
            eleIf.addElement(new TextElement(MyBatis3FormattingUtilities.getParameterClause(introspectedColumn, prefix) + ","));
            whenEle.addElement(eleIf);
        }
        // otherwise: emit the original fragments unchanged
        XmlElement otherwiseEle = new XmlElement("otherwise");
        for (Element ele : element.getElements()) {
            otherwiseEle.addElement(ele);
        }
        chooseEle.addElement(whenEle);
        chooseEle.addElement(otherwiseEle);
        // clear the original children and install the new choose element
        element.getElements().clear();
        element.addElement(chooseEle);
    }
}
|
bugfix: Selective增强插件在对于 #{xxx,jdbcType=INTEGER},这种节点处理时之前采用正则取column名称是错误的,这里取得的其实是model的属性名称
|
src/main/java/com/itfsw/mybatis/generator/plugins/SelectiveEnhancedPlugin.java
|
bugfix: Selective增强插件在对于 #{xxx,jdbcType=INTEGER},这种节点处理时之前采用正则取column名称是错误的,这里取得的其实是model的属性名称
|
|
Java
|
apache-2.0
|
5339aea7393a107a88022175255e79b4a965ec44
| 0
|
DT9/osmdroid,osmdroid/osmdroid,ak-67/osmdroid,ak-67/osmdroid,DT9/osmdroid,sibext/osmdroid-1,Sarfarazsajjad/osmdroid,fpoyer/osmdroid,1nv4d3r5/osmdroid,osmdroid/osmdroid,prembasumatary/osmdroid,fpoyer/osmdroid,osmdroid/osmdroid,sibext/osmdroid-1,dozd/osmdroid,microg/android_external_osmdroid,osmdroid/osmdroid,Sarfarazsajjad/osmdroid,mozilla/osmdroid,DShamaev/osmdroid,dozd/osmdroid,DShamaev/osmdroid,microg/android_external_osmdroid,GeoODK/osmdroid,GeoODK/osmdroid,1nv4d3r5/osmdroid,prembasumatary/osmdroid,hyl1987419/osmdroid,hyl1987419/osmdroid,beemogmbh/osmdroid,beemogmbh/osmdroid
|
// Created by plusminus on 21:46:41 - 25.09.2008
package org.andnav.osm.tileprovider;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import org.andnav.osm.views.util.OpenStreetMapRendererInfo;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Tile provider that serves tiles from the local filesystem cache and
 * delegates to an {@link OpenStreetMapTileDownloader} when a tile is not
 * yet cached.
 *
 * @author Nicolas Gramlich
 */
public class OpenStreetMapTileFilesystemProvider extends OpenStreetMapAsyncTileProvider {
    // ===========================================================
    // Constants
    // ===========================================================
    private static final Logger logger = LoggerFactory.getLogger(OpenStreetMapTileFilesystemProvider.class);
    // ===========================================================
    // Fields
    // ===========================================================
    /** online provider used when a tile is not found on disk */
    protected OpenStreetMapTileDownloader mTileDownloader;
    // ===========================================================
    // Constructors
    // ===========================================================
    /**
     * @param pCallback callback notified when tiles have been loaded
     */
    public OpenStreetMapTileFilesystemProvider(final IOpenStreetMapTileProviderCallback pCallback) {
        super(pCallback, NUMBER_OF_TILE_FILESYSTEM_THREADS, TILE_FILESYSTEM_MAXIMUM_QUEUE_SIZE);
        this.mTileDownloader = new OpenStreetMapTileDownloader(pCallback, this);
    }
    // ===========================================================
    // Getter & Setter
    // ===========================================================
    // ===========================================================
    // Methods from SuperClass/Interfaces
    // ===========================================================
    @Override
    protected String threadGroupName() {
        return "filesystem";
    }
    @Override
    protected Runnable getTileLoader() {
        return new TileLoader();
    }
    /**
     * Stops all workers, the service is shutting down.
     */
    @Override
    public void stopWorkers() {
        super.stopWorkers();
        this.mTileDownloader.stopWorkers();
    }
    // ===========================================================
    // Methods
    // ===========================================================
    /** Builds the on-disk path for a tile from its renderer, zoom and x/y. */
    private String buildPath(final OpenStreetMapTile tile) {
        final OpenStreetMapRendererInfo renderer = OpenStreetMapRendererInfo.values()[tile.getRendererId()];
        return TILE_PATH_BASE + renderer.name() + "/" + tile.getZoomLevel() + "/"
                + tile.getX() + "/" + tile.getY() + renderer.IMAGE_FILENAMEENDING + TILE_PATH_EXTENSION;
    }
    /**
     * Get the file location for the tile.
     * @param tile the tile whose cache file is wanted
     * @return the cache file (parent directories guaranteed to exist)
     * @throws CantContinueException if the directory containing the file doesn't exist
     *         and can't be created
     */
    File getOutputFile(final OpenStreetMapTile tile) throws CantContinueException {
        final File file = new File(buildPath(tile));
        final File parent = file.getParentFile();
        // check exists twice because maybe mkdirs returned false because another thread created it
        if (!parent.exists() && !parent.mkdirs() && !parent.exists()) {
            throw new CantContinueException("Tile directory doesn't exist: " + parent);
        }
        return file;
    }
    /**
     * Writes the tile data to the given file, overwriting any existing content.
     * Bug fix: the stream is now closed in a finally block so it no longer
     * leaks when write() or flush() throws.
     * @throws IOException if writing fails
     */
    void saveFile(final OpenStreetMapTile tile, final File outputFile, final byte[] someData) throws IOException {
        final OutputStream bos = new BufferedOutputStream(new FileOutputStream(outputFile, false), StreamUtils.IO_BUFFER_SIZE);
        try {
            bos.write(someData);
            bos.flush();
        } finally {
            bos.close();
        }
    }
    // ===========================================================
    // Inner and Anonymous Classes
    // ===========================================================
    private class TileLoader extends OpenStreetMapAsyncTileProvider.TileLoader {
        @Override
        public void loadTile(final OpenStreetMapTile aTile) throws CantContinueException {
            final File tileFile = getOutputFile(aTile);
            try {
                if (tileFile.exists()) {
                    if (DEBUGMODE)
                        logger.debug("Loaded tile: " + aTile);
                    tileLoaded(aTile, tileFile.getPath(), true);
                } else {
                    if (DEBUGMODE)
                        logger.debug("Tile not exist, request for download: " + aTile);
                    mTileDownloader.loadMapTileAsync(aTile);
                    // don't refresh the screen because there's nothing new
                    tileLoaded(aTile, null, false);
                }
            } catch (final Throwable e) {
                logger.error("Error loading tile", e);
                tileLoaded(aTile, null, false);
            }
        }
    }
}
|
osmdroid-android/src/org/andnav/osm/tileprovider/OpenStreetMapTileFilesystemProvider.java
|
// Created by plusminus on 21:46:41 - 25.09.2008
package org.andnav.osm.tileprovider;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import org.andnav.osm.views.util.OpenStreetMapRendererInfo;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Tile provider that serves tiles from the local filesystem cache and
 * delegates to an {@link OpenStreetMapTileDownloader} when a tile is not
 * yet cached.
 *
 * @author Nicolas Gramlich
 */
public class OpenStreetMapTileFilesystemProvider extends OpenStreetMapAsyncTileProvider {
    // ===========================================================
    // Constants
    // ===========================================================
    private static final Logger logger = LoggerFactory.getLogger(OpenStreetMapTileFilesystemProvider.class);
    // ===========================================================
    // Fields
    // ===========================================================
    /** online provider used when a tile is not found on disk */
    protected OpenStreetMapTileDownloader mTileDownloader;
    // ===========================================================
    // Constructors
    // ===========================================================
    /**
     * @param pCallback callback notified when tiles have been loaded
     */
    public OpenStreetMapTileFilesystemProvider(final IOpenStreetMapTileProviderCallback pCallback) {
        super(pCallback, NUMBER_OF_TILE_FILESYSTEM_THREADS, TILE_FILESYSTEM_MAXIMUM_QUEUE_SIZE);
        this.mTileDownloader = new OpenStreetMapTileDownloader(pCallback, this);
    }
    // ===========================================================
    // Getter & Setter
    // ===========================================================
    // ===========================================================
    // Methods from SuperClass/Interfaces
    // ===========================================================
    @Override
    protected String threadGroupName() {
        return "filesystem";
    }
    @Override
    protected Runnable getTileLoader() {
        return new TileLoader();
    }
    /**
     * Stops all workers, the service is shutting down.
     */
    @Override
    public void stopWorkers() {
        super.stopWorkers();
        this.mTileDownloader.stopWorkers();
    }
    // ===========================================================
    // Methods
    // ===========================================================
    /** Builds the on-disk path for a tile from its renderer, zoom and x/y. */
    private String buildPath(final OpenStreetMapTile tile) {
        final OpenStreetMapRendererInfo renderer = OpenStreetMapRendererInfo.values()[tile.getRendererId()];
        return TILE_PATH_BASE + renderer.name() + "/" + tile.getZoomLevel() + "/"
                + tile.getX() + "/" + tile.getY() + renderer.IMAGE_FILENAMEENDING + TILE_PATH_EXTENSION;
    }
    /**
     * Get the file location for the tile.
     * @param tile the tile whose cache file is wanted
     * @return the cache file (parent directories guaranteed to exist)
     * @throws CantContinueException if the directory containing the file doesn't exist
     *         and can't be created
     */
    File getOutputFile(final OpenStreetMapTile tile) throws CantContinueException {
        final File file = new File(buildPath(tile));
        final File parent = file.getParentFile();
        // Bug fix: mkdirs() returns false when another thread created the
        // directory concurrently, so re-check exists() before failing.
        if (!parent.exists() && !parent.mkdirs() && !parent.exists()) {
            throw new CantContinueException("Tile directory doesn't exist: " + parent);
        }
        return file;
    }
    /**
     * Writes the tile data to the given file, overwriting any existing content.
     * Bug fix: the stream is now closed in a finally block so it no longer
     * leaks when write() or flush() throws.
     * @throws IOException if writing fails
     */
    void saveFile(final OpenStreetMapTile tile, final File outputFile, final byte[] someData) throws IOException {
        final OutputStream bos = new BufferedOutputStream(new FileOutputStream(outputFile, false), StreamUtils.IO_BUFFER_SIZE);
        try {
            bos.write(someData);
            bos.flush();
        } finally {
            bos.close();
        }
    }
    // ===========================================================
    // Inner and Anonymous Classes
    // ===========================================================
    private class TileLoader extends OpenStreetMapAsyncTileProvider.TileLoader {
        @Override
        public void loadTile(final OpenStreetMapTile aTile) throws CantContinueException {
            final File tileFile = getOutputFile(aTile);
            try {
                if (tileFile.exists()) {
                    if (DEBUGMODE)
                        logger.debug("Loaded tile: " + aTile);
                    tileLoaded(aTile, tileFile.getPath(), true);
                } else {
                    if (DEBUGMODE)
                        logger.debug("Tile not exist, request for download: " + aTile);
                    mTileDownloader.loadMapTileAsync(aTile);
                    // don't refresh the screen because there's nothing new
                    tileLoaded(aTile, null, false);
                }
            } catch (final Throwable e) {
                logger.error("Error loading tile", e);
                tileLoaded(aTile, null, false);
            }
        }
    }
}
|
check for asynchronous directory creation
|
osmdroid-android/src/org/andnav/osm/tileprovider/OpenStreetMapTileFilesystemProvider.java
|
check for asynchronous directory creation
|
|
Java
|
apache-2.0
|
6a4e930b00b9455d4001c883b4f6794004c6380c
| 0
|
Appstrakt/ListViewAnimations
|
/*
* Copyright (C) 2013 The Android Open Source Project
* Copyright 2013 Niek Haarman
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.nhaarman.listviewanimations.widget;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Rect;
import android.graphics.drawable.BitmapDrawable;
import android.graphics.drawable.Drawable;
import android.util.AttributeSet;
import android.util.DisplayMetrics;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewConfiguration;
import android.view.ViewGroup;
import android.view.ViewTreeObserver;
import android.widget.AbsListView;
import android.widget.AdapterView;
import android.widget.BaseAdapter;
import android.widget.HeaderViewListAdapter;
import android.widget.ListAdapter;
import android.widget.ListView;
import com.nhaarman.listviewanimations.itemmanipulation.swipedismiss.SwipeOnTouchListener;
import com.nineoldandroids.animation.Animator;
import com.nineoldandroids.animation.AnimatorListenerAdapter;
import com.nineoldandroids.animation.ObjectAnimator;
import com.nineoldandroids.animation.TypeEvaluator;
import com.nineoldandroids.animation.ValueAnimator;
import com.nineoldandroids.view.ViewHelper;
/**
* The DynamicListView is an extension of {@link ListView} that supports cell dragging
* and swapping.
* </p>
* Make sure your adapter has stable ids, and override {@link ListAdapter#hasStableIds()} to return true.</br>
* </p>
* This layout is in charge of positioning the hover cell in the correct location
* on the screen in response to user touch events. It uses the position of the
* hover cell to determine when two cells should be swapped. If two cells should
* be swapped, all the corresponding data set and layout changes are handled here.
* </p>
* If no cell is selected, all the touch events are passed down to the ListView
* and behave normally. If one of the items in the ListView experiences a
* long press event, the contents of its current visible state are captured as
* a bitmap and its visibility is set to INVISIBLE. A hover cell is then created and
* added to this layout as an overlaying BitmapDrawable above the ListView. Once the
* hover cell is translated some distance to signify an item swap, a data set change
* accompanied by animation takes place. When the user releases the hover cell,
* it animates into its corresponding position in the ListView.
* </p>
* When the hover cell is either above or below the bounds of the ListView, this
* ListView also scrolls on its own so as to reveal additional content.
* </p>
* See http://youtu.be/_BZIvjMgH-Q
*/
public class DynamicListView extends ListView {
private int mOriginalTranscriptMode;
public interface OnHoverCellListener {
public Drawable onHoverCellCreated(Drawable hoverCellDrawable);
}
/**
* Implement this interface to be notified of ordering changes. Call {@link #setOnItemMovedListener(com.nhaarman.listviewanimations.widget.DynamicListView.OnItemMovedListener)}.
*/
public interface OnItemMovedListener {
/**
* Called after an item is dropped and moved.
*
* @param newPosition the new position of the item.
*/
public void onItemMoved(int newPosition);
}
private final int SMOOTH_SCROLL_AMOUNT_AT_EDGE = 15;
private final int MOVE_DURATION = 150;
private int mLastEventY = -1, mLastEventX = -1;
private int mDownY = -1;
private int mDownX = -1;
private int mTotalOffset = 0;
private boolean mCellIsMobile = false;
private boolean mIsMobileScrolling = false;
private int mSmoothScrollAmountAtEdge = 0;
private final int INVALID_ID = -1;
private long mAboveItemId = INVALID_ID;
private long mMobileItemId = INVALID_ID;
private long mBelowItemId = INVALID_ID;
private Drawable mHoverCell;
private Rect mHoverCellCurrentBounds;
private Rect mHoverCellOriginalBounds;
private final int INVALID_POINTER_ID = -1;
private int mActivePointerId = INVALID_POINTER_ID;
private boolean mIsWaitingForScrollFinish = false;
private int mScrollState = OnScrollListener.SCROLL_STATE_IDLE;
private OnTouchListener mOnTouchListener;
private boolean mIsParentHorizontalScrollContainer;
private int mResIdOfDynamicTouchChild;
private boolean mDynamicTouchChildTouched;
private int mSlop;
private boolean mSkipCallingOnTouchListener;
private OnHoverCellListener mOnHoverCellListener;
private OnItemMovedListener mOnItemMovedListener;
private int mLastMovedToIndex;
public DynamicListView(Context context) {
super(context);
init(context);
}
public DynamicListView(Context context, AttributeSet attrs, int defStyle) {
super(context, attrs, defStyle);
init(context);
}
public DynamicListView(Context context, AttributeSet attrs) {
super(context, attrs);
init(context);
}
public void init(Context context) {
setOnItemLongClickListener(mOnItemLongClickListener);
setOnScrollListener(mScrollListener);
DisplayMetrics metrics = context.getResources().getDisplayMetrics();
mSmoothScrollAmountAtEdge = (int) (SMOOTH_SCROLL_AMOUNT_AT_EDGE / metrics.density);
ViewConfiguration vc = ViewConfiguration.get(getContext());
mSlop = vc.getScaledTouchSlop();
}
public void setAdapter(BaseAdapter adapter) {
super.setAdapter(adapter);
}
@Override
@Deprecated
/**
* @deprecated use #setAdapter(BaseAdapter) instead.
*/
public void setAdapter(ListAdapter adapter) {
if (!(adapter instanceof BaseAdapter)) {
throw new IllegalArgumentException("DynamicListView needs a BaseAdapter!");
}
super.setAdapter(adapter);
}
/**
* Listens for long clicks on any items in the listview. When a cell has
* been selected, the hover cell is created and set up.
*/
private OnItemLongClickListener mOnItemLongClickListener = new OnItemLongClickListener() {
public boolean onItemLongClick(AdapterView<?> arg0, View arg1, int pos, long id) {
if (mResIdOfDynamicTouchChild == 0) {
mDynamicTouchChildTouched = true;
makeCellMobile();
return true;
}
return false;
}
};
private void makeCellMobile() {
int position = pointToPosition(mDownX, mDownY);
int itemNum = position - getFirstVisiblePosition();
View selectedView = getChildAt(itemNum);
if (selectedView == null || position < getHeaderViewsCount() || position >= getAdapter().getCount() - getHeaderViewsCount() - getFooterViewsCount()) {
return;
}
mOriginalTranscriptMode = getTranscriptMode();
setTranscriptMode(TRANSCRIPT_MODE_NORMAL);
mTotalOffset = 0;
mMobileItemId = getAdapter().getItemId(position);
mHoverCell = getAndAddHoverView(selectedView);
if (mOnHoverCellListener != null) {
mHoverCell = mOnHoverCellListener.onHoverCellCreated(mHoverCell);
}
selectedView.setVisibility(INVISIBLE);
mCellIsMobile = true;
getParent().requestDisallowInterceptTouchEvent(true);
updateNeighborViewsForId(mMobileItemId);
}
/**
* Creates the hover cell with the appropriate bitmap and of appropriate
* size. The hover cell's BitmapDrawable is drawn on top of the bitmap every
* single time an invalidate call is made.
*/
private BitmapDrawable getAndAddHoverView(View v) {
int w = v.getWidth();
int h = v.getHeight();
int top = v.getTop();
int left = v.getLeft();
Bitmap b = getBitmapFromView(v);
BitmapDrawable drawable = new BitmapDrawable(getResources(), b);
mHoverCellOriginalBounds = new Rect(left, top, left + w, top + h);
mHoverCellCurrentBounds = new Rect(mHoverCellOriginalBounds);
drawable.setBounds(mHoverCellCurrentBounds);
return drawable;
}
/**
* Returns a bitmap showing a screenshot of the view passed in.
*/
private Bitmap getBitmapFromView(View v) {
Bitmap bitmap = Bitmap.createBitmap(v.getWidth(), v.getHeight(), Bitmap.Config.ARGB_8888);
Canvas canvas = new Canvas(bitmap);
v.draw(canvas);
return bitmap;
}
/**
* Stores a reference to the views above and below the item currently
* corresponding to the hover cell. It is important to note that if this
* item is either at the top or bottom of the list, mAboveItemId or mBelowItemId
* may be invalid.
*/
private void updateNeighborViewsForId(long itemId) {
int position = getPositionForId(itemId);
ListAdapter adapter = getAdapter();
if (!adapter.hasStableIds()) {
throw new IllegalStateException("Adapter doesn't have stable ids! Make sure your adapter has stable ids, and override hasStableIds() to return true.");
}
mAboveItemId = position - 1 >= 0 ? adapter.getItemId(position - 1) : INVALID_ROW_ID;
mBelowItemId = position + 1 < adapter.getCount() ? adapter.getItemId(position + 1) : INVALID_ROW_ID;
}
/**
* Retrieves the view in the list corresponding to itemId
*/
private View getViewForId(long itemId) {
int firstVisiblePosition = getFirstVisiblePosition();
ListAdapter adapter = getAdapter();
if (!adapter.hasStableIds()) {
throw new IllegalStateException("Adapter doesn't have stable ids! Make sure your adapter has stable ids, and override hasStableIds() to return true.");
}
for (int i = 0; i < getChildCount(); i++) {
View v = getChildAt(i);
int position = firstVisiblePosition + i;
long id = adapter.getItemId(position);
if (id == itemId) {
return v;
}
}
return null;
}
/**
* Retrieves the position in the list corresponding to itemId
*/
private int getPositionForId(long itemId) {
View v = getViewForId(itemId);
if (v == null) {
return -1;
} else {
return getPositionForView(v);
}
}
/**
* dispatchDraw gets invoked when all the child views are about to be drawn.
* By overriding this method, the hover cell (BitmapDrawable) can be drawn
* over the listview's items whenever the listview is redrawn.
*/
@Override
protected void dispatchDraw(Canvas canvas) {
super.dispatchDraw(canvas);
if (mHoverCell != null) {
mHoverCell.draw(canvas);
}
}
@Override
public void setOnTouchListener(OnTouchListener l) {
mOnTouchListener = l;
}
public void setOnHoverCellListener(OnHoverCellListener onHoverCellListener) {
mOnHoverCellListener = onHoverCellListener;
}
private Rect getChildViewRect(View parentView, View childView) {
final Rect childRect = new Rect(childView.getLeft(), childView.getTop(), childView.getRight(), childView.getBottom());
if (parentView == childView) {
return childRect;
}
ViewGroup parent;
while ((parent = (ViewGroup) childView.getParent()) != parentView) {
childRect.offset(parent.getLeft(), parent.getTop());
childView = parent;
}
return childRect;
}
@Override
public boolean onTouchEvent(MotionEvent event) {
if (mSkipCallingOnTouchListener) {
return super.onTouchEvent(event);
}
if (mOnTouchListener instanceof SwipeOnTouchListener) {
if (((SwipeOnTouchListener) mOnTouchListener).isSwiping()) {
mSkipCallingOnTouchListener = true;
boolean retVal = mOnTouchListener.onTouch(this, event);
mSkipCallingOnTouchListener = false;
return retVal || super.onTouchEvent(event);
}
}
switch (event.getAction() & MotionEvent.ACTION_MASK) {
case MotionEvent.ACTION_DOWN:
mDownX = (int) event.getX();
mDownY = (int) event.getY();
mActivePointerId = event.getPointerId(0);
mDynamicTouchChildTouched = false;
if (mResIdOfDynamicTouchChild != 0) {
mIsParentHorizontalScrollContainer = false;
int position = pointToPosition(mDownX, mDownY);
int childNum = (position != INVALID_POSITION) ? position - getFirstVisiblePosition() : -1;
View itemView = (childNum >= 0) ? getChildAt(childNum) : null;
View childView = (itemView != null) ? itemView.findViewById(mResIdOfDynamicTouchChild) : null;
if (childView != null) {
final Rect childRect = getChildViewRect(this, childView);
if (childRect.contains(mDownX, mDownY)) {
mDynamicTouchChildTouched = true;
getParent().requestDisallowInterceptTouchEvent(true);
}
}
}
if (mIsParentHorizontalScrollContainer) {
// Do it now and don't wait until the user moves more than the
// slop factor.
getParent().requestDisallowInterceptTouchEvent(true);
}
break;
case MotionEvent.ACTION_MOVE:
if (mActivePointerId == INVALID_POINTER_ID) {
break;
}
int pointerIndex = event.findPointerIndex(mActivePointerId);
mLastEventY = (int) event.getY(pointerIndex);
mLastEventX = (int) event.getX(pointerIndex);
int deltaY = mLastEventY - mDownY;
int deltaX = mLastEventX - mDownX;
if (!mCellIsMobile && mDynamicTouchChildTouched) {
if (Math.abs(deltaY) > mSlop && Math.abs(deltaY) > Math.abs(deltaX)) {
makeCellMobile();
// Cancel ListView's touch (un-highlighting the item)
MotionEvent cancelEvent = MotionEvent.obtain(event);
cancelEvent.setAction(MotionEvent.ACTION_CANCEL | (event.getActionIndex() << MotionEvent.ACTION_POINTER_INDEX_SHIFT));
super.onTouchEvent(cancelEvent);
cancelEvent.recycle();
}
}
if (mCellIsMobile) {
mHoverCellCurrentBounds.offsetTo(mHoverCellOriginalBounds.left, mHoverCellOriginalBounds.top + deltaY + mTotalOffset);
mHoverCell.setBounds(mHoverCellCurrentBounds);
invalidate();
handleCellSwitch();
mIsMobileScrolling = false;
handleMobileCellScroll();
}
break;
case MotionEvent.ACTION_UP:
mDynamicTouchChildTouched = false;
touchEventsEnded();
break;
case MotionEvent.ACTION_CANCEL:
mDynamicTouchChildTouched = false;
touchEventsCancelled();
break;
case MotionEvent.ACTION_POINTER_UP:
/*
* If a multitouch event took place and the original touch dictating
* the movement of the hover cell has ended, then the dragging event
* ends and the hover cell is animated to its corresponding position
* in the listview.
*/
pointerIndex = (event.getAction() & MotionEvent.ACTION_POINTER_INDEX_MASK) >> MotionEvent.ACTION_POINTER_INDEX_SHIFT;
final int pointerId = event.getPointerId(pointerIndex);
if (pointerId == mActivePointerId) {
mDynamicTouchChildTouched = false;
touchEventsEnded();
}
break;
default:
break;
}
if (mCellIsMobile) {
return false;
} else if (mOnTouchListener != null) {
mSkipCallingOnTouchListener = true;
boolean retVal = mOnTouchListener.onTouch(this, event);
mSkipCallingOnTouchListener = false;
if (retVal) {
return true;
}
}
return super.onTouchEvent(event);
}
/**
* This method determines whether the hover cell has been shifted far enough
* to invoke a cell swap. If so, then the respective cell swap candidate is
* determined and the data set is changed. Upon posting a notification of the
* data set change, a layout is invoked to place the cells in the right place.
* Using a ViewTreeObserver and a corresponding OnPreDrawListener, we can
* offset the cell being swapped to where it previously was and then animate it to
* its new position.
*/
private void handleCellSwitch() {
final int deltaY = mLastEventY - mDownY;
int deltaYTotal = mHoverCellOriginalBounds.top + mTotalOffset + deltaY;
View belowView = getViewForId(mBelowItemId);
View mobileView = getViewForId(mMobileItemId);
View aboveView = getViewForId(mAboveItemId);
boolean isBelow = (belowView != null) && (deltaYTotal > belowView.getTop());
boolean isAbove = (aboveView != null) && (deltaYTotal < aboveView.getTop());
if (isBelow || isAbove) {
final long switchItemId = isBelow ? mBelowItemId : mAboveItemId;
View switchView = isBelow ? belowView : aboveView;
final int originalItem = getPositionForView(mobileView);
if (switchView == null) {
updateNeighborViewsForId(mMobileItemId);
return;
}
if (getPositionForView(switchView) < getHeaderViewsCount()) {
return;
}
swapElements(originalItem, getPositionForView(switchView));
BaseAdapter adapter;
if (getAdapter() instanceof HeaderViewListAdapter) {
adapter = (BaseAdapter) ((HeaderViewListAdapter) getAdapter()).getWrappedAdapter();
} else {
adapter = (BaseAdapter) getAdapter();
}
adapter.notifyDataSetChanged();
mDownY = mLastEventY;
mDownX = mLastEventX;
final int switchViewStartTop = switchView.getTop();
mobileView.setVisibility(View.VISIBLE);
switchView.setVisibility(View.INVISIBLE);
updateNeighborViewsForId(mMobileItemId);
final ViewTreeObserver observer = getViewTreeObserver();
observer.addOnPreDrawListener(new ViewTreeObserver.OnPreDrawListener() {
public boolean onPreDraw() {
observer.removeOnPreDrawListener(this);
View switchView = getViewForId(switchItemId);
mTotalOffset += deltaY;
int switchViewNewTop = switchView.getTop();
int delta = switchViewStartTop - switchViewNewTop;
ViewHelper.setTranslationY(switchView, delta);
ObjectAnimator animator = ObjectAnimator.ofFloat(switchView, "translationY", 0);
animator.setDuration(MOVE_DURATION);
animator.start();
return true;
}
});
}
}
private void swapElements(int indexOne, int indexTwo) {
mLastMovedToIndex = indexTwo;
ListAdapter adapter = getAdapter();
if (adapter instanceof HeaderViewListAdapter) {
adapter = ((HeaderViewListAdapter) adapter).getWrappedAdapter();
}
if (adapter instanceof Swappable) {
((Swappable) adapter).swapItems(indexOne - getHeaderViewsCount(), indexTwo - getHeaderViewsCount());
}
}
/**
* Resets all the appropriate fields to a default state while also animating
* the hover cell back to its correct location.
*/
private void touchEventsEnded() {
final View mobileView = getViewForId(mMobileItemId);
if (mCellIsMobile || mIsWaitingForScrollFinish) {
mCellIsMobile = false;
mIsWaitingForScrollFinish = false;
mIsMobileScrolling = false;
mActivePointerId = INVALID_POINTER_ID;
/* Restore the transcript mode */
setTranscriptMode(mOriginalTranscriptMode);
// If the autoscroller has not completed scrolling, we need to wait
// for it to
// finish in order to determine the final location of where the
// hover cell
// should be animated to.
if (mScrollState != OnScrollListener.SCROLL_STATE_IDLE) {
mIsWaitingForScrollFinish = true;
return;
}
mHoverCellCurrentBounds.offsetTo(mHoverCellOriginalBounds.left, mobileView.getTop());
ObjectAnimator hoverViewAnimator = ObjectAnimator.ofObject(mHoverCell, "bounds", sBoundEvaluator, mHoverCellCurrentBounds);
hoverViewAnimator.addUpdateListener(new ValueAnimator.AnimatorUpdateListener() {
@Override
public void onAnimationUpdate(ValueAnimator valueAnimator) {
invalidate();
}
});
hoverViewAnimator.addListener(new AnimatorListenerAdapter() {
@Override
public void onAnimationStart(Animator animation) {
setEnabled(false);
}
@Override
public void onAnimationEnd(Animator animation) {
mAboveItemId = INVALID_ID;
mMobileItemId = INVALID_ID;
mBelowItemId = INVALID_ID;
mobileView.setVisibility(VISIBLE);
mHoverCell = null;
setEnabled(true);
invalidate();
if (mOnItemMovedListener != null) {
mOnItemMovedListener.onItemMoved(mLastMovedToIndex - getHeaderViewsCount());
}
}
});
hoverViewAnimator.start();
} else {
touchEventsCancelled();
}
}
/**
* Resets all the appropriate fields to a default state.
*/
private void touchEventsCancelled() {
View mobileView = getViewForId(mMobileItemId);
if (mCellIsMobile) {
mAboveItemId = INVALID_ID;
mMobileItemId = INVALID_ID;
mBelowItemId = INVALID_ID;
mobileView.setVisibility(VISIBLE);
mHoverCell = null;
invalidate();
}
mCellIsMobile = false;
mIsMobileScrolling = false;
mActivePointerId = INVALID_POINTER_ID;
}
/**
* This TypeEvaluator is used to animate the BitmapDrawable back to its
* final location when the user lifts his finger by modifying the
* BitmapDrawable's bounds.
*/
private final static TypeEvaluator<Rect> sBoundEvaluator = new TypeEvaluator<Rect>() {
public Rect evaluate(float fraction, Rect startValue, Rect endValue) {
return new Rect(interpolate(startValue.left, endValue.left, fraction), interpolate(startValue.top, endValue.top, fraction), interpolate(startValue.right, endValue.right, fraction),
interpolate(startValue.bottom, endValue.bottom, fraction));
}
public int interpolate(int start, int end, float fraction) {
return (int) (start + fraction * (end - start));
}
};
/**
* Determines whether this listview is in a scrolling state invoked
* by the fact that the hover cell is out of the bounds of the listview;
*/
private void handleMobileCellScroll() {
mIsMobileScrolling = handleMobileCellScroll(mHoverCellCurrentBounds);
}
/**
* This method is in charge of determining if the hover cell is above
* or below the bounds of the listview. If so, the listview does an appropriate
* upward or downward smooth scroll so as to reveal new items.
*/
private boolean handleMobileCellScroll(Rect r) {
int offset = computeVerticalScrollOffset();
int height = getHeight();
int extent = computeVerticalScrollExtent();
int range = computeVerticalScrollRange();
int hoverViewTop = r.top;
int hoverHeight = r.height();
if (hoverViewTop <= 0 && offset > 0) {
smoothScrollBy(-mSmoothScrollAmountAtEdge, 0);
return true;
}
if (hoverViewTop + hoverHeight >= height && (offset + extent) < range) {
smoothScrollBy(mSmoothScrollAmountAtEdge, 0);
return true;
}
return false;
}
public void setIsParentHorizontalScrollContainer(boolean isParentHorizontalScrollContainer) {
mIsParentHorizontalScrollContainer = (mResIdOfDynamicTouchChild == 0) && isParentHorizontalScrollContainer;
}
public boolean isParentHorizontalScrollContainer() {
return mIsParentHorizontalScrollContainer;
}
public void setDynamicTouchChild(int childResId) {
mResIdOfDynamicTouchChild = childResId;
if (childResId != 0) {
setIsParentHorizontalScrollContainer(false);
}
}
/**
* This scroll listener is added to the listview in order to handle cell swapping
* when the cell is either at the top or bottom edge of the listview. If the hover
* cell is at either edge of the listview, the listview will begin scrolling. As
* scrolling takes place, the listview continuously checks if new cells became visible
* and determines whether they are potential candidates for a cell swap.
*/
private OnScrollListener mScrollListener = new OnScrollListener() {
private int mPreviousFirstVisibleItem = -1;
private int mPreviousVisibleItemCount = -1;
private int mCurrentFirstVisibleItem;
private int mCurrentVisibleItemCount;
private int mCurrentScrollState;
public void onScroll(AbsListView view, int firstVisibleItem, int visibleItemCount, int totalItemCount) {
mCurrentFirstVisibleItem = firstVisibleItem;
mCurrentVisibleItemCount = visibleItemCount;
mPreviousFirstVisibleItem = (mPreviousFirstVisibleItem == -1) ? mCurrentFirstVisibleItem : mPreviousFirstVisibleItem;
mPreviousVisibleItemCount = (mPreviousVisibleItemCount == -1) ? mCurrentVisibleItemCount : mPreviousVisibleItemCount;
checkAndHandleFirstVisibleCellChange();
checkAndHandleLastVisibleCellChange();
mPreviousFirstVisibleItem = mCurrentFirstVisibleItem;
mPreviousVisibleItemCount = mCurrentVisibleItemCount;
}
@Override
public void onScrollStateChanged(AbsListView view, int scrollState) {
mCurrentScrollState = scrollState;
mScrollState = scrollState;
isScrollCompleted();
}
/**
* This method is in charge of invoking 1 of 2 actions. Firstly, if the listview
* is in a state of scrolling invoked by the hover cell being outside the bounds
* of the listview, then this scrolling event is continued. Secondly, if the hover
* cell has already been released, this invokes the animation for the hover cell
* to return to its correct position after the listview has entered an idle scroll
* state.
*/
private void isScrollCompleted() {
if (mCurrentVisibleItemCount > 0 && mCurrentScrollState == SCROLL_STATE_IDLE) {
if (mCellIsMobile && mIsMobileScrolling) {
handleMobileCellScroll();
} else if (mIsWaitingForScrollFinish) {
touchEventsEnded();
}
}
}
/**
* Determines if the listview scrolled up enough to reveal a new cell at the
* top of the list. If so, then the appropriate parameters are updated.
*/
public void checkAndHandleFirstVisibleCellChange() {
if (mCurrentFirstVisibleItem != mPreviousFirstVisibleItem) {
if (mCellIsMobile && mMobileItemId != INVALID_ID) {
updateNeighborViewsForId(mMobileItemId);
handleCellSwitch();
}
}
}
/**
* Determines if the listview scrolled down enough to reveal a new cell at the
* bottom of the list. If so, then the appropriate parameters are updated.
*/
public void checkAndHandleLastVisibleCellChange() {
int currentLastVisibleItem = mCurrentFirstVisibleItem + mCurrentVisibleItemCount;
int previousLastVisibleItem = mPreviousFirstVisibleItem + mPreviousVisibleItemCount;
if (currentLastVisibleItem != previousLastVisibleItem) {
if (mCellIsMobile && mMobileItemId != INVALID_ID) {
updateNeighborViewsForId(mMobileItemId);
handleCellSwitch();
}
}
}
};
/**
* Set the {@link com.nhaarman.listviewanimations.widget.DynamicListView.OnItemMovedListener} to be notified when an item is dropped.
*/
public void setOnItemMovedListener(OnItemMovedListener onItemMovedListener) {
this.mOnItemMovedListener = onItemMovedListener;
}
/**
* Interface, usually implemented by a {@link com.nhaarman.listviewanimations.BaseAdapterDecorator},
* that indicates that it can swap the visual position of two list items.
*
* @author Anton Spaans on 9/11/13.
*/
public interface Swappable {
/**
* Swaps the item on the first adapter position with the item on the second adapter position.
* Be sure to call {@link android.widget.BaseAdapter#notifyDataSetChanged()} if appropriate when implementing this method.
*
* @param positionOne First adapter position.
* @param positionTwo Second adapter position.
*/
public void swapItems(int positionOne, int positionTwo);
}
}
|
library/src/com/nhaarman/listviewanimations/widget/DynamicListView.java
|
/*
* Copyright (C) 2013 The Android Open Source Project
* Copyright 2013 Niek Haarman
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.nhaarman.listviewanimations.widget;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Rect;
import android.graphics.drawable.BitmapDrawable;
import android.graphics.drawable.Drawable;
import android.util.AttributeSet;
import android.util.DisplayMetrics;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewConfiguration;
import android.view.ViewGroup;
import android.view.ViewTreeObserver;
import android.widget.AbsListView;
import android.widget.AdapterView;
import android.widget.BaseAdapter;
import android.widget.HeaderViewListAdapter;
import android.widget.ListAdapter;
import android.widget.ListView;
import com.nhaarman.listviewanimations.itemmanipulation.swipedismiss.SwipeOnTouchListener;
import com.nineoldandroids.animation.Animator;
import com.nineoldandroids.animation.AnimatorListenerAdapter;
import com.nineoldandroids.animation.ObjectAnimator;
import com.nineoldandroids.animation.TypeEvaluator;
import com.nineoldandroids.animation.ValueAnimator;
import com.nineoldandroids.view.ViewHelper;
/**
* The DynamicListView is an extension of {@link ListView} that supports cell dragging
* and swapping.
* </p>
* Make sure your adapter has stable ids, and override {@link ListAdapter#hasStableIds()} to return true.</br>
* </p>
* This layout is in charge of positioning the hover cell in the correct location
* on the screen in response to user touch events. It uses the position of the
* hover cell to determine when two cells should be swapped. If two cells should
* be swapped, all the corresponding data set and layout changes are handled here.
* </p>
* If no cell is selected, all the touch events are passed down to the ListView
* and behave normally. If one of the items in the ListView experiences a
* long press event, the contents of its current visible state are captured as
* a bitmap and its visibility is set to INVISIBLE. A hover cell is then created and
* added to this layout as an overlaying BitmapDrawable above the ListView. Once the
* hover cell is translated some distance to signify an item swap, a data set change
* accompanied by animation takes place. When the user releases the hover cell,
* it animates into its corresponding position in the ListView.
* </p>
* When the hover cell is either above or below the bounds of the ListView, this
* ListView also scrolls on its own so as to reveal additional content.
* </p>
* See http://youtu.be/_BZIvjMgH-Q
*/
public class DynamicListView extends ListView {
private int mOriginalTranscriptMode;
public interface OnHoverCellListener {
public Drawable onHoverCellCreated(Drawable hoverCellDrawable);
}
/**
* Implement this interface to be notified of ordering changes. Call {@link #setOnItemMovedListener(com.nhaarman.listviewanimations.widget.DynamicListView.OnItemMovedListener)}.
*/
public interface OnItemMovedListener {
/**
* Called after an item is dropped and moved.
*
* @param newPosition the new position of the item.
*/
public void onItemMoved(int newPosition);
}
private final int SMOOTH_SCROLL_AMOUNT_AT_EDGE = 15;
private final int MOVE_DURATION = 150;
private int mLastEventY = -1, mLastEventX = -1;
private int mDownY = -1;
private int mDownX = -1;
private int mTotalOffset = 0;
private boolean mCellIsMobile = false;
private boolean mIsMobileScrolling = false;
private int mSmoothScrollAmountAtEdge = 0;
private final int INVALID_ID = -1;
private long mAboveItemId = INVALID_ID;
private long mMobileItemId = INVALID_ID;
private long mBelowItemId = INVALID_ID;
private Drawable mHoverCell;
private Rect mHoverCellCurrentBounds;
private Rect mHoverCellOriginalBounds;
private final int INVALID_POINTER_ID = -1;
private int mActivePointerId = INVALID_POINTER_ID;
private boolean mIsWaitingForScrollFinish = false;
private int mScrollState = OnScrollListener.SCROLL_STATE_IDLE;
private OnTouchListener mOnTouchListener;
private boolean mIsParentHorizontalScrollContainer;
private int mResIdOfDynamicTouchChild;
private boolean mDynamicTouchChildTouched;
private int mSlop;
private boolean mSkipCallingOnTouchListener;
private OnHoverCellListener mOnHoverCellListener;
private OnItemMovedListener mOnItemMovedListener;
private int mLastMovedToIndex;
public DynamicListView(Context context) {
super(context);
init(context);
}
public DynamicListView(Context context, AttributeSet attrs, int defStyle) {
super(context, attrs, defStyle);
init(context);
}
public DynamicListView(Context context, AttributeSet attrs) {
super(context, attrs);
init(context);
}
public void init(Context context) {
setOnItemLongClickListener(mOnItemLongClickListener);
setOnScrollListener(mScrollListener);
DisplayMetrics metrics = context.getResources().getDisplayMetrics();
mSmoothScrollAmountAtEdge = (int) (SMOOTH_SCROLL_AMOUNT_AT_EDGE / metrics.density);
ViewConfiguration vc = ViewConfiguration.get(getContext());
mSlop = vc.getScaledTouchSlop();
}
/**
 * Sets the adapter. DynamicListView requires a {@link BaseAdapter}; prefer
 * this overload over {@link #setAdapter(ListAdapter)}.
 */
public void setAdapter(BaseAdapter adapter) {
    super.setAdapter(adapter);
}
/**
 * Sets the adapter, enforcing the {@link BaseAdapter} requirement at runtime.
 * <p>
 * Note: the doc comment must precede the annotations — in the original it was
 * placed between {@code @Deprecated} and the method, where the javadoc tool
 * ignores it.
 *
 * @param adapter the adapter; must be a {@link BaseAdapter}
 * @throws IllegalArgumentException if {@code adapter} is not a BaseAdapter
 * @deprecated use {@link #setAdapter(BaseAdapter)} instead.
 */
@Override
@Deprecated
public void setAdapter(ListAdapter adapter) {
    if (!(adapter instanceof BaseAdapter)) {
        throw new IllegalArgumentException("DynamicListView needs a BaseAdapter!");
    }
    super.setAdapter(adapter);
}
/**
 * Listens for long clicks on any items in the listview. When a cell has
 * been selected, the hover cell is created and set up.
 * Only active when no dedicated drag-handle child is configured
 * (mResIdOfDynamicTouchChild == 0); with a handle configured, drags start
 * from onTouchEvent() instead.
 */
private OnItemLongClickListener mOnItemLongClickListener = new OnItemLongClickListener() {
    public boolean onItemLongClick(AdapterView<?> arg0, View arg1, int pos, long id) {
        if (mResIdOfDynamicTouchChild == 0) {
            mDynamicTouchChildTouched = true;
            makeCellMobile();
            return true;
        }
        return false;
    }
};
/**
 * "Lifts" the row under the last touch-down point: snapshots it into the hover
 * cell, hides the real row, and records the state needed to track the drag.
 * Does nothing for touches outside any row or on header/footer views.
 */
private void makeCellMobile() {
    int position = pointToPosition(mDownX, mDownY);
    int itemNum = position - getFirstVisiblePosition();
    View selectedView = getChildAt(itemNum);
    // Reject headers (position < headerCount) and footers (position >= count - footerCount).
    // BUG FIX: the upper bound must subtract the *footer* count; subtracting the
    // header count only excluded footers when both counts happened to be equal,
    // causing crashes on lists with footer views.
    if (selectedView == null || position < getHeaderViewsCount() || position >= getAdapter().getCount() - getFooterViewsCount()) {
        return;
    }
    // Pin the transcript mode for the duration of the drag; restored on drop.
    mOriginalTranscriptMode = getTranscriptMode();
    setTranscriptMode(TRANSCRIPT_MODE_NORMAL);
    mTotalOffset = 0;
    mMobileItemId = getAdapter().getItemId(position);
    mHoverCell = getAndAddHoverView(selectedView);
    // Give the client a chance to decorate the hover cell.
    if (mOnHoverCellListener != null) {
        mHoverCell = mOnHoverCellListener.onHoverCellCreated(mHoverCell);
    }
    // Hide the real row; dispatchDraw() paints the snapshot instead.
    selectedView.setVisibility(INVISIBLE);
    mCellIsMobile = true;
    getParent().requestDisallowInterceptTouchEvent(true);
    updateNeighborViewsForId(mMobileItemId);
}
/**
 * Builds the hover cell: a BitmapDrawable snapshot of the given row, with its
 * bounds initialised to the row's current on-screen position. The drawable is
 * redrawn over the list on every invalidate (see dispatchDraw).
 */
private BitmapDrawable getAndAddHoverView(View v) {
    Bitmap snapshot = getBitmapFromView(v);
    BitmapDrawable hover = new BitmapDrawable(getResources(), snapshot);
    int left = v.getLeft();
    int top = v.getTop();
    // Remember where the row was when lifted, and start the hover cell there.
    mHoverCellOriginalBounds = new Rect(left, top, left + v.getWidth(), top + v.getHeight());
    mHoverCellCurrentBounds = new Rect(mHoverCellOriginalBounds);
    hover.setBounds(mHoverCellCurrentBounds);
    return hover;
}
/**
 * Renders the given view into a freshly allocated ARGB_8888 bitmap of the
 * same size and returns it.
 */
private Bitmap getBitmapFromView(View v) {
    final Bitmap snapshot = Bitmap.createBitmap(v.getWidth(), v.getHeight(), Bitmap.Config.ARGB_8888);
    v.draw(new Canvas(snapshot));
    return snapshot;
}
/**
 * Stores a reference to the views above and below the item currently
 * corresponding to the hover cell. It is important to note that if this
 * item is either at the top or bottom of the list, mAboveItemId or mBelowItemId
 * may be invalid.
 * NOTE(review): out-of-range neighbours are set to AdapterView.INVALID_ROW_ID
 * (Long.MIN_VALUE) while the rest of this class uses the local INVALID_ID (-1)
 * sentinel. getViewForId() returns null for both, so this appears benign, but
 * the two sentinels should probably be unified — TODO confirm.
 *
 * @throws IllegalStateException if the adapter does not have stable ids
 */
private void updateNeighborViewsForId(long itemId) {
    int position = getPositionForId(itemId);
    ListAdapter adapter = getAdapter();
    if (!adapter.hasStableIds()) {
        throw new IllegalStateException("Adapter doesn't have stable ids! Make sure your adapter has stable ids, and override hasStableIds() to return true.");
    }
    mAboveItemId = position - 1 >= 0 ? adapter.getItemId(position - 1) : INVALID_ROW_ID;
    mBelowItemId = position + 1 < adapter.getCount() ? adapter.getItemId(position + 1) : INVALID_ROW_ID;
}
/**
 * Returns the currently visible child view whose adapter item id equals
 * {@code itemId}, or {@code null} if no visible row matches.
 *
 * @throws IllegalStateException if the adapter does not have stable ids
 */
private View getViewForId(long itemId) {
    ListAdapter adapter = getAdapter();
    if (!adapter.hasStableIds()) {
        throw new IllegalStateException("Adapter doesn't have stable ids! Make sure your adapter has stable ids, and override hasStableIds() to return true.");
    }
    final int first = getFirstVisiblePosition();
    // Scan the visible children; child index i maps to adapter position first + i.
    for (int index = 0; index < getChildCount(); index++) {
        if (adapter.getItemId(first + index) == itemId) {
            return getChildAt(index);
        }
    }
    return null;
}
/**
 * Returns the adapter position of the visible row with the given item id,
 * or -1 if that row is not currently visible.
 */
private int getPositionForId(long itemId) {
    final View row = getViewForId(itemId);
    return (row == null) ? -1 : getPositionForView(row);
}
/**
 * dispatchDraw gets invoked when all the child views are about to be drawn.
 * By overriding this method, the hover cell (BitmapDrawable) can be drawn
 * over the listview's items whenever the listview is redrawn.
 */
@Override
protected void dispatchDraw(Canvas canvas) {
    super.dispatchDraw(canvas);
    // Paint the dragged row's snapshot on top of the normal list content.
    if (mHoverCell != null) {
        mHoverCell.draw(canvas);
    }
}
/**
 * Stores the client's touch listener instead of handing it to the superclass;
 * it is invoked manually from {@link #onTouchEvent(MotionEvent)} so drag
 * handling can take precedence.
 */
@Override
public void setOnTouchListener(OnTouchListener l) {
    mOnTouchListener = l;
}
/** Sets the listener that may decorate the hover cell when it is created. */
public void setOnHoverCellListener(OnHoverCellListener onHoverCellListener) {
    mOnHoverCellListener = onHoverCellListener;
}
/**
 * Returns childView's bounds translated into parentView's coordinate space by
 * walking up the hierarchy and accumulating each intermediate parent's offset.
 * Assumes childView is a descendant of parentView — the walk does not
 * terminate safely otherwise.
 */
private Rect getChildViewRect(View parentView, View childView) {
    final Rect childRect = new Rect(childView.getLeft(), childView.getTop(), childView.getRight(), childView.getBottom());
    if (parentView == childView) {
        return childRect;
    }
    ViewGroup parent;
    // Accumulate offsets until we reach parentView.
    while ((parent = (ViewGroup) childView.getParent()) != parentView) {
        childRect.offset(parent.getLeft(), parent.getTop());
        childView = parent;
    }
    return childRect;
}
/**
 * Central touch dispatcher. Drives the drag-and-drop state machine while a
 * cell is mobile, gives a mid-swipe SwipeOnTouchListener priority, and
 * otherwise forwards events to the client's stored OnTouchListener and then
 * to ListView.
 */
@Override
public boolean onTouchEvent(MotionEvent event) {
    // Re-entrancy guard: invoking mOnTouchListener below may call back into
    // this view; skip our own handling in that case.
    if (mSkipCallingOnTouchListener) {
        return super.onTouchEvent(event);
    }
    // While an item is being swiped away, the swipe listener owns the gesture.
    if (mOnTouchListener instanceof SwipeOnTouchListener) {
        if (((SwipeOnTouchListener) mOnTouchListener).isSwiping()) {
            mSkipCallingOnTouchListener = true;
            boolean retVal = mOnTouchListener.onTouch(this, event);
            mSkipCallingOnTouchListener = false;
            return retVal || super.onTouchEvent(event);
        }
    }
    switch (event.getAction() & MotionEvent.ACTION_MASK) {
        case MotionEvent.ACTION_DOWN:
            mDownX = (int) event.getX();
            mDownY = (int) event.getY();
            mActivePointerId = event.getPointerId(0);
            mDynamicTouchChildTouched = false;
            // With a drag-handle child configured, only a touch inside that
            // child's bounds may start a drag.
            if (mResIdOfDynamicTouchChild != 0) {
                mIsParentHorizontalScrollContainer = false;
                int position = pointToPosition(mDownX, mDownY);
                int childNum = (position != INVALID_POSITION) ? position - getFirstVisiblePosition() : -1;
                View itemView = (childNum >= 0) ? getChildAt(childNum) : null;
                View childView = (itemView != null) ? itemView.findViewById(mResIdOfDynamicTouchChild) : null;
                if (childView != null) {
                    final Rect childRect = getChildViewRect(this, childView);
                    if (childRect.contains(mDownX, mDownY)) {
                        mDynamicTouchChildTouched = true;
                        getParent().requestDisallowInterceptTouchEvent(true);
                    }
                }
            }
            if (mIsParentHorizontalScrollContainer) {
                // Do it now and don't wait until the user moves more than the
                // slop factor.
                getParent().requestDisallowInterceptTouchEvent(true);
            }
            break;
        case MotionEvent.ACTION_MOVE:
            if (mActivePointerId == INVALID_POINTER_ID) {
                break;
            }
            int pointerIndex = event.findPointerIndex(mActivePointerId);
            mLastEventY = (int) event.getY(pointerIndex);
            mLastEventX = (int) event.getX(pointerIndex);
            int deltaY = mLastEventY - mDownY;
            int deltaX = mLastEventX - mDownX;
            // Promote to a drag once the handle was touched and the finger
            // moved mostly vertically beyond the touch slop.
            if (!mCellIsMobile && mDynamicTouchChildTouched) {
                if (Math.abs(deltaY) > mSlop && Math.abs(deltaY) > Math.abs(deltaX)) {
                    makeCellMobile();
                    // Cancel ListView's touch (un-highlighting the item)
                    MotionEvent cancelEvent = MotionEvent.obtain(event);
                    cancelEvent.setAction(MotionEvent.ACTION_CANCEL | (event.getActionIndex() << MotionEvent.ACTION_POINTER_INDEX_SHIFT));
                    super.onTouchEvent(cancelEvent);
                    cancelEvent.recycle();
                }
            }
            if (mCellIsMobile) {
                // Follow the finger, then check for cell swaps and edge
                // auto-scrolling.
                mHoverCellCurrentBounds.offsetTo(mHoverCellOriginalBounds.left, mHoverCellOriginalBounds.top + deltaY + mTotalOffset);
                mHoverCell.setBounds(mHoverCellCurrentBounds);
                invalidate();
                handleCellSwitch();
                mIsMobileScrolling = false;
                handleMobileCellScroll();
            }
            break;
        case MotionEvent.ACTION_UP:
            mDynamicTouchChildTouched = false;
            touchEventsEnded();
            break;
        case MotionEvent.ACTION_CANCEL:
            mDynamicTouchChildTouched = false;
            touchEventsCancelled();
            break;
        case MotionEvent.ACTION_POINTER_UP:
            /*
             * If a multitouch event took place and the original touch dictating
             * the movement of the hover cell has ended, then the dragging event
             * ends and the hover cell is animated to its corresponding position
             * in the listview.
             */
            pointerIndex = (event.getAction() & MotionEvent.ACTION_POINTER_INDEX_MASK) >> MotionEvent.ACTION_POINTER_INDEX_SHIFT;
            final int pointerId = event.getPointerId(pointerIndex);
            if (pointerId == mActivePointerId) {
                mDynamicTouchChildTouched = false;
                touchEventsEnded();
            }
            break;
        default:
            break;
    }
    if (mCellIsMobile) {
        // While dragging, we deliberately report the event as unhandled;
        // dispatchDraw() keeps painting the hover cell.
        return false;
    } else if (mOnTouchListener != null) {
        // Forward to the client's stored touch listener (see setOnTouchListener).
        mSkipCallingOnTouchListener = true;
        boolean retVal = mOnTouchListener.onTouch(this, event);
        mSkipCallingOnTouchListener = false;
        if (retVal) {
            return true;
        }
    }
    return super.onTouchEvent(event);
}
/**
 * This method determines whether the hover cell has been shifted far enough
 * to invoke a cell swap. If so, then the respective cell swap candidate is
 * determined and the data set is changed. Upon posting a notification of the
 * data set change, a layout is invoked to place the cells in the right place.
 * Using a ViewTreeObserver and a corresponding OnPreDrawListener, we can
 * offset the cell being swapped to where it previously was and then animate it to
 * its new position.
 */
private void handleCellSwitch() {
    final int deltaY = mLastEventY - mDownY;
    // Absolute top edge of the hover cell in list coordinates.
    int deltaYTotal = mHoverCellOriginalBounds.top + mTotalOffset + deltaY;
    View belowView = getViewForId(mBelowItemId);
    View mobileView = getViewForId(mMobileItemId);
    View aboveView = getViewForId(mAboveItemId);
    // A neighbour is crossed once the hover cell's top passes its top edge.
    boolean isBelow = (belowView != null) && (deltaYTotal > belowView.getTop());
    boolean isAbove = (aboveView != null) && (deltaYTotal < aboveView.getTop());
    if (isBelow || isAbove) {
        final long switchItemId = isBelow ? mBelowItemId : mAboveItemId;
        View switchView = isBelow ? belowView : aboveView;
        final int originalItem = getPositionForView(mobileView);
        if (switchView == null) {
            updateNeighborViewsForId(mMobileItemId);
            return;
        }
        // Never swap into the header region.
        if (getPositionForView(switchView) < getHeaderViewsCount()) {
            return;
        }
        swapElements(originalItem, getPositionForView(switchView));
        // Unwrap the real adapter when headers/footers are present.
        BaseAdapter adapter;
        if (getAdapter() instanceof HeaderViewListAdapter) {
            adapter = (BaseAdapter) ((HeaderViewListAdapter) getAdapter()).getWrappedAdapter();
        } else {
            adapter = (BaseAdapter) getAdapter();
        }
        adapter.notifyDataSetChanged();
        // Re-anchor the gesture at the current finger position.
        mDownY = mLastEventY;
        mDownX = mLastEventX;
        final int switchViewStartTop = switchView.getTop();
        mobileView.setVisibility(View.VISIBLE);
        switchView.setVisibility(View.INVISIBLE);
        updateNeighborViewsForId(mMobileItemId);
        // After the relayout from notifyDataSetChanged(), offset the swapped
        // cell back to its old position and animate it to the new one.
        final ViewTreeObserver observer = getViewTreeObserver();
        observer.addOnPreDrawListener(new ViewTreeObserver.OnPreDrawListener() {
            public boolean onPreDraw() {
                observer.removeOnPreDrawListener(this);
                View switchView = getViewForId(switchItemId);
                mTotalOffset += deltaY;
                int switchViewNewTop = switchView.getTop();
                int delta = switchViewStartTop - switchViewNewTop;
                ViewHelper.setTranslationY(switchView, delta);
                ObjectAnimator animator = ObjectAnimator.ofFloat(switchView, "translationY", 0);
                animator.setDuration(MOVE_DURATION);
                animator.start();
                return true;
            }
        });
    }
}
/**
 * Swaps the data items at the two raw adapter positions via the
 * {@link Swappable} contract (header-adjusted before the call), and records
 * the destination for the OnItemMovedListener callback on drop.
 */
private void swapElements(int indexOne, int indexTwo) {
    mLastMovedToIndex = indexTwo;
    ListAdapter adapter = getAdapter();
    // Unwrap header/footer decoration to reach the Swappable implementation.
    if (adapter instanceof HeaderViewListAdapter) {
        adapter = ((HeaderViewListAdapter) adapter).getWrappedAdapter();
    }
    if (adapter instanceof Swappable) {
        ((Swappable) adapter).swapItems(indexOne - getHeaderViewsCount(), indexTwo - getHeaderViewsCount());
    }
}
/**
 * Resets all the appropriate fields to a default state while also animating
 * the hover cell back to its correct location.
 */
private void touchEventsEnded() {
    final View mobileView = getViewForId(mMobileItemId);
    // NOTE(review): mobileView may be null here if the dragged row scrolled
    // off screen; the offsetTo() below would then throw an NPE — TODO confirm.
    if (mCellIsMobile || mIsWaitingForScrollFinish) {
        mCellIsMobile = false;
        mIsWaitingForScrollFinish = false;
        mIsMobileScrolling = false;
        mActivePointerId = INVALID_POINTER_ID;
        /* Restore the transcript mode */
        setTranscriptMode(mOriginalTranscriptMode);
        // If the autoscroller has not completed scrolling, we need to wait
        // for it to
        // finish in order to determine the final location of where the
        // hover cell
        // should be animated to.
        if (mScrollState != OnScrollListener.SCROLL_STATE_IDLE) {
            mIsWaitingForScrollFinish = true;
            return;
        }
        mHoverCellCurrentBounds.offsetTo(mHoverCellOriginalBounds.left, mobileView.getTop());
        // Animate the hover cell from where the finger left it to the row's
        // final resting position.
        ObjectAnimator hoverViewAnimator = ObjectAnimator.ofObject(mHoverCell, "bounds", sBoundEvaluator, mHoverCellCurrentBounds);
        hoverViewAnimator.addUpdateListener(new ValueAnimator.AnimatorUpdateListener() {
            @Override
            public void onAnimationUpdate(ValueAnimator valueAnimator) {
                invalidate();
            }
        });
        hoverViewAnimator.addListener(new AnimatorListenerAdapter() {
            @Override
            public void onAnimationStart(Animator animation) {
                // Block input while the settle animation runs.
                setEnabled(false);
            }
            @Override
            public void onAnimationEnd(Animator animation) {
                mAboveItemId = INVALID_ID;
                mMobileItemId = INVALID_ID;
                mBelowItemId = INVALID_ID;
                mobileView.setVisibility(VISIBLE);
                mHoverCell = null;
                setEnabled(true);
                invalidate();
                // Report the (header-adjusted) drop position to the client.
                if (mOnItemMovedListener != null) {
                    mOnItemMovedListener.onItemMoved(mLastMovedToIndex - getHeaderViewsCount());
                }
            }
        });
        hoverViewAnimator.start();
    } else {
        touchEventsCancelled();
    }
}
/**
 * Resets all the appropriate fields to a default state. Unlike
 * touchEventsEnded(), no settle animation is played.
 */
private void touchEventsCancelled() {
    View mobileView = getViewForId(mMobileItemId);
    if (mCellIsMobile) {
        mAboveItemId = INVALID_ID;
        mMobileItemId = INVALID_ID;
        mBelowItemId = INVALID_ID;
        // Restore the real row and discard the snapshot.
        mobileView.setVisibility(VISIBLE);
        mHoverCell = null;
        invalidate();
    }
    mCellIsMobile = false;
    mIsMobileScrolling = false;
    mActivePointerId = INVALID_POINTER_ID;
}
/**
 * This TypeEvaluator is used to animate the BitmapDrawable back to its
 * final location when the user lifts his finger by modifying the
 * BitmapDrawable's bounds.
 */
private final static TypeEvaluator<Rect> sBoundEvaluator = new TypeEvaluator<Rect>() {
    public Rect evaluate(float fraction, Rect startValue, Rect endValue) {
        // Interpolate each edge of the rect independently and linearly.
        return new Rect(interpolate(startValue.left, endValue.left, fraction), interpolate(startValue.top, endValue.top, fraction), interpolate(startValue.right, endValue.right, fraction),
                interpolate(startValue.bottom, endValue.bottom, fraction));
    }
    // Integer linear interpolation: start + fraction * (end - start).
    public int interpolate(int start, int end, float fraction) {
        return (int) (start + fraction * (end - start));
    }
};
/**
 * Determines whether this listview is in a scrolling state invoked
 * by the fact that the hover cell is out of the bounds of the listview;
 */
private void handleMobileCellScroll() {
    mIsMobileScrolling = handleMobileCellScroll(mHoverCellCurrentBounds);
}
/**
 * This method is in charge of determining if the hover cell is above
 * or below the bounds of the listview. If so, the listview does an appropriate
 * upward or downward smooth scroll so as to reveal new items.
 *
 * @param r the hover cell's current bounds in list coordinates
 * @return true if a smooth scroll was started
 */
private boolean handleMobileCellScroll(Rect r) {
    int offset = computeVerticalScrollOffset();
    int height = getHeight();
    int extent = computeVerticalScrollExtent();
    int range = computeVerticalScrollRange();
    int hoverViewTop = r.top;
    int hoverHeight = r.height();
    // Hover cell at/above the top edge and content remains above: scroll up.
    if (hoverViewTop <= 0 && offset > 0) {
        smoothScrollBy(-mSmoothScrollAmountAtEdge, 0);
        return true;
    }
    // Hover cell at/below the bottom edge and content remains below: scroll down.
    if (hoverViewTop + hoverHeight >= height && (offset + extent) < range) {
        smoothScrollBy(mSmoothScrollAmountAtEdge, 0);
        return true;
    }
    return false;
}
/**
 * Marks this view as living inside a horizontally scrolling parent, so touch
 * interception is disallowed immediately on ACTION_DOWN. Ignored (forced
 * false) when a drag-handle child is configured.
 */
public void setIsParentHorizontalScrollContainer(boolean isParentHorizontalScrollContainer) {
    mIsParentHorizontalScrollContainer = (mResIdOfDynamicTouchChild == 0) && isParentHorizontalScrollContainer;
}
/** @return whether this view is configured as living in a horizontal scroll container */
public boolean isParentHorizontalScrollContainer() {
    return mIsParentHorizontalScrollContainer;
}
/**
 * Configures a child view resource id to act as the drag handle. A non-zero
 * id disables the parent-horizontal-scroll-container mode.
 */
public void setDynamicTouchChild(int childResId) {
    mResIdOfDynamicTouchChild = childResId;
    if (childResId != 0) {
        setIsParentHorizontalScrollContainer(false);
    }
}
/**
 * This scroll listener is added to the listview in order to handle cell swapping
 * when the cell is either at the top or bottom edge of the listview. If the hover
 * cell is at either edge of the listview, the listview will begin scrolling. As
 * scrolling takes place, the listview continuously checks if new cells became visible
 * and determines whether they are potential candidates for a cell swap.
 */
private OnScrollListener mScrollListener = new OnScrollListener() {
    // Previous/current visible windows, used to detect newly revealed rows.
    private int mPreviousFirstVisibleItem = -1;
    private int mPreviousVisibleItemCount = -1;
    private int mCurrentFirstVisibleItem;
    private int mCurrentVisibleItemCount;
    private int mCurrentScrollState;
    public void onScroll(AbsListView view, int firstVisibleItem, int visibleItemCount, int totalItemCount) {
        mCurrentFirstVisibleItem = firstVisibleItem;
        mCurrentVisibleItemCount = visibleItemCount;
        // First callback: seed the "previous" window with the current one.
        mPreviousFirstVisibleItem = (mPreviousFirstVisibleItem == -1) ? mCurrentFirstVisibleItem : mPreviousFirstVisibleItem;
        mPreviousVisibleItemCount = (mPreviousVisibleItemCount == -1) ? mCurrentVisibleItemCount : mPreviousVisibleItemCount;
        checkAndHandleFirstVisibleCellChange();
        checkAndHandleLastVisibleCellChange();
        mPreviousFirstVisibleItem = mCurrentFirstVisibleItem;
        mPreviousVisibleItemCount = mCurrentVisibleItemCount;
    }
    @Override
    public void onScrollStateChanged(AbsListView view, int scrollState) {
        mCurrentScrollState = scrollState;
        mScrollState = scrollState;
        isScrollCompleted();
    }
    /**
     * This method is in charge of invoking 1 of 2 actions. Firstly, if the listview
     * is in a state of scrolling invoked by the hover cell being outside the bounds
     * of the listview, then this scrolling event is continued. Secondly, if the hover
     * cell has already been released, this invokes the animation for the hover cell
     * to return to its correct position after the listview has entered an idle scroll
     * state.
     */
    private void isScrollCompleted() {
        if (mCurrentVisibleItemCount > 0 && mCurrentScrollState == SCROLL_STATE_IDLE) {
            if (mCellIsMobile && mIsMobileScrolling) {
                handleMobileCellScroll();
            } else if (mIsWaitingForScrollFinish) {
                touchEventsEnded();
            }
        }
    }
    /**
     * Determines if the listview scrolled up enough to reveal a new cell at the
     * top of the list. If so, then the appropriate parameters are updated.
     */
    public void checkAndHandleFirstVisibleCellChange() {
        if (mCurrentFirstVisibleItem != mPreviousFirstVisibleItem) {
            if (mCellIsMobile && mMobileItemId != INVALID_ID) {
                updateNeighborViewsForId(mMobileItemId);
                handleCellSwitch();
            }
        }
    }
    /**
     * Determines if the listview scrolled down enough to reveal a new cell at the
     * bottom of the list. If so, then the appropriate parameters are updated.
     */
    public void checkAndHandleLastVisibleCellChange() {
        int currentLastVisibleItem = mCurrentFirstVisibleItem + mCurrentVisibleItemCount;
        int previousLastVisibleItem = mPreviousFirstVisibleItem + mPreviousVisibleItemCount;
        if (currentLastVisibleItem != previousLastVisibleItem) {
            if (mCellIsMobile && mMobileItemId != INVALID_ID) {
                updateNeighborViewsForId(mMobileItemId);
                handleCellSwitch();
            }
        }
    }
};
/**
 * Set the {@link com.nhaarman.listviewanimations.widget.DynamicListView.OnItemMovedListener} to be notified when an item is dropped.
 *
 * @param onItemMovedListener the listener to notify, or null to clear it
 */
public void setOnItemMovedListener(OnItemMovedListener onItemMovedListener) {
    this.mOnItemMovedListener = onItemMovedListener;
}
/**
 * Interface, usually implemented by a {@link com.nhaarman.listviewanimations.BaseAdapterDecorator},
 * that indicates that it can swap the visual position of two list items.
 * DynamicListView passes header-adjusted positions (see swapElements).
 *
 * @author Anton Spaans on 9/11/13.
 */
public interface Swappable {
    /**
     * Swaps the item on the first adapter position with the item on the second adapter position.
     * Be sure to call {@link android.widget.BaseAdapter#notifyDataSetChanged()} if appropriate when implementing this method.
     *
     * @param positionOne First adapter position.
     * @param positionTwo Second adapter position.
     */
    public void swapItems(int positionOne, int positionTwo);
}
}
|
Fixed crash when the listview has footerViews
|
library/src/com/nhaarman/listviewanimations/widget/DynamicListView.java
|
Fixed crash when the listview has footerViews
|
|
Java
|
apache-2.0
|
a8d8d9467937a26a7e11543eef1ee2c4acad70c5
| 0
|
nordfalk/AndroidElementer,nordfalk/AndroidElementer,nordfalk/AndroidElementer
|
package lekt04_lister2;
import android.app.Activity;
import android.content.res.Resources;
import android.graphics.drawable.Drawable;
import android.os.AsyncTask;
import android.os.Bundle;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.BaseAdapter;
import android.widget.ImageView;
import android.widget.ListView;
import android.widget.TextView;
import android.widget.Toast;
import dk.nordfalk.android.elementer.R;
/**
 * For when you have a long list, requirements for smooth scrolling, and/or
 * must show list elements that require loading of resources.
 * On Android the good old GUI-thread design pattern applies: ALL GUI work runs
 * in ONE thread, both display/scrolling and preparation of the things to show.
 * In some cases it becomes important to think carefully when programming list
 * functionality.
 * As little as possible (in particular no loading of resources or decoding of
 * bitmaps) should happen synchronously in the GUI thread while the list is
 * being displayed.
 * Another tip is here:
 * http://developer.android.com/training/improving-layouts/smooth-scrolling.html#ViewHolder
 * A third tip is to think about which background you use. I have personally
 * gone from terrible to fantastic performance by going over my
 * ListViews/ListActivities carefully and weeding out the backgrounds.
 *
 * @author j
 */
public class VisAlleAndroidDrawables extends Activity {
    /**
     * Whether images and resources are loaded in a background thread or in the GUI thread
     */
    boolean asynkronIndlæsning = true;
    /**
     * Whether views are reused or not
     */
    boolean genbrugElementer = true;
    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        ListView listView = new ListView(this);
        listView.setAdapter(new AndroidDrawablesAdapter());
        listView.setDividerHeight(3);
        /*
        handling of background images
        listView.setBackgroundResource(R.drawable.bil);
        // Make sure the background is drawn even while the list scrolls.
        // See http://android-developers.blogspot.com/2009/01/why-is-my-list-black-android.html
        listView.setCacheColorHint(0x00000000);
        // One could use a solid-colour background instead; it makes scrolling smoother
        //getListView().setCacheColorHint(0xffe4e4e4);
        */
        if (savedInstanceState == null) {
            // Fresh activity: show usage hints
            Toast.makeText(this, "Dette eksempel viser også hvor stor forskel genbrug af elementer og asynkron indlæsning gør", Toast.LENGTH_LONG).show();
            Toast.makeText(this, "Tryk MENU for at slå disse forbedringer fra og mærk forskellen", Toast.LENGTH_LONG).show();
        }
        setContentView(listView);
    }
    public class AndroidDrawablesAdapter extends BaseAdapter {
        Resources res = getResources();
        public int getCount() {
            return 1500;
        } // there are roughly a thousand drawables
        public Object getItem(int position) {
            return position;
        } // not used
        public long getItemId(int position) {
            return position;
        } // not used
        public View getView(final int position, View view, ViewGroup parent) {
            final ListeelemViewholder listeelem;
            if (view == null || !genbrugElementer) {
                view = getLayoutInflater().inflate(R.layout.listeelement, null);
                // Cache the findViewById() lookups to save CPU cycles. See
                // http://developer.android.com/training/improving-layouts/smooth-scrolling.html
                listeelem = new ListeelemViewholder();
                listeelem.overskrift = (TextView) view.findViewById(R.id.listeelem_overskrift);
                listeelem.beskrivelse = (TextView) view.findViewById(R.id.listeelem_beskrivelse);
                listeelem.billede = (ImageView) view.findViewById(R.id.listeelem_billede);
                view.setTag(listeelem);
            } else {
                listeelem = (ListeelemViewholder) view.getTag();
            }
            final int resurseId = android.R.drawable.alert_dark_frame + position; // first resource id
            listeelem.overskrift.setText(Integer.toString(resurseId));
            listeelem.beskrivelse.setText("Hex: " + Integer.toHexString(resurseId));
            // To keep scrolling smooth we can NOT load the resource in the GUI thread
            if (!asynkronIndlæsning) {
                listeelem.billede.setImageResource(resurseId);
            } else {
                listeelem.billede.setImageDrawable(null);
                listeelem.position = position;
                // Use an AsyncTask to load the image in the background
                new AsyncTask() {
                    @Override
                    protected Object doInBackground(Object... params) {
                        // Check whether the view holder has been recycled for another position
                        if (listeelem.position == position) {
                            try {
                                return res.getDrawable(resurseId); // handed over to onPostExecute()
                            } catch (Exception e) {// thrown if no drawable with that ID exists
                            }
                        }
                        return null;
                    }
                    @Override
                    protected void onPostExecute(Object result) {
                        // Check whether the view holder has been recycled for another position
                        if (listeelem.position != position) {
                            return;
                        }
                        listeelem.billede.setImageDrawable((Drawable) result);
                    }
                }.execute();
            }
            return view;
        }
    }
    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        menu.add(Menu.NONE, 100, Menu.NONE, "Genbrug elementer");
        menu.add(Menu.NONE, 101, Menu.NONE, "Asynkron indlæsning");
        return true;
    }
    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        if (item.getItemId() == 100) {
            genbrugElementer = !genbrugElementer;
            Toast.makeText(this, "Genbrug elementer: " + genbrugElementer, Toast.LENGTH_SHORT).show();
        }
        if (item.getItemId() == 101) {
            asynkronIndlæsning = !asynkronIndlæsning;
            Toast.makeText(this, "Asynkron indlæsning: " + asynkronIndlæsning, Toast.LENGTH_SHORT).show();
        }
        return true;
    }
}
|
src/lekt04_lister2/VisAlleAndroidDrawables.java
|
package lekt04_lister2;
import android.app.Activity;
import android.content.res.Resources;
import android.graphics.drawable.Drawable;
import android.os.AsyncTask;
import android.os.Bundle;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.BaseAdapter;
import android.widget.ImageView;
import android.widget.ListView;
import android.widget.TextView;
import android.widget.Toast;
import dk.nordfalk.android.elementer.R;
/**
 * For when you have a long list, requirements for smooth scrolling, and/or
 * must show list elements that require loading of resources.
 * On Android the good old GUI-thread design pattern applies: ALL GUI work runs
 * in ONE thread, both display/scrolling and preparation of the things to show.
 * In some cases it becomes important to think carefully when programming list
 * functionality.
 * As little as possible (in particular no loading of resources or decoding of
 * bitmaps) should happen synchronously in the GUI thread while the list is
 * being displayed.
 * Another tip is here:
 * http://developer.android.com/training/improving-layouts/smooth-scrolling.html#ViewHolder
 * A third tip is to think about which background you use. I have personally
 * gone from terrible to fantastic performance by going over my
 * ListViews/ListActivities carefully and weeding out the backgrounds.
 *
 * @author j
 */
public class VisAlleAndroidDrawables extends Activity {
    /**
     * Whether images and resources are loaded in a background thread or in the GUI thread
     */
    boolean asynkronIndlæsning = true;
    /**
     * Whether views are reused or not
     */
    boolean genbrugElementer = true;
    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        ListView listView = new ListView(this);
        listView.setAdapter(new AndroidDrawablesAdapter());
        listView.setDividerHeight(3);
        /*
        handling of background images
        listView.setBackgroundResource(R.drawable.bil);
        // Make sure the background is drawn even while the list scrolls.
        // See http://android-developers.blogspot.com/2009/01/why-is-my-list-black-android.html
        listView.setCacheColorHint(0x00000000);
        // One could use a solid-colour background instead; it makes scrolling smoother
        //getListView().setCacheColorHint(0xffe4e4e4);
        */
        if (savedInstanceState == null) {
            // Fresh activity: show usage hints
            Toast.makeText(this, "Dette eksempel viser også hvor stor forskel genbrug af elementer og asynkron indlæsning gør", Toast.LENGTH_LONG).show();
            Toast.makeText(this, "Tryk MENU for at slå disse forbedringer fra og mærk forskellen", Toast.LENGTH_LONG).show();
        }
        setContentView(listView);
    }
    public class AndroidDrawablesAdapter extends BaseAdapter {
        Resources res = getResources();
        public int getCount() {
            return 1500;
        } // there are roughly a thousand drawables
        public Object getItem(int position) {
            return position;
        } // not used
        public long getItemId(int position) {
            return position;
        } // not used
        public View getView(final int position, View view, ViewGroup parent) {
            final ListeelemViewholder listeelem;
            if (genbrugElementer && view != null) {
                listeelem = (ListeelemViewholder) view.getTag();
            } else {
                view = getLayoutInflater().inflate(R.layout.listeelement, null);
                // Cache the findViewById() lookups to save CPU cycles. See
                // http://developer.android.com/training/improving-layouts/smooth-scrolling.html
                listeelem = new ListeelemViewholder();
                listeelem.overskrift = (TextView) view.findViewById(R.id.listeelem_overskrift);
                listeelem.beskrivelse = (TextView) view.findViewById(R.id.listeelem_beskrivelse);
                listeelem.billede = (ImageView) view.findViewById(R.id.listeelem_billede);
                view.setTag(listeelem);
            }
            final int resurseId = android.R.drawable.alert_dark_frame + position; // first resource id
            listeelem.overskrift.setText(Integer.toString(resurseId));
            listeelem.beskrivelse.setText("Hex: " + Integer.toHexString(resurseId));
            // To keep scrolling smooth we can NOT load the resource in the GUI thread
            if (!asynkronIndlæsning) {
                listeelem.billede.setImageResource(resurseId);
            } else {
                listeelem.billede.setImageDrawable(null);
                listeelem.position = position;
                // Use an AsyncTask to load the image in the background
                new AsyncTask() {
                    @Override
                    protected Object doInBackground(Object... params) {
                        // Check whether the view holder has been recycled for another position
                        if (listeelem.position == position) {
                            try {
                                return res.getDrawable(resurseId); // handed over to onPostExecute()
                            } catch (Exception e) {// thrown if no drawable with that ID exists
                            }
                        }
                        return null;
                    }
                    @Override
                    protected void onPostExecute(Object result) {
                        // Check whether the view holder has been recycled for another position
                        if (listeelem.position != position) {
                            return;
                        }
                        listeelem.billede.setImageDrawable((Drawable) result);
                    }
                }.execute();
            }
            return view;
        }
    }
    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        menu.add(Menu.NONE, 100, Menu.NONE, "Genbrug elementer");
        menu.add(Menu.NONE, 101, Menu.NONE, "Asynkron indlæsning");
        return true;
    }
    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        if (item.getItemId() == 100) {
            genbrugElementer = !genbrugElementer;
            Toast.makeText(this, "Genbrug elementer: " + genbrugElementer, Toast.LENGTH_SHORT).show();
        }
        if (item.getItemId() == 101) {
            asynkronIndlæsning = !asynkronIndlæsning;
            Toast.makeText(this, "Asynkron indlæsning: " + asynkronIndlæsning, Toast.LENGTH_SHORT).show();
        }
        return true;
    }
}
|
Småting
|
src/lekt04_lister2/VisAlleAndroidDrawables.java
|
Småting
|
|
Java
|
apache-2.0
|
afd7d1c64f6b1675ad27a599cf205f105dfb77da
| 0
|
apache/commons-validator,floscher/commons-validator,floscher/commons-validator,mohanaraosv/commons-validator,floscher/commons-validator,apache/commons-validator,apache/commons-validator,mohanaraosv/commons-validator,mohanaraosv/commons-validator
|
/*
* $Header: /home/jerenkrantz/tmp/commons/commons-convert/cvs/home/cvs/jakarta-commons//validator/src/share/org/apache/commons/validator/Arg.java,v 1.5 2003/03/16 00:23:15 dgraham Exp $
* $Revision: 1.5 $
* $Date: 2003/03/16 00:23:15 $
*
* ====================================================================
*
* The Apache Software License, Version 1.1
*
* Copyright (c) 1999-2003 The Apache Software Foundation. All rights
* reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
*
* 3. The end-user documentation included with the redistribution, if
* any, must include the following acknowlegement:
* "This product includes software developed by the
* Apache Software Foundation (http://www.apache.org/)."
* Alternately, this acknowlegement may appear in the software itself,
* if and wherever such third-party acknowlegements normally appear.
*
* 4. The names "The Jakarta Project", "Commons", and "Apache Software
* Foundation" must not be used to endorse or promote products derived
* from this software without prior written permission. For written
* permission, please contact apache@apache.org.
*
* 5. Products derived from this software may not be called "Apache"
* nor may "Apache" appear in their names without prior written
* permission of the Apache Group.
*
* THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE APACHE SOFTWARE FOUNDATION OR
* ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
* USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
* OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
* ====================================================================
*
* This software consists of voluntary contributions made by many
* individuals on behalf of the Apache Software Foundation. For more
* information on the Apache Software Foundation, please see
* <http://www.apache.org/>.
*
*/
package org.apache.commons.validator;
import java.io.Serializable;
/**
* <p>A default argument or an argument for a
* specific validator definition (ex: required)
* can be stored to pass into a message as parameters. This can be used in a
* pluggable validator for constructing locale
* sensitive messages by using <code>java.text.MessageFormat</code>
* or an equivalent class. The resource field can be
* used to determine if the value stored in the argument
* is a value to be retrieved from a locale sensitive
* message retrieval system like <code>java.util.PropertyResourceBundle</code>.
* The resource field defaults to 'true'.</p>
*
* @author David Winterfeldt
* @version $Revision: 1.5 $ $Date: 2003/03/16 00:23:15 $
*/
public class Arg implements Cloneable, Serializable {
/**
* The name dependency that this argument goes with (optional).
*/
protected String name = null;
/**
* The key or value of the argument.
*/
protected String key = null;
/**
* Whether or not the key is a message resource (optional). Defaults to true.
* If it is 'true', the value will try to be resolved as a message resource.
*/
protected boolean resource = true;
/**
* Gets the name of the dependency.
*/
public String getName() {
return name;
}
/**
* Sets the name of the dependency.
*/
public void setName(String name) {
this.name = name;
}
/**
* Gets the key/value.
*/
public String getKey() {
return key;
}
/**
* Sets the key/value.
*/
public void setKey(String key) {
this.key = key;
}
/**
* Gets whether or not the key is a resource.
*/
public boolean getResource() {
return resource;
}
/**
* Sets whether or not the key is a resource.
*/
public void setResource(boolean resource) {
this.resource = resource;
}
/**
* Creates and returns a copy of this object.
*/
public Object clone() {
try {
Arg arg = (Arg)super.clone();
if (name != null) {
arg.setName(new String(name));
}
if (key != null) {
arg.setKey(new String(key));
}
arg.setResource(resource);
return arg;
} catch (CloneNotSupportedException e) {
throw new InternalError(e.toString());
}
}
/**
* Returns a string representation of the object.
*/
public String toString() {
StringBuffer results = new StringBuffer();
results.append("Arg: name=");
results.append(name);
results.append(" key=");
results.append(key);
results.append(" resource=");
results.append(resource);
results.append("\n");
return results.toString();
}
}
|
src/share/org/apache/commons/validator/Arg.java
|
/*
* $Header: /home/jerenkrantz/tmp/commons/commons-convert/cvs/home/cvs/jakarta-commons//validator/src/share/org/apache/commons/validator/Arg.java,v 1.4 2002/03/30 04:33:17 dwinterfeldt Exp $
* $Revision: 1.4 $
* $Date: 2002/03/30 04:33:17 $
*
* ====================================================================
*
* The Apache Software License, Version 1.1
*
* Copyright (c) 1999-2002 The Apache Software Foundation. All rights
* reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
*
* 3. The end-user documentation included with the redistribution, if
* any, must include the following acknowlegement:
* "This product includes software developed by the
* Apache Software Foundation (http://www.apache.org/)."
* Alternately, this acknowlegement may appear in the software itself,
* if and wherever such third-party acknowlegements normally appear.
*
* 4. The names "The Jakarta Project", "Commons", and "Apache Software
* Foundation" must not be used to endorse or promote products derived
* from this software without prior written permission. For written
* permission, please contact apache@apache.org.
*
* 5. Products derived from this software may not be called "Apache"
* nor may "Apache" appear in their names without prior written
* permission of the Apache Group.
*
* THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE APACHE SOFTWARE FOUNDATION OR
* ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
* USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
* OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
* ====================================================================
*
* This software consists of voluntary contributions made by many
* individuals on behalf of the Apache Software Foundation. For more
* information on the Apache Software Foundation, please see
* <http://www.apache.org/>.
*
*/
package org.apache.commons.validator;
import java.io.Serializable;
/**
* <p>An default argument or an argument for a
* specific validator definition (ex: required)
* can be stored to pass into a message as
* as parameters. This can be used in a
* pluggable validator for constructing locale
* sensitive messages by using <code>java.text.MessageFormat</code>
* or an equivalent class. The resource field can be
* used to determine if the value stored in the argument
* is a value to be retrieved from a locale sensitive
* message retrieval system like <code>java.util.PropertyResourceBundle</code>.
* The resource field defaults to 'true'.</p>
*
* @author David Winterfeldt
* @version $Revision: 1.4 $ $Date: 2002/03/30 04:33:17 $
*/
public class Arg implements Cloneable, Serializable {
/**
* The name dependency that this argument goes with (optional).
*/
protected String name = null;
/**
* The key or value of the argument.
*/
protected String key = null;
/**
* Whether or not the key is a message resource (optional). Defaults to true.
* If it is 'true', the value will try to be resolved as a message resource.
*/
protected boolean resource = true;
/**
* Gets the name of the dependency.
*/
public String getName() {
return name;
}
/**
* Sets the name of the dependency.
*/
public void setName(String name) {
this.name = name;
}
/**
* Gets the key/value.
*/
public String getKey() {
return key;
}
/**
* Sets the key/value.
*/
public void setKey(String key) {
this.key = key;
}
/**
* Gets whether or not the key is a resource.
*/
public boolean getResource() {
return resource;
}
/**
* Sets whether or not the key is a resource.
*/
public void setResource(boolean resource) {
this.resource = resource;
}
/**
* Creates and returns a copy of this object.
*/
public Object clone() {
try {
Arg arg = (Arg)super.clone();
if (name != null) {
arg.setName(new String(name));
}
if (key != null) {
arg.setKey(new String(key));
}
arg.setResource(resource);
return arg;
} catch (CloneNotSupportedException e) {
throw new InternalError(e.toString());
}
}
/**
* Returns a string representation of the object.
*/
public String toString() {
StringBuffer results = new StringBuffer();
results.append("Arg: name=");
results.append(name);
results.append(" key=");
results.append(key);
results.append(" resource=");
results.append(resource);
results.append("\n");
return results.toString();
}
}
|
javadoc changes only.
git-svn-id: c96248f4ce6931c9674b921fd55ab67490fa1adf@139909 13f79535-47bb-0310-9956-ffa450edef68
|
src/share/org/apache/commons/validator/Arg.java
|
javadoc changes only.
|
|
Java
|
apache-2.0
|
be3543c78dc490bd1da801cdb43f832e26ae3a03
| 0
|
cketti/k-9,k9mail/k-9,G00fY2/k-9_material_design,vatsalsura/k-9,dgger/k-9,jca02266/k-9,jberkel/k-9,ndew623/k-9,jca02266/k-9,philipwhiuk/q-mail,philipwhiuk/k-9,jberkel/k-9,cketti/k-9,k9mail/k-9,jca02266/k-9,ndew623/k-9,ndew623/k-9,indus1/k-9,GuillaumeSmaha/k-9,dgger/k-9,cketti/k-9,sedrubal/k-9,CodingRmy/k-9,vatsalsura/k-9,dgger/k-9,mawiegand/k-9,rishabhbitsg/k-9,G00fY2/k-9_material_design,vt0r/k-9,philipwhiuk/k-9,vt0r/k-9,mawiegand/k-9,mawiegand/k-9,roscrazy/k-9,CodingRmy/k-9,cketti/k-9,GuillaumeSmaha/k-9,indus1/k-9,philipwhiuk/q-mail,sedrubal/k-9,k9mail/k-9,philipwhiuk/q-mail,roscrazy/k-9,rishabhbitsg/k-9,GuillaumeSmaha/k-9
|
package com.fsck.k9.provider;
import java.io.File;
import java.io.IOException;
import java.util.Date;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.net.Uri;
import android.support.annotation.MainThread;
import android.support.v4.content.FileProvider;
import android.util.Log;
import com.fsck.k9.BuildConfig;
import com.fsck.k9.K9;
import com.fsck.k9.service.FileProviderInterface;
public class DecryptedFileProvider extends FileProvider {
private static final String AUTHORITY = BuildConfig.APPLICATION_ID + ".decryptedfileprovider";
private static final String DECRYPTED_CACHE_DIRECTORY = "decrypted";
private static final long FILE_DELETE_THRESHOLD_MILLISECONDS = 3 * 60 * 1000;
private static DecryptedFileProviderCleanupReceiver receiverRegistered = null;
@Override
public String getType(Uri uri) {
return uri.getQueryParameter("mime_type");
}
public static FileProviderInterface getFileProviderInterface(Context context) {
final Context applicationContext = context.getApplicationContext();
return new FileProviderInterface() {
@Override
public File createProvidedFile() throws IOException {
registerFileCleanupReceiver(applicationContext);
File decryptedTempDirectory = getDecryptedTempDirectory(applicationContext);
return File.createTempFile("decrypted-", null, decryptedTempDirectory);
}
@Override
public Uri getUriForProvidedFile(File file, String mimeType) throws IOException {
Uri uri = FileProvider.getUriForFile(applicationContext, AUTHORITY, file);
return uri.buildUpon().appendQueryParameter("mime_type", mimeType).build();
}
};
}
public static boolean deleteOldTemporaryFiles(Context context) {
File tempDirectory = getDecryptedTempDirectory(context);
boolean allFilesDeleted = true;
long deletionThreshold = new Date().getTime() - FILE_DELETE_THRESHOLD_MILLISECONDS;
for (File tempFile : tempDirectory.listFiles()) {
long lastModified = tempFile.lastModified();
if (lastModified < deletionThreshold) {
boolean fileDeleted = tempFile.delete();
if (!fileDeleted) {
Log.e(K9.LOG_TAG, "Failed to delete temporary file");
// TODO really do this? might cause our service to stay up indefinitely if a file can't be deleted
allFilesDeleted = false;
}
} else {
allFilesDeleted = false;
}
}
return allFilesDeleted;
}
private static File getDecryptedTempDirectory(Context context) {
File directory = new File(context.getCacheDir(), DECRYPTED_CACHE_DIRECTORY);
if (!directory.exists()) {
if (!directory.mkdir()) {
Log.e(K9.LOG_TAG, "Error creating directory: " + directory.getAbsolutePath());
}
}
return directory;
}
@Override
public void onTrimMemory(int level) {
if (level < TRIM_MEMORY_COMPLETE) {
return;
}
Context context = getContext();
if (context == null) {
return;
}
deleteOldTemporaryFiles(context);
if (receiverRegistered != null) {
context.unregisterReceiver(receiverRegistered);
receiverRegistered = null;
}
}
@MainThread // no need to synchronize for receiverRegistered
private static void registerFileCleanupReceiver(Context context) {
if (receiverRegistered != null) {
return;
}
receiverRegistered = new DecryptedFileProviderCleanupReceiver();
IntentFilter intentFilter = new IntentFilter();
intentFilter.addAction(Intent.ACTION_SCREEN_OFF);
context.registerReceiver(receiverRegistered, intentFilter);
}
private static class DecryptedFileProviderCleanupReceiver extends BroadcastReceiver {
@Override
@MainThread
public void onReceive(Context context, Intent intent) {
if (!Intent.ACTION_SCREEN_OFF.equals(intent.getAction())) {
throw new IllegalArgumentException("onReceive called with action that isn't screen off!");
}
boolean allFilesDeleted = deleteOldTemporaryFiles(context);
if (allFilesDeleted) {
context.unregisterReceiver(this);
receiverRegistered = null;
}
}
}
}
|
k9mail/src/main/java/com/fsck/k9/provider/DecryptedFileProvider.java
|
package com.fsck.k9.provider;
import java.io.File;
import java.io.IOException;
import java.util.Date;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.net.Uri;
import android.support.annotation.MainThread;
import android.support.v4.content.FileProvider;
import android.util.Log;
import com.fsck.k9.BuildConfig;
import com.fsck.k9.K9;
import com.fsck.k9.service.FileProviderInterface;
public class DecryptedFileProvider extends FileProvider {
private static final String AUTHORITY = BuildConfig.APPLICATION_ID + ".decryptedfileprovider";
private static final String DECRYPTED_CACHE_DIRECTORY = "decrypted";
private static final long FILE_DELETE_THRESHOLD_MILLISECONDS = 3 * 60 * 1000;
private static boolean receiverRegistered = false;
@Override
public String getType(Uri uri) {
return uri.getQueryParameter("mime_type");
}
public static FileProviderInterface getFileProviderInterface(Context context) {
final Context applicationContext = context.getApplicationContext();
return new FileProviderInterface() {
@Override
public File createProvidedFile() throws IOException {
registerFileCleanupReceiver(applicationContext);
File decryptedTempDirectory = getDecryptedTempDirectory(applicationContext);
return File.createTempFile("decrypted-", null, decryptedTempDirectory);
}
@Override
public Uri getUriForProvidedFile(File file, String mimeType) throws IOException {
Uri uri = FileProvider.getUriForFile(applicationContext, AUTHORITY, file);
return uri.buildUpon().appendQueryParameter("mime_type", mimeType).build();
}
};
}
public static boolean deleteOldTemporaryFiles(Context context) {
File tempDirectory = getDecryptedTempDirectory(context);
boolean allFilesDeleted = true;
long deletionThreshold = new Date().getTime() - FILE_DELETE_THRESHOLD_MILLISECONDS;
for (File tempFile : tempDirectory.listFiles()) {
long lastModified = tempFile.lastModified();
if (lastModified < deletionThreshold) {
boolean fileDeleted = tempFile.delete();
if (!fileDeleted) {
Log.e(K9.LOG_TAG, "Failed to delete temporary file");
// TODO really do this? might cause our service to stay up indefinitely if a file can't be deleted
allFilesDeleted = false;
}
} else {
allFilesDeleted = false;
}
}
return allFilesDeleted;
}
private static File getDecryptedTempDirectory(Context context) {
File directory = new File(context.getCacheDir(), DECRYPTED_CACHE_DIRECTORY);
if (!directory.exists()) {
if (!directory.mkdir()) {
Log.e(K9.LOG_TAG, "Error creating directory: " + directory.getAbsolutePath());
}
}
return directory;
}
@MainThread // no need to synchronize for receiverRegistered
private static void registerFileCleanupReceiver(Context context) {
if (receiverRegistered) {
return;
}
receiverRegistered = true;
IntentFilter intentFilter = new IntentFilter();
intentFilter.addAction(Intent.ACTION_SCREEN_OFF);
context.registerReceiver(new DecryptedFileProviderCleanupReceiver(), intentFilter);
}
private static class DecryptedFileProviderCleanupReceiver extends BroadcastReceiver {
@Override
@MainThread
public void onReceive(Context context, Intent intent) {
if (!Intent.ACTION_SCREEN_OFF.equals(intent.getAction())) {
throw new IllegalArgumentException("onReceive called with action that isn't screen off!");
}
boolean allFilesDeleted = deleteOldTemporaryFiles(context);
if (allFilesDeleted) {
context.unregisterReceiver(this);
receiverRegistered = false;
}
}
}
}
|
also clean up decrypted temp files on low memory
|
k9mail/src/main/java/com/fsck/k9/provider/DecryptedFileProvider.java
|
also clean up decrypted temp files on low memory
|
|
Java
|
apache-2.0
|
5eec9ac58cb4f2fe2518ef21b07845c7cf4c7b99
| 0
|
shelsonjava/netty,chinayin/netty,shenguoquan/netty,louxiu/netty,windie/netty,AnselQiao/netty,jchambers/netty,KatsuraKKKK/netty,youprofit/netty,ngocdaothanh/netty,Techcable/netty,CodingFabian/netty,slandelle/netty,seetharamireddy540/netty,silvaran/netty,KeyNexus/netty,mcanthony/netty,johnou/netty,chinayin/netty,Kalvar/netty,jovezhougang/netty,MediumOne/netty,AnselQiao/netty,orika/netty,altihou/netty,kiril-me/netty,AchinthaReemal/netty,shism/netty,chanakaudaya/netty,doom369/netty,danny200309/netty,s-gheldd/netty,timboudreau/netty,ioanbsu/netty,yipen9/netty,gigold/netty,bigheary/netty,DolphinZhao/netty,ichaki5748/netty,huuthang1993/netty,mcanthony/netty,andsel/netty,imangry/netty-zh,lukehutch/netty,eincs/netty,hgl888/netty,silvaran/netty,SinaTadayon/netty,kiril-me/netty,caoyanwei/netty,rovarga/netty,djchen/netty,sja/netty,tempbottle/netty,shenguoquan/netty,tbrooks8/netty,blademainer/netty,kjniemi/netty,lznhust/netty,joansmith/netty,jovezhougang/netty,xingguang2013/netty,skyao/netty,slandelle/netty,mway08/netty,seetharamireddy540/netty,ajaysarda/netty,maliqq/netty,netty/netty,orika/netty,yawkat/netty,Kingson4Wu/netty,xiexingguang/netty,duqiao/netty,sammychen105/netty,tbrooks8/netty,liyang1025/netty,sja/netty,carlbai/netty,codevelop/netty,pengzj/netty,JungMinu/netty,woshilaiceshide/netty,zhujingling/netty,balaprasanna/netty,Scottmitch/netty,ijuma/netty,maliqq/netty,jdivy/netty,x1957/netty,mikkokar/netty,qingsong-xu/netty,wuxiaowei907/netty,tempbottle/netty,ioanbsu/netty,kyle-liu/netty4study,Apache9/netty,shuangqiuan/netty,eincs/netty,bigheary/netty,bob329/netty,jchambers/netty,bryce-anderson/netty,sammychen105/netty,wangyikai/netty,sverkera/netty,yrcourage/netty,Mounika-Chirukuri/netty,kjniemi/netty,zzcclp/netty,louiscryan/netty,mx657649013/netty,silvaran/netty,f7753/netty,niuxinghua/netty,ichaki5748/netty,WangJunTYTL/netty,lightsocks/netty,IBYoung/netty,jenskordowski/netty,unei66/netty,AchinthaReemal/netty,rovarga/netty,jchambers/netty,woshilaiceshide/netty,qingsong-xu/
netty,imangry/netty-zh,xingguang2013/netty,gigold/netty,zhoffice/netty,exinguu/netty,artgon/netty,castomer/netty,lugt/netty,MediumOne/netty,Spikhalskiy/netty,caoyanwei/netty,WangJunTYTL/netty,ijuma/netty,Kingson4Wu/netty,fantayeneh/netty,LuminateWireless/netty,daschl/netty,youprofit/netty,yonglehou/netty-1,yawkat/netty,IBYoung/netty,yawkat/netty,phlizik/netty,artgon/netty,exinguu/netty,lukehutch/netty,firebase/netty,nadeeshaan/netty,zhujingling/netty,imangry/netty-zh,afds/netty,golovnin/netty,develar/netty,nkhuyu/netty,brennangaunce/netty,yonglehou/netty-1,bob329/netty,hyangtack/netty,altihou/netty,afds/netty,junjiemars/netty,duqiao/netty,smayoorans/netty,timboudreau/netty,Squarespace/netty,wangyikai/netty,bob329/netty,LuminateWireless/netty,skyao/netty,danny200309/netty,hepin1989/netty,louiscryan/netty,jdivy/netty,serioussam/netty,louiscryan/netty,Kalvar/netty,Kalvar/netty,louxiu/netty,Kingson4Wu/netty,lukw00/netty,lukehutch/netty,liuciuse/netty,sverkera/netty,xingguang2013/netty,f7753/netty,fenik17/netty,wuxiaowei907/netty,SinaTadayon/netty,kvr000/netty,ejona86/netty,kvr000/netty,serioussam/netty,mway08/netty,yrcourage/netty,hyangtack/netty,daschl/netty,luyiisme/netty,zxhfirefox/netty,Squarespace/netty,chrisprobst/netty,Alwayswithme/netty,afredlyj/learn-netty,zhujingling/netty,NiteshKant/netty,satishsaley/netty,Mounika-Chirukuri/netty,sammychen105/netty,AchinthaReemal/netty,liuciuse/netty,pengzj/netty,zer0se7en/netty,BrunoColin/netty,sameira/netty,buchgr/netty,liuciuse/netty,huanyi0723/netty,BrunoColin/netty,chanakaudaya/netty,BrunoColin/netty,ngocdaothanh/netty,fengshao0907/netty,zer0se7en/netty,s-gheldd/netty,shelsonjava/netty,huuthang1993/netty,firebase/netty,fengshao0907/netty,sameira/netty,exinguu/netty,Apache9/netty,eonezhang/netty,mcobrien/netty,liyang1025/netty,bryce-anderson/netty,lukw00/netty,zhoffice/netty,johnou/netty,sunbeansoft/netty,louxiu/netty,sunbeansoft/netty,Squarespace/netty,wuyinxian124/netty,blucas/netty,nayato/netty,doom369/netty,ijuma/nett
y,carl-mastrangelo/netty,liyang1025/netty,ifesdjeen/netty,zhujingling/netty,AnselQiao/netty,nayato/netty,skyao/netty,carl-mastrangelo/netty,shuangqiuan/netty,shenguoquan/netty,carl-mastrangelo/netty,Spikhalskiy/netty,jenskordowski/netty,afds/netty,lznhust/netty,xingguang2013/netty,doom369/netty,dongjiaqiang/netty,purplefox/netty-4.0.2.8-hacked,brennangaunce/netty,normanmaurer/netty,mubarak/netty,moyiguket/netty,moyiguket/netty,lukw00/netty,djchen/netty,rovarga/netty,ejona86/netty,firebase/netty,shism/netty,gerdriesselmann/netty,LuminateWireless/netty,pengzj/netty,danny200309/netty,NiteshKant/netty,lugt/netty,ngocdaothanh/netty,drowning/netty,joansmith/netty,Mounika-Chirukuri/netty,sunbeansoft/netty,SinaTadayon/netty,jenskordowski/netty,youprofit/netty,blucas/netty,eincs/netty,zzcclp/netty,yonglehou/netty-1,ngocdaothanh/netty,sameira/netty,codevelop/netty,LuminateWireless/netty,codevelop/netty,exinguu/netty,wuxiaowei907/netty,balaprasanna/netty,CodingFabian/netty,Spikhalskiy/netty,nadeeshaan/netty,fengjiachun/netty,liyang1025/netty,junjiemars/netty,yrcourage/netty,wangyikai/netty,sameira/netty,f7753/netty,firebase/netty,maliqq/netty,carlbai/netty,bigheary/netty,unei66/netty,idelpivnitskiy/netty,CliffYuan/netty,WangJunTYTL/netty,qingsong-xu/netty,mcobrien/netty,shuangqiuan/netty,serioussam/netty,lznhust/netty,carl-mastrangelo/netty,caoyanwei/netty,netty/netty,altihou/netty,sunbeansoft/netty,timboudreau/netty,andsel/netty,orika/netty,afds/netty,qingsong-xu/netty,hyangtack/netty,Kalvar/netty,serioussam/netty,altihou/netty,f7753/netty,eonezhang/netty,fenik17/netty,golovnin/netty,olupotd/netty,golovnin/netty,lukehutch/netty,skyao/netty,Alwayswithme/netty,Kingson4Wu/netty,alkemist/netty,seetharamireddy540/netty,wangyikai/netty,nat2013/netty,KatsuraKKKK/netty,satishsaley/netty,slandelle/netty,sverkera/netty,mcobrien/netty,fengjiachun/netty,jdivy/netty,duqiao/netty,nadeeshaan/netty,Techcable/netty,kvr000/netty,shism/netty,AchinthaReemal/netty,mikkokar/netty,ejona86/netty,cle
bertsuconic/netty,louiscryan/netty,joansmith/netty,balaprasanna/netty,duqiao/netty,windie/netty,MediumOne/netty,jroper/netty,KatsuraKKKK/netty,fenik17/netty,nmittler/netty,seetharamireddy540/netty,DavidAlphaFox/netty,xiongzheng/netty,mway08/netty,carl-mastrangelo/netty,unei66/netty,bob329/netty,sunbeansoft/netty,jongyeol/netty,drowning/netty,johnou/netty,jchambers/netty,idelpivnitskiy/netty,Techcable/netty,DolphinZhao/netty,aperepel/netty,wuyinxian124/netty,caoyanwei/netty,louxiu/netty,JungMinu/netty,wuyinxian124/netty,mcanthony/netty,tempbottle/netty,nayato/netty,danbev/netty,kjniemi/netty,DolphinZhao/netty,johnou/netty,Apache9/netty,carlbai/netty,alkemist/netty,KeyNexus/netty,mubarak/netty,x1957/netty,nkhuyu/netty,mcobrien/netty,tempbottle/netty,hgl888/netty,afredlyj/learn-netty,joansmith/netty,Mounika-Chirukuri/netty,WangJunTYTL/netty,cnoldtree/netty,mosoft521/netty,s-gheldd/netty,s-gheldd/netty,brennangaunce/netty,kvr000/netty,niuxinghua/netty,seetharamireddy540/netty,sverkera/netty,zzcclp/netty,danbev/netty,ngocdaothanh/netty,serioussam/netty,djchen/netty,blucas/netty,mx657649013/netty,eonezhang/netty,wuxiaowei907/netty,hepin1989/netty,moyiguket/netty,drowning/netty,yrcourage/netty,phlizik/netty,jdivy/netty,Squarespace/netty,Techcable/netty,yawkat/netty,afredlyj/learn-netty,alkemist/netty,purplefox/netty-4.0.2.8-hacked,huanyi0723/netty,lukehutch/netty,hyangtack/netty,kiril-me/netty,zxhfirefox/netty,xiongzheng/netty,NiteshKant/netty,ichaki5748/netty,zxhfirefox/netty,chrisprobst/netty,orika/netty,drowning/netty,shelsonjava/netty,fenik17/netty,blademainer/netty,mosoft521/netty,mcanthony/netty,moyiguket/netty,zxhfirefox/netty,netty/netty,djchen/netty,smayoorans/netty,mosoft521/netty,MediumOne/netty,purplefox/netty-4.0.2.8-hacked,woshilaiceshide/netty,gigold/netty,develar/netty,AnselQiao/netty,clebertsuconic/netty,junjiemars/netty,mubarak/netty,IBYoung/netty,DolphinZhao/netty,CodingFabian/netty,luyiisme/netty,sja/netty,dongjiaqiang/netty,bryce-anderson/netty,yongleh
ou/netty-1,bigheary/netty,Kalvar/netty,BrunoColin/netty,eonezhang/netty,buchgr/netty,huuthang1993/netty,doom369/netty,ninja-/netty,xiongzheng/netty,lightsocks/netty,dongjiaqiang/netty,clebertsuconic/netty,nat2013/netty,timboudreau/netty,shenguoquan/netty,liuciuse/netty,tbrooks8/netty,lukw00/netty,lightsocks/netty,xingguang2013/netty,phlizik/netty,satishsaley/netty,niuxinghua/netty,sverkera/netty,balaprasanna/netty,kjniemi/netty,yipen9/netty,danny200309/netty,wangyikai/netty,shuangqiuan/netty,qingsong-xu/netty,blademainer/netty,idelpivnitskiy/netty,exinguu/netty,chinayin/netty,bryce-anderson/netty,zhoffice/netty,AchinthaReemal/netty,hepin1989/netty,rovarga/netty,ioanbsu/netty,orika/netty,smayoorans/netty,CodingFabian/netty,woshilaiceshide/netty,nmittler/netty,silvaran/netty,luyiisme/netty,SinaTadayon/netty,jongyeol/netty,blucas/netty,zer0se7en/netty,mikkokar/netty,buchgr/netty,chanakaudaya/netty,ajaysarda/netty,DolphinZhao/netty,chrisprobst/netty,Mounika-Chirukuri/netty,Apache9/netty,zzcclp/netty,mosoft521/netty,jovezhougang/netty,NiteshKant/netty,eincs/netty,fantayeneh/netty,clebertsuconic/netty,KatsuraKKKK/netty,artgon/netty,yrcourage/netty,lukw00/netty,wuyinxian124/netty,mcobrien/netty,imangry/netty-zh,mikkokar/netty,DavidAlphaFox/netty,nayato/netty,louxiu/netty,DavidAlphaFox/netty,balaprasanna/netty,cnoldtree/netty,castomer/netty,mubarak/netty,chrisprobst/netty,doom369/netty,woshilaiceshide/netty,chinayin/netty,cnoldtree/netty,jongyeol/netty,CodingFabian/netty,mcanthony/netty,ejona86/netty,smayoorans/netty,nadeeshaan/netty,windie/netty,eonezhang/netty,windie/netty,jovezhougang/netty,fenik17/netty,blucas/netty,IBYoung/netty,shuangqiuan/netty,jongyeol/netty,olupotd/netty,CliffYuan/netty,shelsonjava/netty,buchgr/netty,menacher/netty,fengjiachun/netty,hgl888/netty,JungMinu/netty,luyiisme/netty,Kingson4Wu/netty,purplefox/netty-4.0.2.8-hacked,nadeeshaan/netty,lznhust/netty,carlbai/netty,andsel/netty,xiongzheng/netty,pengzj/netty,SinaTadayon/netty,ninja-/netty,Alwayswit
hme/netty,olupotd/netty,andsel/netty,junjiemars/netty,fengjiachun/netty,andsel/netty,fengjiachun/netty,nkhuyu/netty,LuminateWireless/netty,dongjiaqiang/netty,ioanbsu/netty,shism/netty,gerdriesselmann/netty,mubarak/netty,huanyi0723/netty,daschl/netty,dongjiaqiang/netty,moyiguket/netty,xiexingguang/netty,x1957/netty,mikkokar/netty,blademainer/netty,tbrooks8/netty,altihou/netty,JungMinu/netty,kjniemi/netty,ioanbsu/netty,normanmaurer/netty,castomer/netty,yipen9/netty,WangJunTYTL/netty,jenskordowski/netty,huuthang1993/netty,normanmaurer/netty,bigheary/netty,fantayeneh/netty,zer0se7en/netty,zhoffice/netty,huanyi0723/netty,yipen9/netty,ajaysarda/netty,xiexingguang/netty,jchambers/netty,chanakaudaya/netty,nkhuyu/netty,danbev/netty,niuxinghua/netty,KatsuraKKKK/netty,zhoffice/netty,xiongzheng/netty,johnou/netty,jdivy/netty,ninja-/netty,tempbottle/netty,lugt/netty,phlizik/netty,brennangaunce/netty,nmittler/netty,nayato/netty,alkemist/netty,lznhust/netty,artgon/netty,castomer/netty,gigold/netty,shelsonjava/netty,clebertsuconic/netty,ajaysarda/netty,kyle-liu/netty4study,timboudreau/netty,codevelop/netty,huanyi0723/netty,danny200309/netty,Spikhalskiy/netty,jovezhougang/netty,bryce-anderson/netty,AnselQiao/netty,hgl888/netty,sja/netty,youprofit/netty,gerdriesselmann/netty,imangry/netty-zh,nkhuyu/netty,MediumOne/netty,shism/netty,kiril-me/netty,ejona86/netty,lugt/netty,cnoldtree/netty,maliqq/netty,chanakaudaya/netty,ajaysarda/netty,olupotd/netty,silvaran/netty,zzcclp/netty,ichaki5748/netty,mway08/netty,gerdriesselmann/netty,hepin1989/netty,Alwayswithme/netty,skyao/netty,gerdriesselmann/netty,lightsocks/netty,hgl888/netty,fengshao0907/netty,menacher/netty,golovnin/netty,Squarespace/netty,ichaki5748/netty,mx657649013/netty,netty/netty,ijuma/netty,blademainer/netty,liuciuse/netty,olupotd/netty,djchen/netty,satishsaley/netty,mx657649013/netty,alkemist/netty,mx657649013/netty,gigold/netty,IBYoung/netty,castomer/netty,s-gheldd/netty,idelpivnitskiy/netty,junjiemars/netty,cnoldtree/netty,c
hinayin/netty,liyang1025/netty,lugt/netty,unei66/netty,carlbai/netty,ninja-/netty,lightsocks/netty,kvr000/netty,Scottmitch/netty,windie/netty,BrunoColin/netty,eincs/netty,f7753/netty,huuthang1993/netty,youprofit/netty,normanmaurer/netty,DavidAlphaFox/netty,danbev/netty,tbrooks8/netty,Scottmitch/netty,duqiao/netty,x1957/netty,ninja-/netty,fantayeneh/netty,artgon/netty,Techcable/netty,louiscryan/netty,satishsaley/netty,wuxiaowei907/netty,yonglehou/netty-1,fantayeneh/netty,afds/netty,unei66/netty,x1957/netty,netty/netty,ijuma/netty,chrisprobst/netty,xiexingguang/netty,maliqq/netty,luyiisme/netty,sja/netty,jongyeol/netty,zer0se7en/netty,zxhfirefox/netty,Alwayswithme/netty,shenguoquan/netty,normanmaurer/netty,nat2013/netty,xiexingguang/netty,idelpivnitskiy/netty,caoyanwei/netty,jenskordowski/netty,ifesdjeen/netty,Scottmitch/netty,danbev/netty,slandelle/netty,mosoft521/netty,bob329/netty,kiril-me/netty,niuxinghua/netty,Scottmitch/netty,yawkat/netty,brennangaunce/netty,smayoorans/netty,mway08/netty,zhujingling/netty,Apache9/netty,golovnin/netty,joansmith/netty,sameira/netty,Spikhalskiy/netty,NiteshKant/netty
|
/*
* JBoss, Home of Professional Open Source
*
* Copyright 2008, Red Hat Middleware LLC, and individual contributors
* by the @author tags. See the COPYRIGHT.txt in the distribution for a
* full listing of individual contributors.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package org.jboss.netty.channel;
/**
* A {@link ChannelHandler} that is notified when it is added to or removed
* from a {@link ChannelPipeline}. Please note that the methods of this
* handler is called only when the {@link ChannelPipeline} it belongs to has
* been {@linkplain ChannelPipeline#attach(Channel, ChannelSink) attached}.
*
* @author The Netty Project (netty-dev@lists.jboss.org)
* @author Trustin Lee (tlee@redhat.com)
* @version $Rev$, $Date$
*/
public interface LifeCycleAwareChannelHandler extends ChannelHandler {
void beforeAdd(ChannelHandlerContext ctx) throws Exception;
void afterAdd(ChannelHandlerContext ctx) throws Exception;
void beforeRemove(ChannelHandlerContext ctx) throws Exception;
void afterRemove(ChannelHandlerContext ctx) throws Exception;
}
|
src/main/java/org/jboss/netty/channel/LifeCycleAwareChannelHandler.java
|
/*
* JBoss, Home of Professional Open Source
*
* Copyright 2008, Red Hat Middleware LLC, and individual contributors
* by the @author tags. See the COPYRIGHT.txt in the distribution for a
* full listing of individual contributors.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package org.jboss.netty.channel;
/**
* A {@link ChannelHandler} that is notified when it is added to or removed
* from a {@link ChannelPipeline}. Please note that the methods of this
* handler is called only when the {@link ChannelPipeline} it belongs to has
* been {@linkplain ChannelPipeline#attach(Channel, ChannelSink) attached}.
*
* @author The Netty Project (netty-dev@lists.jboss.org)
* @author Trustin Lee (tlee@redhat.com)
* @version $Rev$, $Date$
*/
public interface LifeCycleAwareChannelHandler extends ChannelHandlerContext {
void beforeAdd(ChannelHandlerContext ctx) throws Exception;
void afterAdd(ChannelHandlerContext ctx) throws Exception;
void beforeRemove(ChannelHandlerContext ctx) throws Exception;
void afterRemove(ChannelHandlerContext ctx) throws Exception;
}
|
Fixed a silly mistake - LifeCycleAwareChannelHandler should implement ChannelHandler instead of ChannelHandlerContext
|
src/main/java/org/jboss/netty/channel/LifeCycleAwareChannelHandler.java
|
Fixed a silly mistake - LifeCycleAwareChannelHandler should implement ChannelHandler instead of ChannelHandlerContext
|
|
Java
|
apache-2.0
|
d4ff992d81dac485aa55736ac16b6a1e03d7965c
| 0
|
mreutegg/jackrabbit-oak,anchela/jackrabbit-oak,trekawek/jackrabbit-oak,amit-jain/jackrabbit-oak,mreutegg/jackrabbit-oak,mreutegg/jackrabbit-oak,trekawek/jackrabbit-oak,anchela/jackrabbit-oak,anchela/jackrabbit-oak,apache/jackrabbit-oak,amit-jain/jackrabbit-oak,apache/jackrabbit-oak,apache/jackrabbit-oak,trekawek/jackrabbit-oak,mreutegg/jackrabbit-oak,amit-jain/jackrabbit-oak,trekawek/jackrabbit-oak,apache/jackrabbit-oak,anchela/jackrabbit-oak,amit-jain/jackrabbit-oak,amit-jain/jackrabbit-oak,trekawek/jackrabbit-oak,mreutegg/jackrabbit-oak,apache/jackrabbit-oak,anchela/jackrabbit-oak
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.jackrabbit.oak.segment.file;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;
import static java.util.Collections.singleton;
import static org.apache.jackrabbit.oak.plugins.memory.EmptyNodeState.EMPTY_NODE;
import static org.apache.jackrabbit.oak.segment.SegmentWriterBuilder.segmentWriterBuilder;
import java.io.File;
import java.io.IOException;
import javax.annotation.CheckForNull;
import javax.annotation.Nonnull;
import com.google.common.base.Predicate;
import com.google.common.base.Supplier;
import org.apache.jackrabbit.oak.segment.RecordId;
import org.apache.jackrabbit.oak.segment.SegmentNodeState;
import org.apache.jackrabbit.oak.segment.SegmentVersion;
import org.apache.jackrabbit.oak.segment.SegmentWriter;
import org.apache.jackrabbit.oak.segment.WriterCacheManager;
import org.apache.jackrabbit.oak.segment.compaction.LoggingGCMonitor;
import org.apache.jackrabbit.oak.segment.compaction.SegmentGCOptions;
import org.apache.jackrabbit.oak.segment.file.FileStore.ReadOnlyStore;
import org.apache.jackrabbit.oak.spi.blob.BlobStore;
import org.apache.jackrabbit.oak.spi.gc.DelegatingGCMonitor;
import org.apache.jackrabbit.oak.spi.gc.GCMonitor;
import org.apache.jackrabbit.oak.spi.state.NodeBuilder;
import org.apache.jackrabbit.oak.stats.StatisticsProvider;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Builder for creating {@link FileStore} instances.
*/
public class FileStoreBuilder {
private static final Logger LOG = LoggerFactory.getLogger(FileStore.class);
@Nonnull
private final File directory;
@CheckForNull
private BlobStore blobStore; // null -> store blobs inline
private int maxFileSize = 256;
private int cacheSize; // 0 -> DEFAULT_MEMORY_CACHE_SIZE
private boolean memoryMapping;
@Nonnull
private final DelegatingGCMonitor gcMonitor = new DelegatingGCMonitor(
singleton(new LoggingGCMonitor(LOG)));
@Nonnull
private StatisticsProvider statsProvider = StatisticsProvider.NOOP;
@Nonnull
private SegmentVersion version = SegmentVersion.LATEST_VERSION;
@Nonnull
private SegmentGCOptions gcOptions = SegmentGCOptions.DEFAULT;
@Nonnull
private GCListener gcListener;
@Nonnull
private final WriterCacheManager cacheManager = new WriterCacheManager.Default() {{
gcListener = new GCListener() {
@Override
public void info(String message, Object... arguments) {
gcMonitor.info(message, arguments);
}
@Override
public void warn(String message, Object... arguments) {
gcMonitor.warn(message, arguments);
}
@Override
public void error(String message, Exception exception) {
gcMonitor.error(message, exception);
}
@Override
public void skipped(String reason, Object... arguments) {
gcMonitor.skipped(reason, arguments);
}
@Override
public void compacted(long[] segmentCounts, long[] recordCounts, long[] compactionMapWeights) {
gcMonitor.compacted(segmentCounts, recordCounts, compactionMapWeights);
}
@Override
public void cleaned(long reclaimedSize, long currentSize) {
gcMonitor.cleaned(reclaimedSize, currentSize);
}
@Override
public void compacted(@Nonnull Status status, final int newGeneration) {
switch (status) {
case SUCCESS:
evictCaches(new Predicate<Integer>() {
@Override
public boolean apply(Integer generation) {
return generation < newGeneration;
}
});
break;
case FAILURE:
evictCaches(new Predicate<Integer>() {
@Override
public boolean apply(Integer generation) {
return generation == newGeneration;
}
});
break;
}
}
};
}};
@CheckForNull
private TarRevisions revisions;
/**
* Create a new instance of a {@code FileStoreBuilder} for a file store.
* @param directory directory where the tar files are stored
* @return a new {@code FileStoreBuilder} instance.
*/
@Nonnull
public static FileStoreBuilder fileStoreBuilder(@Nonnull File directory) {
return new FileStoreBuilder(directory);
}
private FileStoreBuilder(@Nonnull File directory) {
this.directory = checkNotNull(directory);
}
/**
* Specify the {@link BlobStore}.
* @param blobStore
* @return this instance
*/
@Nonnull
public FileStoreBuilder withBlobStore(@Nonnull BlobStore blobStore) {
this.blobStore = checkNotNull(blobStore);
return this;
}
/**
* Maximal size of the generated tar files in MB.
* @param maxFileSize
* @return this instance
*/
@Nonnull
public FileStoreBuilder withMaxFileSize(int maxFileSize) {
this.maxFileSize = maxFileSize;
return this;
}
/**
* Size of the cache in MB.
* @param cacheSize
* @return this instance
*/
@Nonnull
public FileStoreBuilder withCacheSize(int cacheSize) {
this.cacheSize = cacheSize;
return this;
}
/**
* Turn caching off
* @return this instance
*/
@Nonnull
public FileStoreBuilder withNoCache() {
this.cacheSize = -1;
return this;
}
/**
* Turn memory mapping on or off
* @param memoryMapping
* @return this instance
*/
@Nonnull
public FileStoreBuilder withMemoryMapping(boolean memoryMapping) {
this.memoryMapping = memoryMapping;
return this;
}
/**
* Set memory mapping to the default value based on OS properties
* @return this instance
*/
@Nonnull
public FileStoreBuilder withDefaultMemoryMapping() {
this.memoryMapping = FileStore.MEMORY_MAPPING_DEFAULT;
return this;
}
/**
* {@link GCMonitor} for monitoring this files store's gc process.
* @param gcMonitor
* @return this instance
*/
@Nonnull
public FileStoreBuilder withGCMonitor(@Nonnull GCMonitor gcMonitor) {
this.gcMonitor.registerGCMonitor(checkNotNull(gcMonitor));
return this;
}
/**
* {@link StatisticsProvider} for collecting statistics related to FileStore
* @param statisticsProvider
* @return this instance
*/
@Nonnull
public FileStoreBuilder withStatisticsProvider(@Nonnull StatisticsProvider statisticsProvider) {
this.statsProvider = checkNotNull(statisticsProvider);
return this;
}
/**
* {@link SegmentVersion} the segment version of the store
* @param version
* @return this instance
*/
@Nonnull
public FileStoreBuilder withSegmentVersion(SegmentVersion version) {
this.version = checkNotNull(version);
return this;
}
/**
* {@link SegmentGCOptions} the garbage collection options of the store
* @param gcOptions
* @return this instance
*/
@Nonnull
public FileStoreBuilder withGCOptions(SegmentGCOptions gcOptions) {
this.gcOptions = checkNotNull(gcOptions);
return this;
}
/**
* Create a new {@link FileStore} instance with the settings specified in this
* builder. If none of the {@code with} methods have been called before calling
* this method, a file store with the following default settings is returned:
* <ul>
* <li>blob store: inline</li>
* <li>max file size: 256MB</li>
* <li>cache size: 256MB</li>
* <li>memory mapping: on for 64 bit JVMs off otherwise</li>
* <li>whiteboard: none. No {@link GCMonitor} tracking</li>
* <li>statsProvider: {@link StatisticsProvider#NOOP}</li>
* <li>GC options: {@link SegmentGCOptions#DEFAULT}</li>
* </ul>
*
* @return a new file store instance
* @throws IOException
*/
@Nonnull
public FileStore build() throws IOException {
directory.mkdir();
revisions = new TarRevisions(false, directory);
FileStore store = new FileStore(this, false);
revisions.bind(store, store.getTracker(), initialNode(store));
return store;
}
/**
* Create a new {@link ReadOnlyStore} instance with the settings specified in this
* builder. If none of the {@code with} methods have been called before calling
* this method, a file store with the following default settings is returned:
* <ul>
* <li>blob store: inline</li>
* <li>max file size: 256MB</li>
* <li>cache size: 256MB</li>
* <li>memory mapping: on for 64 bit JVMs off otherwise</li>
* <li>whiteboard: none. No {@link GCMonitor} tracking</li>
* <li>statsProvider: {@link StatisticsProvider#NOOP}</li>
* <li>GC options: {@link SegmentGCOptions#DEFAULT}</li>
* </ul>
*
* @return a new file store instance
* @throws IOException
*/
@Nonnull
public ReadOnlyStore buildReadOnly() throws IOException {
checkState(directory.exists() && directory.isDirectory());
revisions = new TarRevisions(true, directory);
ReadOnlyStore store = new ReadOnlyStore(this);
revisions.bind(store, store.getTracker(), initialNode(store));
return store;
}
@Nonnull
private static Supplier<RecordId> initialNode(final FileStore store) {
return new Supplier<RecordId>() {
@Override
public RecordId get() {
try {
SegmentWriter writer = segmentWriterBuilder("init").build(store);
NodeBuilder builder = EMPTY_NODE.builder();
builder.setChildNode("root", EMPTY_NODE);
SegmentNodeState node = writer.writeNode(builder.getNodeState());
writer.flush();
return node.getRecordId();
} catch (IOException e) {
String msg = "Failed to write initial node";
LOG.error(msg, e);
throw new IllegalStateException(msg, e);
}
}
};
}
@Nonnull
File getDirectory() {
return directory;
}
@CheckForNull
BlobStore getBlobStore() {
return blobStore;
}
public int getMaxFileSize() {
return maxFileSize;
}
int getCacheSize() {
return cacheSize;
}
boolean getMemoryMapping() {
return memoryMapping;
}
@Nonnull
GCListener getGcListener() {
return gcListener;
}
@Nonnull
StatisticsProvider getStatsProvider() {
return statsProvider;
}
@Nonnull
SegmentVersion getVersion() {
return version;
}
@Nonnull
SegmentGCOptions getGcOptions() {
return gcOptions;
}
@Nonnull
TarRevisions getRevisions() {
checkState(revisions != null, "File store not yet built");
return revisions;
}
@Nonnull
WriterCacheManager getCacheManager() {
return cacheManager;
}
}
|
oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/file/FileStoreBuilder.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.jackrabbit.oak.segment.file;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;
import static java.util.Collections.singleton;
import static org.apache.jackrabbit.oak.plugins.memory.EmptyNodeState.EMPTY_NODE;
import static org.apache.jackrabbit.oak.segment.SegmentWriterBuilder.segmentWriterBuilder;
import java.io.File;
import java.io.IOException;
import javax.annotation.CheckForNull;
import javax.annotation.Nonnull;
import com.google.common.base.Predicate;
import com.google.common.base.Supplier;
import org.apache.jackrabbit.oak.segment.RecordId;
import org.apache.jackrabbit.oak.segment.SegmentNodeState;
import org.apache.jackrabbit.oak.segment.SegmentVersion;
import org.apache.jackrabbit.oak.segment.SegmentWriter;
import org.apache.jackrabbit.oak.segment.WriterCacheManager;
import org.apache.jackrabbit.oak.segment.compaction.LoggingGCMonitor;
import org.apache.jackrabbit.oak.segment.compaction.SegmentGCOptions;
import org.apache.jackrabbit.oak.segment.file.FileStore.ReadOnlyStore;
import org.apache.jackrabbit.oak.spi.blob.BlobStore;
import org.apache.jackrabbit.oak.spi.gc.DelegatingGCMonitor;
import org.apache.jackrabbit.oak.spi.gc.GCMonitor;
import org.apache.jackrabbit.oak.spi.state.NodeBuilder;
import org.apache.jackrabbit.oak.stats.StatisticsProvider;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Builder for creating {@link FileStore} instances.
*/
public class FileStoreBuilder {
private static final Logger LOG = LoggerFactory.getLogger(FileStore.class);
@Nonnull
private final File directory;
@CheckForNull
private BlobStore blobStore; // null -> store blobs inline
private int maxFileSize = 256;
private int cacheSize; // 0 -> DEFAULT_MEMORY_CACHE_SIZE
private boolean memoryMapping;
@Nonnull
private final DelegatingGCMonitor gcMonitor = new DelegatingGCMonitor(
singleton(new LoggingGCMonitor(LOG)));
@Nonnull
private StatisticsProvider statsProvider = StatisticsProvider.NOOP;
@Nonnull
private SegmentVersion version = SegmentVersion.LATEST_VERSION;
@Nonnull
private SegmentGCOptions gcOptions = SegmentGCOptions.DEFAULT;
@Nonnull
private GCListener gcListener;
@Nonnull
private final WriterCacheManager cacheManager = new WriterCacheManager.Default() {{
gcListener = new GCListener() {
@Override
public void info(String message, Object... arguments) {
gcMonitor.info(message, arguments);
}
@Override
public void warn(String message, Object... arguments) {
gcMonitor.warn(message, arguments);
}
@Override
public void error(String message, Exception exception) {
gcMonitor.error(message, exception);
}
@Override
public void skipped(String reason, Object... arguments) {
gcMonitor.skipped(reason, arguments);
}
@Override
public void compacted(long[] segmentCounts, long[] recordCounts, long[] compactionMapWeights) {
gcMonitor.compacted(segmentCounts, recordCounts, compactionMapWeights);
}
@Override
public void cleaned(long reclaimedSize, long currentSize) {
gcMonitor.cleaned(reclaimedSize, currentSize);
}
@Override
public void compacted(@Nonnull Status status, final int newGeneration) {
switch (status) {
case SUCCESS:
evictCaches(new Predicate<Integer>() {
@Override
public boolean apply(Integer generation) {
return generation < newGeneration;
}
});
break;
case FAILURE:
evictCaches(new Predicate<Integer>() {
@Override
public boolean apply(Integer generation) {
return generation == newGeneration;
}
});
break;
}
}
};
}};
@CheckForNull
private TarRevisions revisions;
/**
* Create a new instance of a {@code FileStoreBuilder} for a file store.
* @param directory directory where the tar files are stored
* @return a new {@code FileStoreBuilder} instance.
*/
@Nonnull
public static FileStoreBuilder fileStoreBuilder(@Nonnull File directory) {
return new FileStoreBuilder(directory);
}
private FileStoreBuilder(@Nonnull File directory) {
this.directory = checkNotNull(directory);
}
/**
* Specify the {@link BlobStore}.
* @param blobStore
* @return this instance
*/
@Nonnull
public FileStoreBuilder withBlobStore(@Nonnull BlobStore blobStore) {
this.blobStore = checkNotNull(blobStore);
return this;
}
/**
* Maximal size of the generated tar files in MB.
* @param maxFileSize
* @return this instance
*/
@Nonnull
public FileStoreBuilder withMaxFileSize(int maxFileSize) {
this.maxFileSize = maxFileSize;
return this;
}
/**
* Size of the cache in MB.
* @param cacheSize
* @return this instance
*/
@Nonnull
public FileStoreBuilder withCacheSize(int cacheSize) {
this.cacheSize = cacheSize;
return this;
}
/**
* Turn caching off
* @return this instance
*/
@Nonnull
public FileStoreBuilder withNoCache() {
this.cacheSize = -1;
return this;
}
/**
* Turn memory mapping on or off
* @param memoryMapping
* @return this instance
*/
@Nonnull
public FileStoreBuilder withMemoryMapping(boolean memoryMapping) {
this.memoryMapping = memoryMapping;
return this;
}
/**
* Set memory mapping to the default value based on OS properties
* @return this instance
*/
@Nonnull
public FileStoreBuilder withDefaultMemoryMapping() {
this.memoryMapping = FileStore.MEMORY_MAPPING_DEFAULT;
return this;
}
/**
* {@link GCMonitor} for monitoring this files store's gc process.
* @param gcMonitor
* @return this instance
*/
@Nonnull
public FileStoreBuilder withGCMonitor(@Nonnull GCMonitor gcMonitor) {
this.gcMonitor.registerGCMonitor(checkNotNull(gcMonitor));
return this;
}
/**
* {@link StatisticsProvider} for collecting statistics related to FileStore
* @param statisticsProvider
* @return this instance
*/
@Nonnull
public FileStoreBuilder withStatisticsProvider(@Nonnull StatisticsProvider statisticsProvider) {
this.statsProvider = checkNotNull(statisticsProvider);
return this;
}
/**
* {@link SegmentVersion} the segment version of the store
* @param version
* @return this instance
*/
@Nonnull
public FileStoreBuilder withSegmentVersion(SegmentVersion version) {
this.version = checkNotNull(version);
return this;
}
@Nonnull
public FileStoreBuilder withGCOptions(SegmentGCOptions gcOptions) {
this.gcOptions = checkNotNull(gcOptions);
return this;
}
/**
* Create a new {@link FileStore} instance with the settings specified in this
* builder. If none of the {@code with} methods have been called before calling
* this method, a file store with the following default settings is returned:
* <ul>
* <li>blob store: inline</li>
* <li>root: empty node</li>
* <li>max file size: 256MB</li>
* <li>cache size: 256MB</li>
* <li>memory mapping: on for 64 bit JVMs off otherwise</li>
* <li>whiteboard: none. No {@link GCMonitor} tracking</li>
* <li>statsProvider: StatisticsProvider.NOOP</li>
* </ul>
*
* @return a new file store instance
* @throws IOException
*/
@Nonnull
public FileStore build() throws IOException {
directory.mkdir();
revisions = new TarRevisions(false, directory);
FileStore store = new FileStore(this, false);
revisions.bind(store, store.getTracker(), initialNode(store));
return store;
}
@Nonnull
public ReadOnlyStore buildReadOnly() throws IOException {
checkState(directory.exists() && directory.isDirectory());
revisions = new TarRevisions(true, directory);
ReadOnlyStore store = new ReadOnlyStore(this);
revisions.bind(store, store.getTracker(), initialNode(store));
return store;
}
@Nonnull
private static Supplier<RecordId> initialNode(final FileStore store) {
return new Supplier<RecordId>() {
@Override
public RecordId get() {
try {
SegmentWriter writer = segmentWriterBuilder("init").build(store);
NodeBuilder builder = EMPTY_NODE.builder();
builder.setChildNode("root", EMPTY_NODE);
SegmentNodeState node = writer.writeNode(builder.getNodeState());
writer.flush();
return node.getRecordId();
} catch (IOException e) {
String msg = "Failed to write initial node";
LOG.error(msg, e);
throw new IllegalStateException(msg, e);
}
}
};
}
@Nonnull
File getDirectory() {
return directory;
}
@CheckForNull
BlobStore getBlobStore() {
return blobStore;
}
public int getMaxFileSize() {
return maxFileSize;
}
int getCacheSize() {
return cacheSize;
}
boolean getMemoryMapping() {
return memoryMapping;
}
@Nonnull
GCListener getGcListener() {
return gcListener;
}
@Nonnull
StatisticsProvider getStatsProvider() {
return statsProvider;
}
@Nonnull
SegmentVersion getVersion() {
return version;
}
@Nonnull
SegmentGCOptions getGcOptions() {
return gcOptions;
}
@Nonnull
TarRevisions getRevisions() {
checkState(revisions != null, "File store not yet built");
return revisions;
}
@Nonnull
WriterCacheManager getCacheManager() {
return cacheManager;
}
}
|
OAK-4277: Finalise de-duplication caches
Javadoc
git-svn-id: 67138be12999c61558c3dd34328380c8e4523e73@1747162 13f79535-47bb-0310-9956-ffa450edef68
|
oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/file/FileStoreBuilder.java
|
OAK-4277: Finalise de-duplication caches Javadoc
|
|
Java
|
apache-2.0
|
a4cf78a6207e5782d6a9b90f14a52955aca2c208
| 0
|
irudyak/ignite,nizhikov/ignite,leveyj/ignite,DoudTechData/ignite,shroman/ignite,f7753/ignite,mcherkasov/ignite,VladimirErshov/ignite,dream-x/ignite,endian675/ignite,samaitra/ignite,sk0x50/ignite,samaitra/ignite,agura/incubator-ignite,nizhikov/ignite,pperalta/ignite,a1vanov/ignite,dream-x/ignite,ascherbakoff/ignite,andrey-kuznetsov/ignite,murador/ignite,apache/ignite,ntikhonov/ignite,WilliamDo/ignite,ilantukh/ignite,ntikhonov/ignite,ilantukh/ignite,leveyj/ignite,mcherkasov/ignite,apache/ignite,agura/incubator-ignite,StalkXT/ignite,ilantukh/ignite,BiryukovVA/ignite,amirakhmedov/ignite,NSAmelchev/ignite,voipp/ignite,sk0x50/ignite,StalkXT/ignite,nivanov/ignite,vadopolski/ignite,rfqu/ignite,chandresh-pancholi/ignite,StalkXT/ignite,BiryukovVA/ignite,ryanzz/ignite,xtern/ignite,voipp/ignite,xtern/ignite,f7753/ignite,wmz7year/ignite,ilantukh/ignite,WilliamDo/ignite,ascherbakoff/ignite,kromulan/ignite,daradurvs/ignite,SharplEr/ignite,irudyak/ignite,andrey-kuznetsov/ignite,shroman/ignite,WilliamDo/ignite,andrey-kuznetsov/ignite,sk0x50/ignite,StalkXT/ignite,a1vanov/ignite,ptupitsyn/ignite,kromulan/ignite,apache/ignite,VladimirErshov/ignite,samaitra/ignite,amirakhmedov/ignite,murador/ignite,vadopolski/ignite,DoudTechData/ignite,wmz7year/ignite,samaitra/ignite,ilantukh/ignite,WilliamDo/ignite,chandresh-pancholi/ignite,ptupitsyn/ignite,vldpyatkov/ignite,vadopolski/ignite,nizhikov/ignite,tkpanther/ignite,alexzaitzev/ignite,ryanzz/ignite,leveyj/ignite,shroman/ignite,rfqu/ignite,ryanzz/ignite,xtern/ignite,ptupitsyn/ignite,tkpanther/ignite,VladimirErshov/ignite,apache/ignite,wmz7year/ignite,alexzaitzev/ignite,agura/incubator-ignite,ptupitsyn/ignite,agura/incubator-ignite,dream-x/ignite,ntikhonov/ignite,apache/ignite,VladimirErshov/ignite,ryanzz/ignite,SomeFire/ignite,sk0x50/ignite,vldpyatkov/ignite,tkpanther/ignite,afinka77/ignite,chandresh-pancholi/ignite,rfqu/ignite,NSAmelchev/ignite,VladimirErshov/ignite,afinka77/ignite,ntikhonov/ignite,DoudTechData/ignite,tkpanther/ignite,NSAmelch
ev/ignite,ptupitsyn/ignite,amirakhmedov/ignite,psadusumilli/ignite,ilantukh/ignite,vadopolski/ignite,amirakhmedov/ignite,ptupitsyn/ignite,nivanov/ignite,SharplEr/ignite,SomeFire/ignite,DoudTechData/ignite,vadopolski/ignite,SomeFire/ignite,pperalta/ignite,ntikhonov/ignite,daradurvs/ignite,ntikhonov/ignite,irudyak/ignite,WilliamDo/ignite,chandresh-pancholi/ignite,BiryukovVA/ignite,nizhikov/ignite,tkpanther/ignite,nivanov/ignite,DoudTechData/ignite,VladimirErshov/ignite,shroman/ignite,DoudTechData/ignite,samaitra/ignite,f7753/ignite,voipp/ignite,DoudTechData/ignite,andrey-kuznetsov/ignite,voipp/ignite,vladisav/ignite,rfqu/ignite,a1vanov/ignite,BiryukovVA/ignite,wmz7year/ignite,shroman/ignite,shroman/ignite,ascherbakoff/ignite,amirakhmedov/ignite,leveyj/ignite,daradurvs/ignite,SomeFire/ignite,leveyj/ignite,vldpyatkov/ignite,mcherkasov/ignite,afinka77/ignite,andrey-kuznetsov/ignite,nizhikov/ignite,nivanov/ignite,endian675/ignite,NSAmelchev/ignite,SharplEr/ignite,andrey-kuznetsov/ignite,afinka77/ignite,mcherkasov/ignite,BiryukovVA/ignite,ptupitsyn/ignite,BiryukovVA/ignite,a1vanov/ignite,ryanzz/ignite,psadusumilli/ignite,pperalta/ignite,psadusumilli/ignite,SharplEr/ignite,endian675/ignite,kromulan/ignite,samaitra/ignite,dream-x/ignite,ryanzz/ignite,irudyak/ignite,SharplEr/ignite,shroman/ignite,nivanov/ignite,SharplEr/ignite,dream-x/ignite,wmz7year/ignite,kromulan/ignite,a1vanov/ignite,NSAmelchev/ignite,irudyak/ignite,endian675/ignite,agura/incubator-ignite,irudyak/ignite,WilliamDo/ignite,ascherbakoff/ignite,ilantukh/ignite,voipp/ignite,ilantukh/ignite,ntikhonov/ignite,andrey-kuznetsov/ignite,murador/ignite,xtern/ignite,NSAmelchev/ignite,apache/ignite,StalkXT/ignite,samaitra/ignite,samaitra/ignite,vladisav/ignite,sk0x50/ignite,ilantukh/ignite,chandresh-pancholi/ignite,apache/ignite,VladimirErshov/ignite,StalkXT/ignite,chandresh-pancholi/ignite,SomeFire/ignite,alexzaitzev/ignite,rfqu/ignite,voipp/ignite,rfqu/ignite,nivanov/ignite,irudyak/ignite,pperalta/ignite,ascherbakoff/i
gnite,amirakhmedov/ignite,wmz7year/ignite,murador/ignite,vladisav/ignite,alexzaitzev/ignite,endian675/ignite,psadusumilli/ignite,SomeFire/ignite,daradurvs/ignite,NSAmelchev/ignite,BiryukovVA/ignite,ascherbakoff/ignite,murador/ignite,BiryukovVA/ignite,f7753/ignite,andrey-kuznetsov/ignite,mcherkasov/ignite,murador/ignite,vladisav/ignite,nivanov/ignite,vldpyatkov/ignite,daradurvs/ignite,ascherbakoff/ignite,mcherkasov/ignite,ptupitsyn/ignite,nizhikov/ignite,a1vanov/ignite,chandresh-pancholi/ignite,ascherbakoff/ignite,shroman/ignite,NSAmelchev/ignite,psadusumilli/ignite,agura/incubator-ignite,kromulan/ignite,pperalta/ignite,vldpyatkov/ignite,vadopolski/ignite,daradurvs/ignite,f7753/ignite,irudyak/ignite,BiryukovVA/ignite,daradurvs/ignite,StalkXT/ignite,SharplEr/ignite,rfqu/ignite,tkpanther/ignite,alexzaitzev/ignite,psadusumilli/ignite,pperalta/ignite,dream-x/ignite,xtern/ignite,f7753/ignite,samaitra/ignite,tkpanther/ignite,sk0x50/ignite,daradurvs/ignite,agura/incubator-ignite,SharplEr/ignite,endian675/ignite,murador/ignite,tkpanther/ignite,mcherkasov/ignite,VladimirErshov/ignite,BiryukovVA/ignite,endian675/ignite,samaitra/ignite,voipp/ignite,nizhikov/ignite,ryanzz/ignite,amirakhmedov/ignite,leveyj/ignite,daradurvs/ignite,NSAmelchev/ignite,a1vanov/ignite,endian675/ignite,ascherbakoff/ignite,voipp/ignite,pperalta/ignite,voipp/ignite,daradurvs/ignite,irudyak/ignite,f7753/ignite,kromulan/ignite,xtern/ignite,afinka77/ignite,alexzaitzev/ignite,shroman/ignite,SharplEr/ignite,nivanov/ignite,apache/ignite,vadopolski/ignite,SomeFire/ignite,a1vanov/ignite,xtern/ignite,dream-x/ignite,xtern/ignite,vldpyatkov/ignite,ptupitsyn/ignite,andrey-kuznetsov/ignite,xtern/ignite,vladisav/ignite,sk0x50/ignite,dream-x/ignite,kromulan/ignite,afinka77/ignite,f7753/ignite,vldpyatkov/ignite,chandresh-pancholi/ignite,agura/incubator-ignite,nizhikov/ignite,leveyj/ignite,sk0x50/ignite,StalkXT/ignite,ilantukh/ignite,vadopolski/ignite,afinka77/ignite,kromulan/ignite,ryanzz/ignite,mcherkasov/ignite,amirakh
medov/ignite,shroman/ignite,rfqu/ignite,pperalta/ignite,psadusumilli/ignite,wmz7year/ignite,nizhikov/ignite,vladisav/ignite,afinka77/ignite,ptupitsyn/ignite,SomeFire/ignite,apache/ignite,SomeFire/ignite,vladisav/ignite,WilliamDo/ignite,murador/ignite,sk0x50/ignite,vladisav/ignite,DoudTechData/ignite,SomeFire/ignite,WilliamDo/ignite,amirakhmedov/ignite,alexzaitzev/ignite,chandresh-pancholi/ignite,ntikhonov/ignite,alexzaitzev/ignite,leveyj/ignite,alexzaitzev/ignite,vldpyatkov/ignite,psadusumilli/ignite,wmz7year/ignite,StalkXT/ignite,andrey-kuznetsov/ignite
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.jdbc;
import java.sql.Array;
import java.sql.Blob;
import java.sql.CallableStatement;
import java.sql.Clob;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.NClob;
import java.sql.PreparedStatement;
import java.sql.SQLClientInfoException;
import java.sql.SQLException;
import java.sql.SQLFeatureNotSupportedException;
import java.sql.SQLWarning;
import java.sql.SQLXML;
import java.sql.Savepoint;
import java.sql.Statement;
import java.sql.Struct;
import java.util.Collections;
import java.util.Map;
import java.util.Properties;
import java.util.UUID;
import java.util.concurrent.Executor;
import org.apache.ignite.internal.client.GridClient;
import org.apache.ignite.internal.client.GridClientConfiguration;
import org.apache.ignite.internal.client.GridClientDisconnectedException;
import org.apache.ignite.internal.client.GridClientException;
import org.apache.ignite.internal.client.GridClientFactory;
import org.apache.ignite.internal.client.GridClientFutureTimeoutException;
import org.apache.ignite.internal.util.typedef.F;
import org.apache.ignite.plugin.security.SecurityCredentials;
import org.apache.ignite.plugin.security.SecurityCredentialsBasicProvider;
import static java.sql.ResultSet.CONCUR_READ_ONLY;
import static java.sql.ResultSet.HOLD_CURSORS_OVER_COMMIT;
import static java.sql.ResultSet.TYPE_FORWARD_ONLY;
import static java.util.concurrent.TimeUnit.SECONDS;
import static org.apache.ignite.IgniteJdbcDriver.PROP_CACHE;
import static org.apache.ignite.IgniteJdbcDriver.PROP_HOST;
import static org.apache.ignite.IgniteJdbcDriver.PROP_NODE_ID;
import static org.apache.ignite.IgniteJdbcDriver.PROP_PORT;
/**
 * JDBC connection implementation.
 *
 * <p>Wraps a {@link GridClient} started from the driver URL/properties. Transactions,
 * updates, callable statements and SQL-specific types are not supported; the
 * corresponding methods throw {@link SQLFeatureNotSupportedException}.
 *
 * @deprecated Using Ignite client node based JDBC driver is preferable.
 * See documentation of {@link org.apache.ignite.IgniteJdbcDriver} for details.
 */
@Deprecated
public class JdbcConnection implements Connection {
    /** Validation task name. */
    private static final String VALID_TASK_NAME =
        "org.apache.ignite.internal.processors.cache.query.jdbc.GridCacheQueryJdbcValidationTask";

    /** Ignite client. */
    private final GridClient client;

    /** Cache name. */
    private String cacheName;

    /** Closed flag. */
    private boolean closed;

    /** URL. */
    private String url;

    /** Node ID. */
    private UUID nodeId;

    /** Timeout. */
    private int timeout;

    /**
     * Creates new connection.
     *
     * @param url Connection URL.
     * @param props Additional properties.
     * @throws SQLException In case Ignite client failed to start.
     */
    public JdbcConnection(String url, Properties props) throws SQLException {
        assert url != null;
        assert props != null;

        this.url = url;

        cacheName = props.getProperty(PROP_CACHE);

        String nodeIdProp = props.getProperty(PROP_NODE_ID);

        if (nodeIdProp != null)
            nodeId = UUID.fromString(nodeIdProp);

        try {
            GridClientConfiguration cfg = new GridClientConfiguration(props);

            cfg.setServers(Collections.singleton(props.getProperty(PROP_HOST) + ":" + props.getProperty(PROP_PORT)));

            String user = props.getProperty("user");
            String passwd = props.getProperty("password");

            // Optional authentication: credentials are applied only when a user is given.
            if (!F.isEmpty(user)) {
                SecurityCredentials creds = new SecurityCredentials(user, passwd);

                cfg.setSecurityCredentialsProvider(new SecurityCredentialsBasicProvider(creds));
            }

            // Disable all fetching and caching for metadata.
            cfg.setEnableMetricsCache(false);
            cfg.setEnableAttributesCache(false);
            cfg.setAutoFetchMetrics(false);
            cfg.setAutoFetchAttributes(false);

            client = GridClientFactory.start(cfg);
        }
        catch (GridClientException e) {
            throw new SQLException("Failed to start Ignite client.", e);
        }

        // Eagerly verify connectivity and cache name with a short (2 s) validation task.
        if (!isValid(2))
            throw new SQLException("Client is invalid. Probably cache name is wrong.");
    }

    /** {@inheritDoc} */
    @Override public Statement createStatement() throws SQLException {
        return createStatement(TYPE_FORWARD_ONLY, CONCUR_READ_ONLY, HOLD_CURSORS_OVER_COMMIT);
    }

    /** {@inheritDoc} */
    @Override public PreparedStatement prepareStatement(String sql) throws SQLException {
        ensureNotClosed();

        return prepareStatement(sql, TYPE_FORWARD_ONLY, CONCUR_READ_ONLY, HOLD_CURSORS_OVER_COMMIT);
    }

    /** {@inheritDoc} */
    @Override public CallableStatement prepareCall(String sql) throws SQLException {
        ensureNotClosed();

        throw new SQLFeatureNotSupportedException("Callable functions are not supported.");
    }

    /** {@inheritDoc} */
    @Override public String nativeSQL(String sql) throws SQLException {
        ensureNotClosed();

        return sql;
    }

    /** {@inheritDoc} */
    @Override public void setAutoCommit(boolean autoCommit) throws SQLException {
        ensureNotClosed();

        if (!autoCommit)
            throw new SQLFeatureNotSupportedException("Transactions are not supported.");
    }

    /** {@inheritDoc} */
    @Override public boolean getAutoCommit() throws SQLException {
        ensureNotClosed();

        return true;
    }

    /** {@inheritDoc} */
    @Override public void commit() throws SQLException {
        ensureNotClosed();

        throw new SQLFeatureNotSupportedException("Transactions are not supported.");
    }

    /** {@inheritDoc} */
    @Override public void rollback() throws SQLException {
        ensureNotClosed();

        throw new SQLFeatureNotSupportedException("Transactions are not supported.");
    }

    /** {@inheritDoc} */
    @Override public void close() throws SQLException {
        // Idempotent: repeated close() calls are no-ops per the JDBC contract.
        if (closed)
            return;

        closed = true;

        GridClientFactory.stop(client.id(), false);
    }

    /** {@inheritDoc} */
    @Override public boolean isClosed() throws SQLException {
        return closed;
    }

    /** {@inheritDoc} */
    @Override public DatabaseMetaData getMetaData() throws SQLException {
        ensureNotClosed();

        return new JdbcDatabaseMetadata(this);
    }

    /** {@inheritDoc} */
    @Override public void setReadOnly(boolean readOnly) throws SQLException {
        ensureNotClosed();

        if (!readOnly)
            throw new SQLFeatureNotSupportedException("Updates are not supported.");
    }

    /** {@inheritDoc} */
    @Override public boolean isReadOnly() throws SQLException {
        ensureNotClosed();

        return true;
    }

    /** {@inheritDoc} */
    @Override public void setCatalog(String catalog) throws SQLException {
        ensureNotClosed();

        throw new SQLFeatureNotSupportedException("Catalogs are not supported.");
    }

    /** {@inheritDoc} */
    @Override public String getCatalog() throws SQLException {
        ensureNotClosed();

        return null;
    }

    /** {@inheritDoc} */
    @Override public void setTransactionIsolation(int level) throws SQLException {
        ensureNotClosed();

        throw new SQLFeatureNotSupportedException("Transactions are not supported.");
    }

    /** {@inheritDoc} */
    @Override public int getTransactionIsolation() throws SQLException {
        ensureNotClosed();

        throw new SQLFeatureNotSupportedException("Transactions are not supported.");
    }

    /** {@inheritDoc} */
    @Override public SQLWarning getWarnings() throws SQLException {
        ensureNotClosed();

        return null;
    }

    /** {@inheritDoc} */
    @Override public void clearWarnings() throws SQLException {
        ensureNotClosed();
    }

    /** {@inheritDoc} */
    @Override public Statement createStatement(int resSetType, int resSetConcurrency) throws SQLException {
        return createStatement(resSetType, resSetConcurrency, HOLD_CURSORS_OVER_COMMIT);
    }

    /** {@inheritDoc} */
    @Override public PreparedStatement prepareStatement(String sql, int resSetType,
        int resSetConcurrency) throws SQLException {
        ensureNotClosed();

        return prepareStatement(sql, resSetType, resSetConcurrency, HOLD_CURSORS_OVER_COMMIT);
    }

    /** {@inheritDoc} */
    @Override public CallableStatement prepareCall(String sql, int resSetType,
        int resSetConcurrency) throws SQLException {
        ensureNotClosed();

        throw new SQLFeatureNotSupportedException("Callable functions are not supported.");
    }

    /** {@inheritDoc} */
    @Override public Map<String, Class<?>> getTypeMap() throws SQLException {
        throw new SQLFeatureNotSupportedException("Types mapping is not supported.");
    }

    /** {@inheritDoc} */
    @Override public void setTypeMap(Map<String, Class<?>> map) throws SQLException {
        ensureNotClosed();

        throw new SQLFeatureNotSupportedException("Types mapping is not supported.");
    }

    /** {@inheritDoc} */
    @Override public void setHoldability(int holdability) throws SQLException {
        ensureNotClosed();

        if (holdability != HOLD_CURSORS_OVER_COMMIT)
            throw new SQLFeatureNotSupportedException("Invalid holdability (transactions are not supported).");
    }

    /** {@inheritDoc} */
    @Override public int getHoldability() throws SQLException {
        ensureNotClosed();

        return HOLD_CURSORS_OVER_COMMIT;
    }

    /** {@inheritDoc} */
    @Override public Savepoint setSavepoint() throws SQLException {
        ensureNotClosed();

        throw new SQLFeatureNotSupportedException("Transactions are not supported.");
    }

    /** {@inheritDoc} */
    @Override public Savepoint setSavepoint(String name) throws SQLException {
        ensureNotClosed();

        throw new SQLFeatureNotSupportedException("Transactions are not supported.");
    }

    /** {@inheritDoc} */
    @Override public void rollback(Savepoint savepoint) throws SQLException {
        ensureNotClosed();

        throw new SQLFeatureNotSupportedException("Transactions are not supported.");
    }

    /** {@inheritDoc} */
    @Override public void releaseSavepoint(Savepoint savepoint) throws SQLException {
        ensureNotClosed();

        throw new SQLFeatureNotSupportedException("Transactions are not supported.");
    }

    /** {@inheritDoc} */
    @Override public Statement createStatement(int resSetType, int resSetConcurrency,
        int resSetHoldability) throws SQLException {
        ensureNotClosed();

        if (resSetType != TYPE_FORWARD_ONLY)
            throw new SQLFeatureNotSupportedException("Invalid result set type (only forward is supported.)");

        if (resSetConcurrency != CONCUR_READ_ONLY)
            throw new SQLFeatureNotSupportedException("Invalid concurrency (updates are not supported).");

        if (resSetHoldability != HOLD_CURSORS_OVER_COMMIT)
            throw new SQLFeatureNotSupportedException("Invalid holdability (transactions are not supported).");

        JdbcStatement stmt = new JdbcStatement(this);

        if (timeout > 0)
            stmt.timeout(timeout);

        return stmt;
    }

    /** {@inheritDoc} */
    @Override public PreparedStatement prepareStatement(String sql, int resSetType, int resSetConcurrency,
        int resSetHoldability) throws SQLException {
        ensureNotClosed();

        if (resSetType != TYPE_FORWARD_ONLY)
            throw new SQLFeatureNotSupportedException("Invalid result set type (only forward is supported.)");

        if (resSetConcurrency != CONCUR_READ_ONLY)
            throw new SQLFeatureNotSupportedException("Invalid concurrency (updates are not supported).");

        if (resSetHoldability != HOLD_CURSORS_OVER_COMMIT)
            throw new SQLFeatureNotSupportedException("Invalid holdability (transactions are not supported).");

        JdbcPreparedStatement stmt = new JdbcPreparedStatement(this, sql);

        if (timeout > 0)
            stmt.timeout(timeout);

        return stmt;
    }

    /** {@inheritDoc} */
    @Override public CallableStatement prepareCall(String sql, int resSetType, int resSetConcurrency,
        int resSetHoldability) throws SQLException {
        ensureNotClosed();

        throw new SQLFeatureNotSupportedException("Callable functions are not supported.");
    }

    /** {@inheritDoc} */
    @Override public PreparedStatement prepareStatement(String sql, int autoGeneratedKeys) throws SQLException {
        ensureNotClosed();

        throw new SQLFeatureNotSupportedException("Updates are not supported.");
    }

    /** {@inheritDoc} */
    @Override public PreparedStatement prepareStatement(String sql, int[] colIndexes) throws SQLException {
        ensureNotClosed();

        throw new SQLFeatureNotSupportedException("Updates are not supported.");
    }

    /** {@inheritDoc} */
    @Override public PreparedStatement prepareStatement(String sql, String[] colNames) throws SQLException {
        ensureNotClosed();

        throw new SQLFeatureNotSupportedException("Updates are not supported.");
    }

    /** {@inheritDoc} */
    @Override public Clob createClob() throws SQLException {
        ensureNotClosed();

        throw new SQLFeatureNotSupportedException("SQL-specific types are not supported.");
    }

    /** {@inheritDoc} */
    @Override public Blob createBlob() throws SQLException {
        ensureNotClosed();

        throw new SQLFeatureNotSupportedException("SQL-specific types are not supported.");
    }

    /** {@inheritDoc} */
    @Override public NClob createNClob() throws SQLException {
        ensureNotClosed();

        throw new SQLFeatureNotSupportedException("SQL-specific types are not supported.");
    }

    /** {@inheritDoc} */
    @Override public SQLXML createSQLXML() throws SQLException {
        ensureNotClosed();

        throw new SQLFeatureNotSupportedException("SQL-specific types are not supported.");
    }

    /** {@inheritDoc} */
    @Override public boolean isValid(int timeout) throws SQLException {
        ensureNotClosed();

        if (timeout < 0)
            throw new SQLException("Invalid timeout: " + timeout);

        try {
            return client.compute().<Boolean>executeAsync(VALID_TASK_NAME, cacheName).get(timeout, SECONDS);
        }
        catch (GridClientDisconnectedException | GridClientFutureTimeoutException e) {
            throw new SQLException("Failed to establish connection.", e);
        }
        catch (GridClientException ignored) {
            // Any other client failure means the cache/connection is simply not valid.
            return false;
        }
    }

    /** {@inheritDoc} */
    @Override public void setClientInfo(String name, String val) throws SQLClientInfoException {
        // JDBC contract: failures to set client info must surface as the declared
        // checked SQLClientInfoException, not an unchecked exception.
        throw new SQLClientInfoException("Client info is not supported.", null);
    }

    /** {@inheritDoc} */
    @Override public void setClientInfo(Properties props) throws SQLClientInfoException {
        // JDBC contract: failures to set client info must surface as the declared
        // checked SQLClientInfoException, not an unchecked exception.
        throw new SQLClientInfoException("Client info is not supported.", null);
    }

    /** {@inheritDoc} */
    @Override public String getClientInfo(String name) throws SQLException {
        ensureNotClosed();

        return null;
    }

    /** {@inheritDoc} */
    @Override public Properties getClientInfo() throws SQLException {
        ensureNotClosed();

        return new Properties();
    }

    /** {@inheritDoc} */
    @Override public Array createArrayOf(String typeName, Object[] elements) throws SQLException {
        ensureNotClosed();

        throw new SQLFeatureNotSupportedException("SQL-specific types are not supported.");
    }

    /** {@inheritDoc} */
    @Override public Struct createStruct(String typeName, Object[] attrs) throws SQLException {
        ensureNotClosed();

        throw new SQLFeatureNotSupportedException("SQL-specific types are not supported.");
    }

    /** {@inheritDoc} */
    @Override public <T> T unwrap(Class<T> iface) throws SQLException {
        if (!isWrapperFor(iface))
            throw new SQLException("Connection is not a wrapper for " + iface.getName());

        return (T)this;
    }

    /** {@inheritDoc} */
    @Override public boolean isWrapperFor(Class<?> iface) throws SQLException {
        return iface != null && iface == Connection.class;
    }

    /** {@inheritDoc} */
    @Override public void setSchema(String schema) throws SQLException {
        cacheName = schema;
    }

    /** {@inheritDoc} */
    @Override public String getSchema() throws SQLException {
        return cacheName;
    }

    /** {@inheritDoc} */
    @Override public void abort(Executor executor) throws SQLException {
        // JDBC contract: a null executor is an error, even though this
        // implementation performs the cleanup synchronously and never uses it.
        if (executor == null)
            throw new SQLException("Executor cannot be null.");

        close();
    }

    /** {@inheritDoc} */
    @Override public void setNetworkTimeout(Executor executor, int ms) throws SQLException {
        // JDBC contract: a negative timeout is reported via the declared SQLException.
        if (ms < 0)
            throw new SQLException("Timeout is below zero: " + ms);

        timeout = ms;
    }

    /** {@inheritDoc} */
    @Override public int getNetworkTimeout() throws SQLException {
        return timeout;
    }

    /**
     * @return Ignite client.
     */
    GridClient client() {
        return client;
    }

    /**
     * @return Cache name.
     */
    String cacheName() {
        return cacheName;
    }

    /**
     * @return URL.
     */
    String url() {
        return url;
    }

    /**
     * @return Node ID.
     */
    UUID nodeId() {
        return nodeId;
    }

    /**
     * Ensures that connection is not closed.
     *
     * @throws SQLException If connection is closed.
     */
    private void ensureNotClosed() throws SQLException {
        if (closed)
            throw new SQLException("Connection is closed.");
    }

    /**
     * @return Internal statement.
     * @throws SQLException In case of error.
     */
    JdbcStatement createStatement0() throws SQLException {
        return (JdbcStatement)createStatement();
    }
}
|
modules/core/src/main/java/org/apache/ignite/internal/jdbc/JdbcConnection.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.jdbc;
import java.sql.Array;
import java.sql.Blob;
import java.sql.CallableStatement;
import java.sql.Clob;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.NClob;
import java.sql.PreparedStatement;
import java.sql.SQLClientInfoException;
import java.sql.SQLException;
import java.sql.SQLFeatureNotSupportedException;
import java.sql.SQLWarning;
import java.sql.SQLXML;
import java.sql.Savepoint;
import java.sql.Statement;
import java.sql.Struct;
import java.util.Collections;
import java.util.Map;
import java.util.Properties;
import java.util.UUID;
import java.util.concurrent.Executor;
import org.apache.ignite.internal.client.GridClient;
import org.apache.ignite.internal.client.GridClientConfiguration;
import org.apache.ignite.internal.client.GridClientDisconnectedException;
import org.apache.ignite.internal.client.GridClientException;
import org.apache.ignite.internal.client.GridClientFactory;
import org.apache.ignite.internal.client.GridClientFutureTimeoutException;
import static java.sql.ResultSet.CONCUR_READ_ONLY;
import static java.sql.ResultSet.HOLD_CURSORS_OVER_COMMIT;
import static java.sql.ResultSet.TYPE_FORWARD_ONLY;
import static java.util.concurrent.TimeUnit.SECONDS;
import static org.apache.ignite.IgniteJdbcDriver.PROP_CACHE;
import static org.apache.ignite.IgniteJdbcDriver.PROP_HOST;
import static org.apache.ignite.IgniteJdbcDriver.PROP_NODE_ID;
import static org.apache.ignite.IgniteJdbcDriver.PROP_PORT;
/**
 * JDBC connection implementation.
 *
 * NOTE(review): this version performs no client authentication — the "user"/"password"
 * connection properties are never read, so the GridClient connects without credentials.
 *
 * @deprecated Using Ignite client node based JDBC driver is preferable.
 * See documentation of {@link org.apache.ignite.IgniteJdbcDriver} for details.
 */
@Deprecated
public class JdbcConnection implements Connection {
    /** Validation task name (executed remotely to validate connection and cache name). */
    private static final String VALID_TASK_NAME =
        "org.apache.ignite.internal.processors.cache.query.jdbc.GridCacheQueryJdbcValidationTask";
    /** Ignite client. */
    private final GridClient client;
    /** Cache name. */
    private String cacheName;
    /** Closed flag. */
    private boolean closed;
    /** URL. */
    private String url;
    /** Node ID. */
    private UUID nodeId;
    /** Timeout (milliseconds) applied to statements created by this connection. */
    private int timeout;
    /**
     * Creates new connection.
     *
     * @param url Connection URL.
     * @param props Additional properties.
     * @throws SQLException In case Ignite client failed to start.
     */
    public JdbcConnection(String url, Properties props) throws SQLException {
        assert url != null;
        assert props != null;
        this.url = url;
        cacheName = props.getProperty(PROP_CACHE);
        String nodeIdProp = props.getProperty(PROP_NODE_ID);
        if (nodeIdProp != null)
            nodeId = UUID.fromString(nodeIdProp);
        try {
            GridClientConfiguration cfg = new GridClientConfiguration(props);
            cfg.setServers(Collections.singleton(props.getProperty(PROP_HOST) + ":" + props.getProperty(PROP_PORT)));
            // Disable all fetching and caching for metadata.
            cfg.setEnableMetricsCache(false);
            cfg.setEnableAttributesCache(false);
            cfg.setAutoFetchMetrics(false);
            cfg.setAutoFetchAttributes(false);
            client = GridClientFactory.start(cfg);
        }
        catch (GridClientException e) {
            throw new SQLException("Failed to start Ignite client.", e);
        }
        // Eager connectivity/cache-name check with a 2-second budget.
        // NOTE(review): calls the overridable isValid() from the constructor.
        if (!isValid(2))
            throw new SQLException("Client is invalid. Probably cache name is wrong.");
    }
    /** {@inheritDoc} */
    @Override public Statement createStatement() throws SQLException {
        return createStatement(TYPE_FORWARD_ONLY, CONCUR_READ_ONLY, HOLD_CURSORS_OVER_COMMIT);
    }
    /** {@inheritDoc} */
    @Override public PreparedStatement prepareStatement(String sql) throws SQLException {
        ensureNotClosed();
        return prepareStatement(sql, TYPE_FORWARD_ONLY, CONCUR_READ_ONLY, HOLD_CURSORS_OVER_COMMIT);
    }
    /** {@inheritDoc} */
    @Override public CallableStatement prepareCall(String sql) throws SQLException {
        ensureNotClosed();
        throw new SQLFeatureNotSupportedException("Callable functions are not supported.");
    }
    /** {@inheritDoc} */
    @Override public String nativeSQL(String sql) throws SQLException {
        // No translation is performed: SQL is passed through as-is.
        ensureNotClosed();
        return sql;
    }
    /** {@inheritDoc} */
    @Override public void setAutoCommit(boolean autoCommit) throws SQLException {
        ensureNotClosed();
        // Auto-commit is the only supported mode; disabling it would imply transactions.
        if (!autoCommit)
            throw new SQLFeatureNotSupportedException("Transactions are not supported.");
    }
    /** {@inheritDoc} */
    @Override public boolean getAutoCommit() throws SQLException {
        ensureNotClosed();
        return true;
    }
    /** {@inheritDoc} */
    @Override public void commit() throws SQLException {
        ensureNotClosed();
        throw new SQLFeatureNotSupportedException("Transactions are not supported.");
    }
    /** {@inheritDoc} */
    @Override public void rollback() throws SQLException {
        ensureNotClosed();
        throw new SQLFeatureNotSupportedException("Transactions are not supported.");
    }
    /** {@inheritDoc} */
    @Override public void close() throws SQLException {
        // Idempotent per the JDBC contract: repeated close() calls are no-ops.
        if (closed)
            return;
        closed = true;
        // NOTE(review): second argument presumably means "do not wait for graceful
        // shutdown" — confirm against GridClientFactory documentation.
        GridClientFactory.stop(client.id(), false);
    }
    /** {@inheritDoc} */
    @Override public boolean isClosed() throws SQLException {
        return closed;
    }
    /** {@inheritDoc} */
    @Override public DatabaseMetaData getMetaData() throws SQLException {
        ensureNotClosed();
        return new JdbcDatabaseMetadata(this);
    }
    /** {@inheritDoc} */
    @Override public void setReadOnly(boolean readOnly) throws SQLException {
        ensureNotClosed();
        // Connection is inherently read-only; turning that off is unsupported.
        if (!readOnly)
            throw new SQLFeatureNotSupportedException("Updates are not supported.");
    }
    /** {@inheritDoc} */
    @Override public boolean isReadOnly() throws SQLException {
        ensureNotClosed();
        return true;
    }
    /** {@inheritDoc} */
    @Override public void setCatalog(String catalog) throws SQLException {
        ensureNotClosed();
        throw new SQLFeatureNotSupportedException("Catalogs are not supported.");
    }
    /** {@inheritDoc} */
    @Override public String getCatalog() throws SQLException {
        ensureNotClosed();
        return null;
    }
    /** {@inheritDoc} */
    @Override public void setTransactionIsolation(int level) throws SQLException {
        ensureNotClosed();
        throw new SQLFeatureNotSupportedException("Transactions are not supported.");
    }
    /** {@inheritDoc} */
    @Override public int getTransactionIsolation() throws SQLException {
        ensureNotClosed();
        throw new SQLFeatureNotSupportedException("Transactions are not supported.");
    }
    /** {@inheritDoc} */
    @Override public SQLWarning getWarnings() throws SQLException {
        ensureNotClosed();
        return null;
    }
    /** {@inheritDoc} */
    @Override public void clearWarnings() throws SQLException {
        ensureNotClosed();
    }
    /** {@inheritDoc} */
    @Override public Statement createStatement(int resSetType, int resSetConcurrency) throws SQLException {
        return createStatement(resSetType, resSetConcurrency, HOLD_CURSORS_OVER_COMMIT);
    }
    /** {@inheritDoc} */
    @Override public PreparedStatement prepareStatement(String sql, int resSetType,
        int resSetConcurrency) throws SQLException {
        ensureNotClosed();
        return prepareStatement(sql, resSetType, resSetConcurrency, HOLD_CURSORS_OVER_COMMIT);
    }
    /** {@inheritDoc} */
    @Override public CallableStatement prepareCall(String sql, int resSetType,
        int resSetConcurrency) throws SQLException {
        ensureNotClosed();
        throw new SQLFeatureNotSupportedException("Callable functions are not supported.");
    }
    /** {@inheritDoc} */
    @Override public Map<String, Class<?>> getTypeMap() throws SQLException {
        throw new SQLFeatureNotSupportedException("Types mapping is not supported.");
    }
    /** {@inheritDoc} */
    @Override public void setTypeMap(Map<String, Class<?>> map) throws SQLException {
        ensureNotClosed();
        throw new SQLFeatureNotSupportedException("Types mapping is not supported.");
    }
    /** {@inheritDoc} */
    @Override public void setHoldability(int holdability) throws SQLException {
        ensureNotClosed();
        // Only HOLD_CURSORS_OVER_COMMIT makes sense without transaction support.
        if (holdability != HOLD_CURSORS_OVER_COMMIT)
            throw new SQLFeatureNotSupportedException("Invalid holdability (transactions are not supported).");
    }
    /** {@inheritDoc} */
    @Override public int getHoldability() throws SQLException {
        ensureNotClosed();
        return HOLD_CURSORS_OVER_COMMIT;
    }
    /** {@inheritDoc} */
    @Override public Savepoint setSavepoint() throws SQLException {
        ensureNotClosed();
        throw new SQLFeatureNotSupportedException("Transactions are not supported.");
    }
    /** {@inheritDoc} */
    @Override public Savepoint setSavepoint(String name) throws SQLException {
        ensureNotClosed();
        throw new SQLFeatureNotSupportedException("Transactions are not supported.");
    }
    /** {@inheritDoc} */
    @Override public void rollback(Savepoint savepoint) throws SQLException {
        ensureNotClosed();
        throw new SQLFeatureNotSupportedException("Transactions are not supported.");
    }
    /** {@inheritDoc} */
    @Override public void releaseSavepoint(Savepoint savepoint) throws SQLException {
        ensureNotClosed();
        throw new SQLFeatureNotSupportedException("Transactions are not supported.");
    }
    /** {@inheritDoc} */
    @Override public Statement createStatement(int resSetType, int resSetConcurrency,
        int resSetHoldability) throws SQLException {
        ensureNotClosed();
        // Only forward-only, read-only, hold-over-commit result sets are supported.
        if (resSetType != TYPE_FORWARD_ONLY)
            throw new SQLFeatureNotSupportedException("Invalid result set type (only forward is supported.)");
        if (resSetConcurrency != CONCUR_READ_ONLY)
            throw new SQLFeatureNotSupportedException("Invalid concurrency (updates are not supported).");
        if (resSetHoldability != HOLD_CURSORS_OVER_COMMIT)
            throw new SQLFeatureNotSupportedException("Invalid holdability (transactions are not supported).");
        JdbcStatement stmt = new JdbcStatement(this);
        // Propagate the connection-level network timeout to the statement.
        if (timeout > 0)
            stmt.timeout(timeout);
        return stmt;
    }
    /** {@inheritDoc} */
    @Override public PreparedStatement prepareStatement(String sql, int resSetType, int resSetConcurrency,
        int resSetHoldability) throws SQLException {
        ensureNotClosed();
        // Only forward-only, read-only, hold-over-commit result sets are supported.
        if (resSetType != TYPE_FORWARD_ONLY)
            throw new SQLFeatureNotSupportedException("Invalid result set type (only forward is supported.)");
        if (resSetConcurrency != CONCUR_READ_ONLY)
            throw new SQLFeatureNotSupportedException("Invalid concurrency (updates are not supported).");
        if (resSetHoldability != HOLD_CURSORS_OVER_COMMIT)
            throw new SQLFeatureNotSupportedException("Invalid holdability (transactions are not supported).");
        JdbcPreparedStatement stmt = new JdbcPreparedStatement(this, sql);
        // Propagate the connection-level network timeout to the statement.
        if (timeout > 0)
            stmt.timeout(timeout);
        return stmt;
    }
    /** {@inheritDoc} */
    @Override public CallableStatement prepareCall(String sql, int resSetType, int resSetConcurrency,
        int resSetHoldability) throws SQLException {
        ensureNotClosed();
        throw new SQLFeatureNotSupportedException("Callable functions are not supported.");
    }
    /** {@inheritDoc} */
    @Override public PreparedStatement prepareStatement(String sql, int autoGeneratedKeys) throws SQLException {
        ensureNotClosed();
        throw new SQLFeatureNotSupportedException("Updates are not supported.");
    }
    /** {@inheritDoc} */
    @Override public PreparedStatement prepareStatement(String sql, int[] colIndexes) throws SQLException {
        ensureNotClosed();
        throw new SQLFeatureNotSupportedException("Updates are not supported.");
    }
    /** {@inheritDoc} */
    @Override public PreparedStatement prepareStatement(String sql, String[] colNames) throws SQLException {
        ensureNotClosed();
        throw new SQLFeatureNotSupportedException("Updates are not supported.");
    }
    /** {@inheritDoc} */
    @Override public Clob createClob() throws SQLException {
        ensureNotClosed();
        throw new SQLFeatureNotSupportedException("SQL-specific types are not supported.");
    }
    /** {@inheritDoc} */
    @Override public Blob createBlob() throws SQLException {
        ensureNotClosed();
        throw new SQLFeatureNotSupportedException("SQL-specific types are not supported.");
    }
    /** {@inheritDoc} */
    @Override public NClob createNClob() throws SQLException {
        ensureNotClosed();
        throw new SQLFeatureNotSupportedException("SQL-specific types are not supported.");
    }
    /** {@inheritDoc} */
    @Override public SQLXML createSQLXML() throws SQLException {
        ensureNotClosed();
        throw new SQLFeatureNotSupportedException("SQL-specific types are not supported.");
    }
    /** {@inheritDoc} */
    @Override public boolean isValid(int timeout) throws SQLException {
        ensureNotClosed();
        if (timeout < 0)
            throw new SQLException("Invalid timeout: " + timeout);
        try {
            // Run the remote validation task; it confirms both connectivity and cache name.
            return client.compute().<Boolean>executeAsync(VALID_TASK_NAME, cacheName).get(timeout, SECONDS);
        }
        catch (GridClientDisconnectedException | GridClientFutureTimeoutException e) {
            throw new SQLException("Failed to establish connection.", e);
        }
        catch (GridClientException ignored) {
            // Any other client failure simply means the connection is not valid.
            return false;
        }
    }
    /** {@inheritDoc} */
    @Override public void setClientInfo(String name, String val) throws SQLClientInfoException {
        // NOTE(review): throws unchecked UnsupportedOperationException although the
        // signature declares the checked SQLClientInfoException the JDBC spec expects.
        throw new UnsupportedOperationException("Client info is not supported.");
    }
    /** {@inheritDoc} */
    @Override public void setClientInfo(Properties props) throws SQLClientInfoException {
        // NOTE(review): same unchecked-exception deviation as the single-property overload.
        throw new UnsupportedOperationException("Client info is not supported.");
    }
    /** {@inheritDoc} */
    @Override public String getClientInfo(String name) throws SQLException {
        ensureNotClosed();
        return null;
    }
    /** {@inheritDoc} */
    @Override public Properties getClientInfo() throws SQLException {
        ensureNotClosed();
        return new Properties();
    }
    /** {@inheritDoc} */
    @Override public Array createArrayOf(String typeName, Object[] elements) throws SQLException {
        ensureNotClosed();
        throw new SQLFeatureNotSupportedException("SQL-specific types are not supported.");
    }
    /** {@inheritDoc} */
    @Override public Struct createStruct(String typeName, Object[] attrs) throws SQLException {
        ensureNotClosed();
        throw new SQLFeatureNotSupportedException("SQL-specific types are not supported.");
    }
    /** {@inheritDoc} */
    @Override public <T> T unwrap(Class<T> iface) throws SQLException {
        if (!isWrapperFor(iface))
            throw new SQLException("Connection is not a wrapper for " + iface.getName());
        return (T)this;
    }
    /** {@inheritDoc} */
    @Override public boolean isWrapperFor(Class<?> iface) throws SQLException {
        // Only the Connection interface itself is reported as wrapped.
        return iface != null && iface == Connection.class;
    }
    /** {@inheritDoc} */
    @Override public void setSchema(String schema) throws SQLException {
        // Schema maps directly onto the Ignite cache name.
        cacheName = schema;
    }
    /** {@inheritDoc} */
    @Override public String getSchema() throws SQLException {
        return cacheName;
    }
    /** {@inheritDoc} */
    @Override public void abort(Executor executor) throws SQLException {
        // NOTE(review): JDBC requires rejecting a null executor; this implementation
        // ignores the executor entirely and closes synchronously.
        close();
    }
    /** {@inheritDoc} */
    @Override public void setNetworkTimeout(Executor executor, int ms) throws SQLException {
        // NOTE(review): the JDBC spec reports a negative timeout via SQLException,
        // not IllegalArgumentException.
        if (ms < 0)
            throw new IllegalArgumentException("Timeout is below zero: " + ms);
        timeout = ms;
    }
    /** {@inheritDoc} */
    @Override public int getNetworkTimeout() throws SQLException {
        return timeout;
    }
    /**
     * @return Ignite client.
     */
    GridClient client() {
        return client;
    }
    /**
     * @return Cache name.
     */
    String cacheName() {
        return cacheName;
    }
    /**
     * @return URL.
     */
    String url() {
        return url;
    }
    /**
     * @return Node ID.
     */
    UUID nodeId() {
        return nodeId;
    }
    /**
     * Ensures that connection is not closed.
     *
     * @throws SQLException If connection is closed.
     */
    private void ensureNotClosed() throws SQLException {
        if (closed)
            throw new SQLException("Connection is closed.");
    }
    /**
     * @return Internal statement.
     * @throws SQLException In case of error.
     */
    JdbcStatement createStatement0() throws SQLException {
        return (JdbcStatement)createStatement();
    }
}
|
JDBC driver authentication fixed
|
modules/core/src/main/java/org/apache/ignite/internal/jdbc/JdbcConnection.java
|
JDBC driver authentication fixed
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.