index int64 | repo_id string | file_path string | content string |
|---|---|---|---|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain/autodiscover/AutoDiscoverMemberType.java | package ai.timefold.solver.core.api.domain.autodiscover;
import ai.timefold.solver.core.api.domain.solution.ConstraintWeightOverrides;
import ai.timefold.solver.core.api.domain.solution.PlanningEntityCollectionProperty;
import ai.timefold.solver.core.api.domain.solution.PlanningEntityProperty;
import ai.timefold.solver.core.api.domain.solution.PlanningScore;
import ai.timefold.solver.core.api.domain.solution.PlanningSolution;
import ai.timefold.solver.core.api.domain.solution.ProblemFactCollectionProperty;
import ai.timefold.solver.core.api.domain.solution.ProblemFactProperty;
/**
* Determines if and how to automatically presume {@link ConstraintWeightOverrides},
* {@link ProblemFactCollectionProperty}, {@link ProblemFactProperty}, {@link PlanningEntityCollectionProperty},
* {@link PlanningEntityProperty} and {@link PlanningScore} annotations on {@link PlanningSolution} members
* based on the member type.
*/
public enum AutoDiscoverMemberType {
/**
* Do not reflect.
*/
NONE,
/**
* Reflect over the fields and automatically behave as the appropriate annotation is there
* based on the field type.
*/
FIELD,
/**
* Reflect over the getter methods and automatically behave as the appropriate annotation is there
* based on the return type.
*/
GETTER;
}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain/common/DomainAccessType.java | package ai.timefold.solver.core.api.domain.common;
import ai.timefold.solver.core.api.domain.variable.PlanningVariable;
/**
* Determines how members (fields and methods) of the domain (for example the {@link PlanningVariable planner variable})
* are accessed.
*/
public enum DomainAccessType {
/**
* Use reflection to read/write members (fields and methods) of the domain.
* <p>
* When used in a modulepath, the module must be open.
* When used in GraalVM, the domain must be open for reflection.
* <p>
* This is the default, except with timefold-solver-quarkus.
*/
REFLECTION,
/**
* Use Gizmo generated bytecode to access members (fields and methods) to avoid reflection
* for additional performance.
* <p>
* With timefold-solver-quarkus, this bytecode is generated at build time
* and it supports planning annotations on non-public members too.
* <p>
* Without timefold-solver-quarkus, this bytecode is generated at bootstrap runtime
* and you must add Gizmo in your classpath or modulepath
* and use planning annotations on public members only.
*/
GIZMO
}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain/common/package-info.java | @XmlSchema(
namespace = SolverConfig.XML_NAMESPACE,
elementFormDefault = XmlNsForm.QUALIFIED)
package ai.timefold.solver.core.api.domain.common;
import jakarta.xml.bind.annotation.XmlNsForm;
import jakarta.xml.bind.annotation.XmlSchema;
import ai.timefold.solver.core.config.solver.SolverConfig; |
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain/constraintweight/ConstraintConfiguration.java | package ai.timefold.solver.core.api.domain.constraintweight;
import static java.lang.annotation.ElementType.TYPE;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
import ai.timefold.solver.core.api.domain.solution.ConstraintWeightOverrides;
import ai.timefold.solver.core.api.domain.solution.PlanningSolution;
/**
* Allows end users to change the constraint weights, by not hard coding them.
* This annotation specifies that the class holds a number of {@link ConstraintWeight} annotated members.
* That class must also have a {@link ConstraintWeight weight} for each of the constraints.
* <p>
* A {@link PlanningSolution} has at most one field or property annotated with {@link ConstraintConfigurationProvider}
* with returns a type of the {@link ConstraintConfiguration} annotated class.
*
* @deprecated Use {@link ConstraintWeightOverrides} instead.
*/
@Deprecated(forRemoval = true, since = "1.13.0")
@Target({ TYPE })
@Retention(RUNTIME)
public @interface ConstraintConfiguration {
/**
* The namespace of the constraints.
* <p>
* This is the default for every {@link ConstraintWeight#constraintPackage()} in the annotated class.
*
* @return defaults to the annotated class's package.
*/
String constraintPackage() default "";
}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain/constraintweight/ConstraintConfigurationProvider.java | package ai.timefold.solver.core.api.domain.constraintweight;
import static java.lang.annotation.ElementType.FIELD;
import static java.lang.annotation.ElementType.METHOD;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
import ai.timefold.solver.core.api.domain.solution.ConstraintWeightOverrides;
import ai.timefold.solver.core.api.domain.solution.PlanningSolution;
import ai.timefold.solver.core.api.domain.solution.ProblemFactProperty;
/**
* Specifies that a property (or a field) on a {@link PlanningSolution} class is a {@link ConstraintConfiguration}.
* This property is automatically a {@link ProblemFactProperty} too, so no need to declare that explicitly.
* <p>
* The type of this property (or field) must have a {@link ConstraintConfiguration} annotation.
*
* @deprecated Use {@link ConstraintWeightOverrides} instead.
*/
@Deprecated(forRemoval = true, since = "1.13.0")
@Target({ METHOD, FIELD })
@Retention(RUNTIME)
public @interface ConstraintConfigurationProvider {
}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain/constraintweight/ConstraintWeight.java | package ai.timefold.solver.core.api.domain.constraintweight;
import static java.lang.annotation.ElementType.FIELD;
import static java.lang.annotation.ElementType.METHOD;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
import ai.timefold.solver.core.api.domain.solution.ConstraintWeightOverrides;
import ai.timefold.solver.core.api.score.Score;
/**
* Specifies that a bean property (or a field) set the constraint weight and score level of a constraint.
* For example, with a constraint weight of {@code 2soft},
* a constraint match penalization with weightMultiplier {@code 3}
* will result in a {@link Score} of {@code -6soft}.
* <p>
* It is specified on a getter of a java bean property (or directly on a field) of a {@link ConstraintConfiguration} class.
*
* @deprecated Use {@link ConstraintWeightOverrides} instead.
*/
@Deprecated(forRemoval = true, since = "1.13.0")
@Target({ FIELD, METHOD })
@Retention(RUNTIME)
public @interface ConstraintWeight {
/**
* The constraint package is the namespace of the constraint.
* <p>
* The constraint id is this constraint package
* concatenated with "/" and {@link #value() the constraint name}.
*
* @return defaults to {@link ConstraintConfiguration#constraintPackage()}
*/
String constraintPackage() default "";
/**
* The constraint name.
* <p>
* The constraint id is {@link #constraintPackage() the constraint package}
* concatenated with "/" and this constraint name.
*
* @return never null, often a constant that is used by the constraints too, because they need to match.
*/
String value();
}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain/entity/PinningFilter.java | package ai.timefold.solver.core.api.domain.entity;
import ai.timefold.solver.core.api.domain.solution.PlanningSolution;
import org.jspecify.annotations.NullMarked;
/**
* Decides on accepting or discarding a {@link PlanningEntity}.
* A pinned {@link PlanningEntity}'s planning variables are never changed.
*
* @param <Solution_> the solution type, the class with the {@link PlanningSolution} annotation
* @param <Entity_> the entity type, the class with the {@link PlanningEntity} annotation
* @deprecated Use {@link PlanningPin} instead.
*/
@Deprecated(forRemoval = true, since = "1.23.0")
@NullMarked
public interface PinningFilter<Solution_, Entity_> {
/**
* @param solution working solution to which the entity belongs
* @param entity a {@link PlanningEntity}
* @return true if the entity it is pinned, false if the entity is movable.
*/
boolean accept(Solution_ solution, Entity_ entity);
}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain/entity/PlanningEntity.java | package ai.timefold.solver.core.api.domain.entity;
import static java.lang.annotation.ElementType.TYPE;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
import java.util.Comparator;
import ai.timefold.solver.core.api.domain.solution.PlanningSolution;
import ai.timefold.solver.core.api.domain.variable.PlanningVariable;
import ai.timefold.solver.core.impl.heuristic.selector.common.decorator.SelectionSorterWeightFactory;
/**
* Specifies that the class is a planning entity.
* There are two types of entities:
*
* <dl>
* <dt>Genuine entity</dt>
* <dd>Must have at least 1 genuine {@link PlanningVariable planning variable},
* and 0 or more shadow variables.</dd>
* <dt>Shadow entity</dt>
* <dd>Must have at least 1 shadow variable, and no genuine variables.</dd>
* </dl>
*
* If a planning entity has neither a genuine nor a shadow variable,
* it is not a planning entity and the solver will fail fast.
*
* <p>
* The class should have a public no-arg constructor, so it can be cloned
* (unless the {@link PlanningSolution#solutionCloner()} is specified).
*/
@Target({ TYPE })
@Retention(RUNTIME)
public @interface PlanningEntity {
/**
* A pinned planning entity is never changed during planning,
* this is useful in repeated planning use cases (such as continuous planning and real-time planning).
* This applies to all the planning variables of this planning entity.
* <p>
* The method {@link PinningFilter#accept(Object, Object)} returns false if the selection entity is pinned
* and it returns true if the selection entity is movable
*
* @return {@link NullPinningFilter} when it is null (workaround for annotation limitation)
* @deprecated Prefer using {@link PlanningPin}.
*/
@Deprecated(forRemoval = true, since = "1.23.0")
Class<? extends PinningFilter> pinningFilter() default NullPinningFilter.class;
/**
* Workaround for annotation limitation in {@link #pinningFilter()}.
*
* @deprecated Prefer using {@link PlanningPin}.
*/
@Deprecated(forRemoval = true, since = "1.23.0")
interface NullPinningFilter extends PinningFilter {
}
/**
* Allows a collection of planning entities to be sorted by difficulty.
* A difficultyWeight estimates how hard is to plan a certain PlanningEntity.
* Some algorithms benefit from planning on more difficult planning entities first/last or from focusing on them.
* <p>
* The {@link Comparator} should sort in ascending difficulty
* (even though many optimization algorithms will reverse it).
* For example: sorting 3 processes on difficultly based on their RAM usage requirement:
* Process B (1GB RAM), Process A (2GB RAM), Process C (7GB RAM),
* <p>
* Do not use together with {@link #difficultyWeightFactoryClass()}.
*
* @return {@link NullDifficultyComparator} when it is null (workaround for annotation limitation)
* @see #difficultyWeightFactoryClass()
*/
Class<? extends Comparator> difficultyComparatorClass() default NullDifficultyComparator.class;
/** Workaround for annotation limitation in {@link #difficultyComparatorClass()}. */
interface NullDifficultyComparator extends Comparator {
}
/**
* The {@link SelectionSorterWeightFactory} alternative for {@link #difficultyComparatorClass()}.
* <p>
* Do not use together with {@link #difficultyComparatorClass()}.
*
* @return {@link NullDifficultyWeightFactory} when it is null (workaround for annotation limitation)
* @see #difficultyComparatorClass()
*/
Class<? extends SelectionSorterWeightFactory> difficultyWeightFactoryClass() default NullDifficultyWeightFactory.class;
/** Workaround for annotation limitation in {@link #difficultyWeightFactoryClass()}. */
interface NullDifficultyWeightFactory extends SelectionSorterWeightFactory {
}
}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain/entity/PlanningPin.java | package ai.timefold.solver.core.api.domain.entity;
import static java.lang.annotation.ElementType.FIELD;
import static java.lang.annotation.ElementType.METHOD;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
import ai.timefold.solver.core.api.domain.variable.PlanningListVariable;
import ai.timefold.solver.core.api.solver.change.ProblemChange;
/**
* Specifies that a boolean property (or field) of a {@link PlanningEntity} determines if the planning entity is pinned.
* A pinned planning entity is never changed during planning;
* to change a pinned planning entity, even to make it not pinned anymore, trigger a {@link ProblemChange}.
* For example, it allows the user to pin a shift to a specific employee before solving
* and the solver will not undo that, regardless of the constraints.
* <p>
* The boolean is false if the planning entity is movable and true if the planning entity is pinned.
* <p>
* It applies to all the planning variables of that planning entity.
* If set on an entity with {@link PlanningListVariable},
* this will pin the entire list of planning values as well.
*
* @see PlanningPinToIndex Read more about how to only pin part of the planning list variable.
* @see ProblemChange Use ProblemChange to trigger pinning changes.
*/
@Target({ METHOD, FIELD })
@Retention(RUNTIME)
public @interface PlanningPin {
}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain/entity/PlanningPinToIndex.java | package ai.timefold.solver.core.api.domain.entity;
import static java.lang.annotation.ElementType.FIELD;
import static java.lang.annotation.ElementType.METHOD;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
import ai.timefold.solver.core.api.domain.variable.PlanningListVariable;
import ai.timefold.solver.core.api.solver.change.ProblemChange;
/**
* Specifies that an {@code int} property (or field) of a {@link PlanningEntity} determines
* how far a {@link PlanningListVariable} is pinned.
* <p>
* This annotation can only be specified on a field of the same entity,
* which also specifies a {@link PlanningListVariable}.
* The annotated int field has the following semantics:
*
* <ul>
* <li>0: Pinning is disabled.
* All the values in the list can be removed,
* new values may be added anywhere in the list,
* values in the list may be reordered.</li>
* <li>Positive int: Values before this index in the list are pinned.
* No value can be added at those indexes,
* removed from them, or shuffled between them.
* Values on or after this index are not pinned
* and can be added, removed or shuffled freely.</li>
* <li>Positive int that exceeds the lists size: fail fast.</li>
* <li>Negative int: fail fast.</li>
* </ul>
*
* To pin the entire list and disallow any changes, use {@link PlanningPin} instead.
* The index must never change during planning; to change it, trigger a {@link ProblemChange}.
*
* <p>
* Example: Assuming a list of values {@code [A, B, C]}:
*
* <ul>
* <li>0 allows the entire list to be modified.</li>
* <li>1 pins {@code A}; rest of the list may be modified or added to.</li>
* <li>2 pins {@code A, B}; rest of the list may be modified or added to.</li>
* <li>3 pins {@code A, B, C}; the list can only be added to.</li>
* <li>4 fails fast as there is no such index in the list.</li>
* </ul>
*
* If the same entity also specifies a {@link PlanningPin} and the pin is enabled,
* any value of {@link PlanningPinToIndex} is ignored.
* In other words, enabling {@link PlanningPin} pins the entire list without exception.
*
* @see PlanningPin Pin the entire entity.
* @see ProblemChange Use ProblemChange to trigger pinning changes.
*/
@Target({ METHOD, FIELD })
@Retention(RUNTIME)
public @interface PlanningPinToIndex {
}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain/entity/package-info.java | /**
* Domain annotations and support classes for a planning entity.
*/
package ai.timefold.solver.core.api.domain.entity;
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain/lookup/LookUpStrategyType.java | package ai.timefold.solver.core.api.domain.lookup;
import ai.timefold.solver.core.api.domain.entity.PlanningEntity;
import ai.timefold.solver.core.api.domain.solution.ProblemFactCollectionProperty;
import ai.timefold.solver.core.api.score.director.ScoreDirector;
/**
* Determines how {@link ScoreDirector#lookUpWorkingObject(Object)} maps
* a {@link ProblemFactCollectionProperty problem fact} or a {@link PlanningEntity planning entity}
* from an external copy to the internal one.
*
* @deprecated When multi-threaded solving, ensure your domain classes use @{@link PlanningId} instead.
*/
@Deprecated(forRemoval = true, since = "1.10.0")
public enum LookUpStrategyType {
/**
* Map by the same {@link PlanningId} field or method.
* If there is no such field or method,
* there is no mapping and {@link ScoreDirector#lookUpWorkingObject(Object)} must not be used.
* If there is such a field or method, but it returns null, it fails fast.
* <p>
* This is the default.
*/
PLANNING_ID_OR_NONE,
/**
* Map by the same {@link PlanningId} field or method.
* If there is no such field or method, it fails fast.
*/
PLANNING_ID_OR_FAIL_FAST,
/**
* Map by {@link Object#equals(Object) equals(Object)} and {@link Object#hashCode() hashCode()}.
* If any of these two methods is not overridden by the working object's class or some of its superclasses,
* {@link ScoreDirector#lookUpWorkingObject(Object)} must not be used because it cannot work correctly with
* {@link Object}'s equals and hashCode implementations.
*/
EQUALITY,
/**
* There is no mapping and {@link ScoreDirector#lookUpWorkingObject(Object)} must not be used.
*/
NONE;
}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain/lookup/PlanningId.java | package ai.timefold.solver.core.api.domain.lookup;
import static java.lang.annotation.ElementType.FIELD;
import static java.lang.annotation.ElementType.METHOD;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
import ai.timefold.solver.core.api.domain.entity.PlanningEntity;
import ai.timefold.solver.core.api.domain.solution.ProblemFactCollectionProperty;
import ai.timefold.solver.core.api.domain.valuerange.ValueRangeProvider;
import ai.timefold.solver.core.api.score.director.ScoreDirector;
import ai.timefold.solver.core.api.solver.change.ProblemChange;
import ai.timefold.solver.core.impl.heuristic.move.Move;
/**
* Specifies that a bean property (or a field) is the id to match
* when {@link ScoreDirector#lookUpWorkingObject(Object) locating}
* an externalObject (often from another {@link Thread} or JVM).
* Used during {@link Move} rebasing and in a {@link ProblemChange}.
* <p>
* It is specified on a getter of a java bean property (or directly on a field) of a {@link PlanningEntity} class,
* {@link ValueRangeProvider planning value} class or any {@link ProblemFactCollectionProperty problem fact} class.
* <p>
* The return type can be any {@link Comparable} type which overrides {@link Object#equals(Object)} and
* {@link Object#hashCode()}, and is usually {@link Long} or {@link String}.
* It must never return a null instance.
*/
@Target({ METHOD, FIELD })
@Retention(RUNTIME)
public @interface PlanningId {
}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain/solution/ConstraintWeightOverrides.java | package ai.timefold.solver.core.api.domain.solution;
import java.util.Collections;
import java.util.Map;
import java.util.Set;
import ai.timefold.solver.core.api.score.Score;
import ai.timefold.solver.core.api.score.stream.ConstraintProvider;
import ai.timefold.solver.core.api.score.stream.uni.UniConstraintBuilder;
import ai.timefold.solver.core.api.score.stream.uni.UniConstraintStream;
import ai.timefold.solver.core.api.solver.change.ProblemChange;
import ai.timefold.solver.core.impl.domain.solution.DefaultConstraintWeightOverrides;
import org.jspecify.annotations.NonNull;
import org.jspecify.annotations.Nullable;
/**
* Used to override constraint weights defined in Constraint Streams,
* e.g., in {@link UniConstraintStream#penalize(Score)}.
* To use,
* place a member (typically a field) of type {@link ConstraintWeightOverrides}
* in your {@link PlanningSolution}-annotated class.
* <p>
* Users should use {@link #of(Map)} to provide the actual constraint weights.
* Alternatively, a JSON serializers and deserializer may be defined to interact with a solution file.
* Once the constraint weights are set, they must remain constant throughout the solving process,
* or a {@link ProblemChange} needs to be triggered.
* <p>
* Zero-weight will be excluded from processing,
* and the solver will behave as if it did not exist in the {@link ConstraintProvider}.
* <p>
* There is no support for user-defined packages, which is a deprecated feature in itself.
* The constraint is assumed to be in the same package as the top-most class implementing this interface.
* It is therefore required that the constraints be built using {@link UniConstraintBuilder#asConstraint(String)},
* leaving the constraint package to its default value.
*
* @param <Score_>
*/
public interface ConstraintWeightOverrides<Score_ extends Score<Score_>> {
static <Score_ extends Score<Score_>> ConstraintWeightOverrides<Score_> none() {
return of(Collections.<String, Score_> emptyMap());
}
static <Score_ extends Score<Score_>> ConstraintWeightOverrides<Score_> of(Map<String, Score_> constraintWeightMap) {
return new DefaultConstraintWeightOverrides<>(constraintWeightMap);
}
/**
* Return a constraint weight for a particular constraint.
*
* @return null if the constraint name is not known
*/
@Nullable
Score_ getConstraintWeight(@NonNull String constraintName);
/**
* Returns all known constraints.
*
* @return All constraint names for which {@link #getConstraintWeight(String)} returns a non-null value.
*/
@NonNull
Set<String> getKnownConstraintNames();
}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain/solution/PlanningEntityCollectionProperty.java | package ai.timefold.solver.core.api.domain.solution;
import static java.lang.annotation.ElementType.FIELD;
import static java.lang.annotation.ElementType.METHOD;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
import java.util.Collection;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.SortedSet;
import ai.timefold.solver.core.api.domain.entity.PlanningEntity;
import ai.timefold.solver.core.api.score.director.ScoreDirector;
/**
* Specifies that a property (or a field) on a {@link PlanningSolution} class is a {@link Collection} of planning entities.
* <p>
* Every element in the planning entity collection should have the {@link PlanningEntity} annotation.
* Every element in the planning entity collection will be added to the {@link ScoreDirector}.
* <p>
* For solver reproducibility, the collection must have a deterministic, stable iteration order.
* It is recommended to use a {@link List}, {@link LinkedHashSet} or {@link SortedSet}.
*/
@Target({ METHOD, FIELD })
@Retention(RUNTIME)
public @interface PlanningEntityCollectionProperty {
}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain/solution/PlanningEntityProperty.java | package ai.timefold.solver.core.api.domain.solution;
import static java.lang.annotation.ElementType.FIELD;
import static java.lang.annotation.ElementType.METHOD;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
import ai.timefold.solver.core.api.domain.entity.PlanningEntity;
import ai.timefold.solver.core.api.score.director.ScoreDirector;
/**
* Specifies that a property (or a field) on a {@link PlanningSolution} class is a planning entity.
* <p>
* The planning entity should have the {@link PlanningEntity} annotation.
* The planning entity will be added to the {@link ScoreDirector}.
*/
@Target({ METHOD, FIELD })
@Retention(RUNTIME)
public @interface PlanningEntityProperty {
}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain/solution/PlanningScore.java | package ai.timefold.solver.core.api.domain.solution;
import static java.lang.annotation.ElementType.FIELD;
import static java.lang.annotation.ElementType.METHOD;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
import ai.timefold.solver.core.api.score.IBendableScore;
import ai.timefold.solver.core.api.score.Score;
import ai.timefold.solver.core.api.solver.Solver;
import ai.timefold.solver.core.impl.score.definition.ScoreDefinition;
/**
* Specifies that a property (or a field) on a {@link PlanningSolution} class holds the {@link Score} of that solution.
* <p>
* This property can be null if the {@link PlanningSolution} is uninitialized.
* <p>
* This property is modified by the {@link Solver},
* every time when the {@link Score} of this {@link PlanningSolution} has been calculated.
*/
@Target({ METHOD, FIELD })
@Retention(RUNTIME)
public @interface PlanningScore {
/**
* Required for bendable scores.
* <p>
* For example with 3 hard levels, hard level 0 always outweighs hard level 1 which always outweighs hard level 2,
* which outweighs all the soft levels.
*
* @return 0 or higher if the {@link Score} is a {@link IBendableScore}, not used otherwise
*/
int bendableHardLevelsSize() default NO_LEVEL_SIZE;
/**
* Required for bendable scores.
* <p>
* For example with 3 soft levels, soft level 0 always outweighs soft level 1 which always outweighs soft level 2.
*
* @return 0 or higher if the {@link Score} is a {@link IBendableScore}, not used otherwise
*/
int bendableSoftLevelsSize() default NO_LEVEL_SIZE;
/** Workaround for annotation limitation in {@link #bendableHardLevelsSize()} and {@link #bendableSoftLevelsSize()}. */
int NO_LEVEL_SIZE = -1;
/**
* Overrides the default determined {@link ScoreDefinition} to implement a custom one.
* <p>
* If this is not specified, the {@link ScoreDefinition} is automatically determined
* based on the return type of the annotated property (or field) on a {@link PlanningSolution}.
*
* @deprecated Support for custom scores is deprecated and will be removed in Timefold Solver 2.0.
* @return {@link NullScoreDefinition} when it is null (workaround for annotation limitation)
*/
@Deprecated(forRemoval = true)
Class<? extends ScoreDefinition> scoreDefinitionClass() default NullScoreDefinition.class;
/** Workaround for annotation limitation in {@link #scoreDefinitionClass()}. */
@Deprecated(forRemoval = true)
interface NullScoreDefinition extends ScoreDefinition {
}
}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain/solution/PlanningSolution.java | package ai.timefold.solver.core.api.domain.solution;
import static java.lang.annotation.ElementType.TYPE;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
import ai.timefold.solver.core.api.domain.autodiscover.AutoDiscoverMemberType;
import ai.timefold.solver.core.api.domain.lookup.LookUpStrategyType;
import ai.timefold.solver.core.api.domain.lookup.PlanningId;
import ai.timefold.solver.core.api.domain.solution.cloner.SolutionCloner;
import ai.timefold.solver.core.api.score.stream.ConstraintProvider;
import org.jspecify.annotations.NonNull;
/**
 * Specifies that the class is a planning solution.
 * A solution represents a problem and a possible solution of that problem.
 * A possible solution does not need to be optimal or even feasible.
 * A solution's planning variables might not be initialized (especially when delivered as a problem).
 * <p>
 * A solution is mutable.
 * For scalability reasons (to facilitate incremental score calculation),
 * the same solution instance (called the working solution per move thread) is continuously modified.
 * It's cloned to recall the best solution.
 * <p>
 * Each planning solution must have exactly 1 {@link PlanningScore} property.
 * <p>
 * Each planning solution must have at least 1 {@link PlanningEntityCollectionProperty}
 * or {@link PlanningEntityProperty} property.
 * <p>
 * Each planning solution is recommended to have 1 {@link ConstraintWeightOverrides} property too.
 * This will make it easy for a solution to override constraint weights provided in {@link ConstraintProvider},
 * in turn making it possible to run different solutions with a different balance of constraint weights.
 * <p>
 * Each planning solution used with ConstraintStream score calculation must have at least 1
 * {@link ProblemFactCollectionProperty}
 * or {@link ProblemFactProperty} property.
 * <p>
 * The class should have a public no-arg constructor, so it can be cloned
 * (unless the {@link #solutionCloner()} is specified).
 */
@Target({ TYPE })
@Retention(RUNTIME)
public @interface PlanningSolution {

    /**
     * Enable reflection through the members of the class
     * to automatically assume {@link PlanningScore}, {@link PlanningEntityCollectionProperty},
     * {@link PlanningEntityProperty}, {@link ProblemFactCollectionProperty}, {@link ProblemFactProperty}
     * and {@link ConstraintWeightOverrides} annotations based on the member type.
     *
     * <p>
     * This feature is not supported under Quarkus.
     * When using Quarkus,
     * setting this to anything other than {@link AutoDiscoverMemberType#NONE} will result in a build-time exception.
     *
     * @return the auto-discovery mode; defaults to {@link AutoDiscoverMemberType#NONE}, meaning no reflection happens
     */
    @NonNull
    AutoDiscoverMemberType autoDiscoverMemberType() default AutoDiscoverMemberType.NONE;

    /**
     * Overrides the default {@link SolutionCloner} to implement a custom {@link PlanningSolution} cloning implementation.
     * <p>
     * If this is not specified, then the default reflection-based {@link SolutionCloner} is used,
     * so you don't have to worry about it.
     *
     * @return {@link NullSolutionCloner} when it is null (workaround for annotation limitation)
     */
    Class<? extends SolutionCloner> solutionCloner() default NullSolutionCloner.class;

    /**
     * Workaround for annotation limitation in {@link #solutionCloner()}:
     * annotation attributes cannot default to {@code null}, so this marker type stands in for "not set".
     */
    interface NullSolutionCloner extends SolutionCloner {
    }

    /**
     * @deprecated When multi-threaded solving, ensure your domain classes use @{@link PlanningId} instead.
     */
    @Deprecated(forRemoval = true, since = "1.10.0")
    @NonNull
    LookUpStrategyType lookUpStrategyType() default LookUpStrategyType.PLANNING_ID_OR_NONE;
}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain/solution/ProblemFactCollectionProperty.java | package ai.timefold.solver.core.api.domain.solution;
import static java.lang.annotation.ElementType.FIELD;
import static java.lang.annotation.ElementType.METHOD;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
import java.util.Collection;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.SortedSet;
import ai.timefold.solver.core.api.domain.entity.PlanningEntity;
import ai.timefold.solver.core.api.score.stream.ConstraintFactory;
import ai.timefold.solver.core.api.score.stream.ConstraintProvider;
import ai.timefold.solver.core.api.solver.change.ProblemChange;
/**
 * Specifies that a property (or a field) on a {@link PlanningSolution} class is a {@link Collection} of problem facts.
 * A problem fact must not change during solving (except through a {@link ProblemChange} event).
 * <p>
 * The constraints in a {@link ConstraintProvider} rely on problem facts for {@link ConstraintFactory#forEach(Class)}.
 * <p>
 * Do not annotate {@link PlanningEntity planning entities} as problem facts:
 * they are automatically available as facts for {@link ConstraintFactory#forEach(Class)}.
 * <p>
 * For solver reproducibility, the collection must have a deterministic, stable iteration order.
 * It is recommended to use a {@link List}, {@link LinkedHashSet} or {@link SortedSet}.
 *
 * @see ProblemFactProperty ProblemFactProperty for a single (non-collection) problem fact
 */
@Target({ METHOD, FIELD })
@Retention(RUNTIME)
public @interface ProblemFactCollectionProperty {
    // Marker annotation: carries no attributes; its presence on a member is the entire contract.
}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain/solution/ProblemFactProperty.java | package ai.timefold.solver.core.api.domain.solution;
import static java.lang.annotation.ElementType.FIELD;
import static java.lang.annotation.ElementType.METHOD;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
import ai.timefold.solver.core.api.domain.entity.PlanningEntity;
import ai.timefold.solver.core.api.score.stream.ConstraintFactory;
import ai.timefold.solver.core.api.score.stream.ConstraintProvider;
import ai.timefold.solver.core.api.solver.change.ProblemChange;
/**
 * Specifies that a property (or a field) on a {@link PlanningSolution} class is a problem fact.
 * A problem fact must not change during solving (except through a {@link ProblemChange} event).
 * <p>
 * The constraints in a {@link ConstraintProvider} rely on problem facts for {@link ConstraintFactory#forEach(Class)}.
 * <p>
 * Do not annotate {@link PlanningEntity} or {@link ConstraintWeightOverrides} fields as a problem fact:
 * they are automatically available as facts for {@link ConstraintFactory#forEach(Class)}.
 *
 * @see ProblemFactCollectionProperty ProblemFactCollectionProperty for a {@code Collection} of problem facts
 */
@Target({ METHOD, FIELD })
@Retention(RUNTIME)
public @interface ProblemFactProperty {
    // Marker annotation: carries no attributes; its presence on a member is the entire contract.
}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain/solution/package-info.java | /**
* Domain annotations and support classes for a planning solution.
*/
package ai.timefold.solver.core.api.domain.solution;
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain/solution | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain/solution/cloner/DeepPlanningClone.java | package ai.timefold.solver.core.api.domain.solution.cloner;
import static java.lang.annotation.ElementType.FIELD;
import static java.lang.annotation.ElementType.METHOD;
import static java.lang.annotation.ElementType.TYPE;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
import java.lang.annotation.Inherited;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
import java.util.Collection;
import java.util.Map;
import ai.timefold.solver.core.api.domain.entity.PlanningEntity;
import ai.timefold.solver.core.api.domain.solution.PlanningSolution;
/**
 * Marks a problem fact class as being required to be deep planning cloned.
 * Not needed for a {@link PlanningSolution} or {@link PlanningEntity} because those are automatically deep cloned.
 * <p>
 * It can also mark a property (getter for a field) as being required to be deep planning cloned.
 * This is especially useful for {@link Collection} (or {@link Map}) properties.
 * Not needed for a {@link Collection} (or {@link Map}) property with a generic type of {@link PlanningEntity}
 * or a class with a DeepPlanningClone annotation, because those are automatically deep cloned.
 * Note: If it annotates a property (getter method for a field) returning {@link Collection} (or {@link Map}),
 * it clones the {@link Collection} (or {@link Map}),
 * but its elements (or keys and values) are only cloned if they are of a type that needs to be planning cloned.
 * <p>
 * This annotation is ignored if a custom {@link SolutionCloner} is set with {@link PlanningSolution#solutionCloner()}.
 * <p>
 * This annotation is {@link Inherited}, so it also applies to subclasses of an annotated type.
 */
@Target({ TYPE, METHOD, FIELD })
@Inherited
@Retention(RUNTIME)
public @interface DeepPlanningClone {
    // Marker annotation: carries no attributes; its presence on a type or member is the entire contract.
}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain/solution | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain/solution/cloner/SolutionCloner.java | package ai.timefold.solver.core.api.domain.solution.cloner;
import ai.timefold.solver.core.api.domain.solution.PlanningSolution;
import org.jspecify.annotations.NonNull;
/**
 * Clones a {@link PlanningSolution} during planning.
 * Used to remember the state of a good {@link PlanningSolution} so it can be recalled at a later time
 * when the original {@link PlanningSolution} is already modified.
 * Also used in population based heuristics to increase or repopulate the population.
 * <p>
 * Planning cloning is hard: avoid doing it yourself.
 * <p>
 * An implementing class must be thread-safe after initialization
 * on account of partitioned search using the same cloner on multiple part threads.
 *
 * @param <Solution_> the solution type, the class with the {@link PlanningSolution} annotation
 */
public interface SolutionCloner<Solution_> {

    /**
     * Does a planning clone. The returned {@link PlanningSolution} clone must fulfill these requirements:
     * <ul>
     * <li>The clone must represent the same planning problem.
     * Usually it reuses the same instances of the problem facts and problem fact collections as the {@code original}.
     * </li>
     * <li>The clone must have the same (equal) score as the {@code original}.
     * </li>
     * <li>The clone must use different, cloned instances of the entities and entity collections.
     * If a cloned entity changes, the original must remain unchanged.
     * If an entity is added or removed in a cloned {@link PlanningSolution},
     * the original {@link PlanningSolution} must remain unchanged.</li>
     * </ul>
     * Note that a class might support more than 1 clone method: planning clone is just one of them.
     * <p>
     * This method is thread-safe.
     *
     * @param original the original {@link PlanningSolution}; must not be modified by this call
     * @return the cloned {@link PlanningSolution}
     */
    @NonNull
    Solution_ cloneSolution(@NonNull Solution_ original);
}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain/solution | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain/solution/cloner/package-info.java | /**
* Planning cloning support.
*/
package ai.timefold.solver.core.api.domain.solution.cloner;
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain/valuerange/CountableValueRange.java | package ai.timefold.solver.core.api.domain.valuerange;
import java.util.Iterator;
import ai.timefold.solver.core.api.domain.variable.PlanningVariable;
import org.jspecify.annotations.NullMarked;
import org.jspecify.annotations.Nullable;
/**
 * A {@link ValueRange} that is ending. Therefore, it has a discrete (as in non-continuous) range.
 * <p>
 * Don't implement this interface directly.
 * If you can't use a collection to store the values,
 * use {@link ValueRangeFactory} to get an instance of a {@link CountableValueRange}.
 *
 * @param <T> the type of the values in the range
 * @see ValueRangeFactory
 * @see ValueRange
 */
@NullMarked
public interface CountableValueRange<T> extends ValueRange<T> {

    /**
     * Used by uniform random selection in a composite CountableValueRange,
     * or one which includes nulls.
     *
     * @return the exact number of elements generated by this {@link CountableValueRange}, always {@code >= 0}
     */
    long getSize();

    /**
     * Used by uniform random selection in a composite CountableValueRange,
     * or one which includes nulls.
     *
     * @param index always {@code <} {@link #getSize()}
     * @return sometimes null (if {@link PlanningVariable#allowsUnassigned()} is true)
     */
    @Nullable
    T get(long index);

    /**
     * Select the elements in original (natural) order.
     *
     * @return an iterator over all elements of this range in their original order
     */
    Iterator<T> createOriginalIterator();
}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain/valuerange/ValueRange.java | package ai.timefold.solver.core.api.domain.valuerange;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.Random;
import java.util.Set;
import ai.timefold.solver.core.api.domain.variable.PlanningListVariable;
import ai.timefold.solver.core.api.domain.variable.PlanningVariable;
import org.jspecify.annotations.NullMarked;
import org.jspecify.annotations.Nullable;
/**
 * A ValueRange is a set of a values for a {@link PlanningVariable} or {@link PlanningListVariable}.
 * These values might be stored in memory as a {@link Collection} (usually a {@link List} or {@link Set}),
 * but if the values are numbers, they can also be stored in memory by their bounds
 * to use less memory and provide more opportunities.
 * <p>
 * ValueRange is stateless, and its contents must not depend on any planning variables.
 * Implementations must be immutable.
 * <p>
 * Don't implement this interface directly.
 * If you can't use a collection to store the values,
 * use {@link ValueRangeFactory} to get an instance of a {@link CountableValueRange}.
 *
 * @param <T> the type of the values in the range
 * @see CountableValueRange
 * @see ValueRangeProvider
 * @see ValueRangeFactory
 */
@NullMarked
public interface ValueRange<T> {

    /**
     * In a {@link CountableValueRange}, this must be consistent with {@link CountableValueRange#getSize()}.
     *
     * @return true if the range is empty
     */
    boolean isEmpty();

    /**
     * @param value sometimes null
     * @return true if the ValueRange contains that value
     */
    boolean contains(@Nullable T value);

    /**
     * Select in random order, but without shuffling the elements.
     * Each element might be selected multiple times.
     * Scales well because it does not require caching.
     *
     * @param workingRandom the {@link Random} to use when any random number is needed,
     *        so runs are reproducible.
     * @return an iterator that returns randomly selected elements; possibly unbounded
     *         (NOTE(review): the contract here does not state whether the iterator ends — confirm per implementation)
     */
    Iterator<T> createRandomIterator(Random workingRandom);
}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain/valuerange/ValueRangeFactory.java | package ai.timefold.solver.core.api.domain.valuerange;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.time.temporal.Temporal;
import java.time.temporal.TemporalUnit;
import ai.timefold.solver.core.impl.domain.valuerange.buildin.bigdecimal.BigDecimalValueRange;
import ai.timefold.solver.core.impl.domain.valuerange.buildin.biginteger.BigIntegerValueRange;
import ai.timefold.solver.core.impl.domain.valuerange.buildin.primboolean.BooleanValueRange;
import ai.timefold.solver.core.impl.domain.valuerange.buildin.primdouble.DoubleValueRange;
import ai.timefold.solver.core.impl.domain.valuerange.buildin.primint.IntValueRange;
import ai.timefold.solver.core.impl.domain.valuerange.buildin.primlong.LongValueRange;
import ai.timefold.solver.core.impl.domain.valuerange.buildin.temporal.TemporalValueRange;
import org.jspecify.annotations.NonNull;
/**
 * Factory for {@link CountableValueRange}.
 * <p>
 * All methods are static; this class is not instantiable.
 */
public final class ValueRangeFactory {

    /** Utility class with only static factory methods; not instantiable. */
    private ValueRangeFactory() {
    }

    /**
     * Build a {@link CountableValueRange} of both {@code boolean} values.
     */
    public static @NonNull CountableValueRange<Boolean> createBooleanValueRange() {
        return new BooleanValueRange();
    }

    /**
     * Build a {@link CountableValueRange} of all {@code int} values between 2 bounds.
     *
     * @param from inclusive minimum
     * @param to exclusive maximum, {@code >= from}
     */
    public static @NonNull CountableValueRange<Integer> createIntValueRange(int from, int to) {
        return new IntValueRange(from, to);
    }

    /**
     * Build a {@link CountableValueRange} of a subset of {@code int} values between 2 bounds.
     *
     * @param from inclusive minimum
     * @param to exclusive maximum, {@code >= from}
     * @param incrementUnit {@code > 0}
     */
    public static @NonNull CountableValueRange<Integer> createIntValueRange(int from, int to, int incrementUnit) {
        return new IntValueRange(from, to, incrementUnit);
    }

    /**
     * Build a {@link CountableValueRange} of all {@code long} values between 2 bounds.
     *
     * @param from inclusive minimum
     * @param to exclusive maximum, {@code >= from}
     */
    public static @NonNull CountableValueRange<Long> createLongValueRange(long from, long to) {
        return new LongValueRange(from, to);
    }

    /**
     * Build a {@link CountableValueRange} of a subset of {@code long} values between 2 bounds.
     *
     * @param from inclusive minimum
     * @param to exclusive maximum, {@code >= from}
     * @param incrementUnit {@code > 0}
     */
    public static @NonNull CountableValueRange<Long> createLongValueRange(long from, long to, long incrementUnit) {
        return new LongValueRange(from, to, incrementUnit);
    }

    /**
     * Build an uncountable {@link ValueRange} of all {@code double} values between 2 bounds.
     *
     * @param from inclusive minimum
     * @param to exclusive maximum, {@code >= from}
     * @deprecated Prefer {@link #createBigDecimalValueRange(BigDecimal, BigDecimal)}.
     */
    @Deprecated(forRemoval = true, since = "1.1.0")
    public static @NonNull ValueRange<Double> createDoubleValueRange(double from, double to) {
        return new DoubleValueRange(from, to);
    }

    /**
     * Build a {@link CountableValueRange} of all {@link BigInteger} values between 2 bounds.
     *
     * @param from inclusive minimum
     * @param to exclusive maximum, {@code >= from}
     */
    public static @NonNull CountableValueRange<BigInteger> createBigIntegerValueRange(@NonNull BigInteger from,
            @NonNull BigInteger to) {
        return new BigIntegerValueRange(from, to);
    }

    /**
     * Build a {@link CountableValueRange} of a subset of {@link BigInteger} values between 2 bounds.
     *
     * @param from inclusive minimum
     * @param to exclusive maximum, {@code >= from}
     * @param incrementUnit {@code > 0}
     */
    public static @NonNull CountableValueRange<BigInteger> createBigIntegerValueRange(@NonNull BigInteger from,
            @NonNull BigInteger to,
            @NonNull BigInteger incrementUnit) {
        return new BigIntegerValueRange(from, to, incrementUnit);
    }

    /**
     * Build a {@link CountableValueRange} of all {@link BigDecimal} values (of a specific scale) between 2 bounds.
     * All parameters must have the same {@link BigDecimal#scale()}.
     *
     * @param from inclusive minimum
     * @param to exclusive maximum, {@code >= from}
     */
    public static @NonNull CountableValueRange<BigDecimal> createBigDecimalValueRange(@NonNull BigDecimal from,
            @NonNull BigDecimal to) {
        return new BigDecimalValueRange(from, to);
    }

    /**
     * Build a {@link CountableValueRange} of a subset of {@link BigDecimal} values (of a specific scale) between 2 bounds.
     * All parameters must have the same {@link BigDecimal#scale()}.
     *
     * @param from inclusive minimum
     * @param to exclusive maximum, {@code >= from}
     * @param incrementUnit {@code > 0}
     */
    public static @NonNull CountableValueRange<BigDecimal> createBigDecimalValueRange(@NonNull BigDecimal from,
            @NonNull BigDecimal to,
            @NonNull BigDecimal incrementUnit) {
        return new BigDecimalValueRange(from, to, incrementUnit);
    }

    /**
     * Build a {@link CountableValueRange} of a subset of {@link LocalDate} values between 2 bounds.
     * <p>
     * Facade for {@link #createTemporalValueRange(Temporal, Temporal, long, TemporalUnit)}.
     *
     * @param from inclusive minimum
     * @param to exclusive maximum, {@code >= from}
     * @param incrementUnitAmount {@code > 0}
     * @param incrementUnitType must be {@link LocalDate#isSupported(TemporalUnit) supported}
     */
    public static @NonNull CountableValueRange<LocalDate> createLocalDateValueRange(
            @NonNull LocalDate from, @NonNull LocalDate to, long incrementUnitAmount, @NonNull TemporalUnit incrementUnitType) {
        return createTemporalValueRange(from, to, incrementUnitAmount, incrementUnitType);
    }

    /**
     * Build a {@link CountableValueRange} of a subset of {@link LocalTime} values between 2 bounds.
     * <p>
     * Facade for {@link #createTemporalValueRange(Temporal, Temporal, long, TemporalUnit)}.
     *
     * @param from inclusive minimum
     * @param to exclusive maximum, {@code >= from}
     * @param incrementUnitAmount {@code > 0}
     * @param incrementUnitType must be {@link LocalTime#isSupported(TemporalUnit) supported}
     */
    // Consistency fix: added the @NonNull return annotation that every sibling factory method carries.
    public static @NonNull CountableValueRange<LocalTime> createLocalTimeValueRange(
            @NonNull LocalTime from, @NonNull LocalTime to, long incrementUnitAmount, @NonNull TemporalUnit incrementUnitType) {
        return createTemporalValueRange(from, to, incrementUnitAmount, incrementUnitType);
    }

    /**
     * Build a {@link CountableValueRange} of a subset of {@link LocalDateTime} values between 2 bounds.
     * <p>
     * Facade for {@link #createTemporalValueRange(Temporal, Temporal, long, TemporalUnit)}.
     *
     * @param from inclusive minimum
     * @param to exclusive maximum, {@code >= from}
     * @param incrementUnitAmount {@code > 0}
     * @param incrementUnitType must be {@link LocalDateTime#isSupported(TemporalUnit) supported}
     */
    // Consistency fix: added the @NonNull return annotation that every sibling factory method carries.
    public static @NonNull CountableValueRange<LocalDateTime> createLocalDateTimeValueRange(
            @NonNull LocalDateTime from, @NonNull LocalDateTime to, long incrementUnitAmount,
            @NonNull TemporalUnit incrementUnitType) {
        return createTemporalValueRange(from, to, incrementUnitAmount, incrementUnitType);
    }

    /**
     * Build a {@link CountableValueRange} of a subset of {@link Temporal} values (such as {@link LocalDate} or
     * {@link LocalDateTime}) between 2 bounds.
     * All parameters must have the same {@link TemporalUnit}.
     *
     * @param from inclusive minimum
     * @param to exclusive maximum, {@code >= from}
     * @param incrementUnitAmount {@code > 0}
     * @param incrementUnitType must be {@link Temporal#isSupported(TemporalUnit) supported} by {@code from} and
     *        {@code to}
     */
    public static <Temporal_ extends Temporal & Comparable<? super Temporal_>> @NonNull CountableValueRange<Temporal_>
            createTemporalValueRange(@NonNull Temporal_ from, @NonNull Temporal_ to, long incrementUnitAmount,
                    @NonNull TemporalUnit incrementUnitType) {
        return new TemporalValueRange<>(from, to, incrementUnitAmount, incrementUnitType);
    }
}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain/valuerange/ValueRangeProvider.java | package ai.timefold.solver.core.api.domain.valuerange;
import static java.lang.annotation.ElementType.FIELD;
import static java.lang.annotation.ElementType.METHOD;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
import java.util.Collection;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.SortedSet;
import ai.timefold.solver.core.api.domain.entity.PlanningEntity;
import ai.timefold.solver.core.api.domain.variable.PlanningListVariable;
import ai.timefold.solver.core.api.domain.variable.PlanningVariable;
import ai.timefold.solver.core.api.solver.SolverFactory;
import ai.timefold.solver.core.api.solver.change.ProblemChange;
import org.jspecify.annotations.NonNull;
/**
 * Provides the planning values that can be used for a {@link PlanningVariable}.
 *
 * <p>
 * This is specified on a getter of a java bean property (or directly on a field)
 * which returns a {@link Collection} or {@link ValueRange}.
 * A {@link Collection} is implicitly converted to a {@link ValueRange}.
 * For solver reproducibility, the collection must have a deterministic, stable iteration order.
 * It is recommended to use a {@link List}, {@link LinkedHashSet} or {@link SortedSet}.
 *
 * <p>
 * Value ranges are not allowed to contain {@code null} values.
 * When {@link PlanningVariable#allowsUnassigned()} or {@link PlanningListVariable#allowsUnassignedValues()} is true,
 * the solver will handle {@code null} values on its own.
 *
 * <p>
 * Value ranges are not allowed to contain multiple copies of the same object,
 * as defined by {@code ==}.
 * It is recommended that the value range never contains two objects
 * that are equal according to {@link Object#equals(Object)},
 * but this is not enforced to not depend on user-defined {@link Object#equals(Object)} implementations.
 * Having duplicates in a value range can lead to unexpected behavior,
 * and skews selection probabilities in random selection algorithms.
 *
 * <p>
 * Value ranges are not allowed to change during solving.
 * This is especially important for value ranges defined on {@link PlanningEntity}-annotated classes;
 * these must never depend on any of that entity's variables, or on any other entity's variables.
 * If you need to change a value range defined on an entity,
 * trigger a {@link ProblemChange} for that entity or restart the solver with an updated planning solution.
 * If you need to change a value range defined on a planning solution,
 * restart the solver with a new planning solution.
 */
@Target({ METHOD, FIELD })
@Retention(RUNTIME)
public @interface ValueRangeProvider {

    /**
     * Used by {@link PlanningVariable#valueRangeProviderRefs()}
     * to map a {@link PlanningVariable} to a {@link ValueRangeProvider}.
     * If not provided, an attempt will be made to find a matching {@link PlanningVariable} without a ref.
     *
     * @return if provided, must be unique across a {@link SolverFactory};
     *         defaults to the empty string, meaning "no explicit id"
     */
    @NonNull
    String id() default "";
}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain/valuerange/package-info.java | /**
* Domain annotations and support classes for a planning value range.
*/
package ai.timefold.solver.core.api.domain.valuerange;
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain/variable/AbstractVariableListener.java | package ai.timefold.solver.core.api.domain.variable;
import java.io.Closeable;
import ai.timefold.solver.core.api.domain.entity.PlanningEntity;
import ai.timefold.solver.core.api.domain.solution.PlanningSolution;
import ai.timefold.solver.core.api.score.director.ScoreDirector;
import org.jspecify.annotations.NonNull;
/**
 * Common ancestor for specialized planning variable listeners.
 * <p>
 * <strong>Do not implement this interface directly.</strong>
 * Implement either {@link VariableListener} or {@link ListVariableListener}.
 *
 * @see VariableListener
 * @see ListVariableListener
 *
 * @param <Solution_> the solution type, the class with the {@link PlanningSolution} annotation
 * @param <Entity_> {@link PlanningEntity} on which the source variable is declared
 */
public interface AbstractVariableListener<Solution_, Entity_> extends Closeable {

    /**
     * Called before an entity is added to the working solution.
     *
     * @param scoreDirector the score director of the working solution
     * @param entity the entity about to be added
     */
    void beforeEntityAdded(@NonNull ScoreDirector<Solution_> scoreDirector, @NonNull Entity_ entity);

    /**
     * Called after an entity has been added to the working solution.
     *
     * @param scoreDirector the score director of the working solution
     * @param entity the entity that was added
     */
    void afterEntityAdded(@NonNull ScoreDirector<Solution_> scoreDirector, @NonNull Entity_ entity);

    /**
     * Called before an entity is removed from the working solution.
     *
     * @param scoreDirector the score director of the working solution
     * @param entity the entity about to be removed
     */
    void beforeEntityRemoved(@NonNull ScoreDirector<Solution_> scoreDirector, @NonNull Entity_ entity);

    /**
     * Called after an entity has been removed from the working solution.
     *
     * @param scoreDirector the score director of the working solution
     * @param entity the entity that was removed
     */
    void afterEntityRemoved(@NonNull ScoreDirector<Solution_> scoreDirector, @NonNull Entity_ entity);

    /**
     * Called when the entire working solution changes. In this event, the other before..()/after...() methods will not
     * be called.
     * At this point, implementations should clear state, if any.
     */
    default void resetWorkingSolution(@NonNull ScoreDirector<Solution_> scoreDirector) {
        // No need to do anything for stateless implementations.
    }

    /**
     * Called before this {@link AbstractVariableListener} is thrown away and not used anymore.
     */
    @Override
    default void close() {
        // No need to do anything for stateless implementations.
    }
}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain/variable/AnchorShadowVariable.java | package ai.timefold.solver.core.api.domain.variable;
import static java.lang.annotation.ElementType.FIELD;
import static java.lang.annotation.ElementType.METHOD;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
import ai.timefold.solver.core.api.domain.entity.PlanningEntity;
import ai.timefold.solver.core.api.solver.Solver;
/**
 * Specifies that a bean property (or a field) is the anchor of a chained {@link PlanningVariable}, which implies it's a shadow
 * variable.
 * <p>
 * It is specified on a getter of a java bean property (or a field) of a {@link PlanningEntity} class.
 */
@Target({ METHOD, FIELD })
@Retention(RUNTIME)
public @interface AnchorShadowVariable {

    /**
     * The source planning variable is a chained planning variable that leads to the anchor.
     * <p>
     * Both the genuine variable and the shadow variable should be consistent:
     * if A chains to B, then A must have the same anchor as B (unless B is the anchor).
     * <p>
     * When the {@link Solver} changes a genuine variable, it adjusts the shadow variable accordingly.
     * In practice, the {@link Solver} ignores shadow variables (except for consistency housekeeping).
     *
     * @return the variable property name on this entity class that leads to the anchor; required (no default)
     */
    String sourceVariableName();
}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain/variable/CascadingUpdateShadowVariable.java | package ai.timefold.solver.core.api.domain.variable;
import static java.lang.annotation.ElementType.FIELD;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
/**
 * Specifies that a field may be updated by the target method when any of its variables change, genuine or shadow.
 * <p>
 * Automatically cascades change events to the subsequent elements of a {@link PlanningListVariable}.
 * <p>
 * A single listener is created
 * to execute user-defined logic from the {@code targetMethod} after all variable changes have been applied.
 * This means it will be the last step executed during the event lifecycle.
 * <p>
 * It can be applied in multiple fields to update various shadow variables.
 * The user's logic is responsible for defining the order in which each variable is updated.
 * <p>
 * Distinct {@code targetMethod} can be defined, but there is no guarantee about the order in which they are executed.
 * Therefore, caution is required when using multiple {@code targetMethod} per model.
 * <p>
 * Except for {@link PiggybackShadowVariable},
 * the use of {@link CascadingUpdateShadowVariable} as a source for other variables,
 * such as {@link ShadowVariable}, is not allowed.
 * <p>
 * Important: it must only change the shadow variable(s) for which it's configured.
 * It should never change a genuine variable or a problem fact.
 * It can change its shadow variable(s) on multiple entity instances
 * (for example: an arrivalTime change affects all trailing entities too).
 * <p>
 * Note: unlike most shadow variable annotations, this one may only target a field, not a getter method.
 */
@Target({ FIELD })
@Retention(RUNTIME)
public @interface CascadingUpdateShadowVariable {

    /**
     * The target method element.
     * <p>
     * Important: the method must be non-static and should not include any parameters.
     * There are no restrictions regarding the method visibility.
     * There is no restriction on the method's return type,
     * but if it returns a value, it will be ignored and will not impact the listener's execution.
     *
     * @return method name of the source host element which will update the shadow variable; required (no default)
     */
    String targetMethodName();
}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain/variable/CustomShadowVariable.java | package ai.timefold.solver.core.api.domain.variable;
import static java.lang.annotation.ElementType.FIELD;
import static java.lang.annotation.ElementType.METHOD;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
import ai.timefold.solver.core.api.domain.entity.PlanningEntity;
/**
 * This annotation is deprecated. Below are the instructions on how to replace your {@code @CustomShadowVariable(...)}
 * with either {@link ShadowVariable @ShadowVariable} or {@link PiggybackShadowVariable @PiggybackShadowVariable}.
 * <p>
 * If your {@code @CustomShadowVariable} uses the {@code variableListenerClass} attribute, then replace the annotation with one
 * {@code @ShadowVariable} annotation for each source {@code @PlanningVariableReference}.
 * <p>
 * For example,
 *
 * <pre>
 * @CustomShadowVariable(
 *         variableListenerClass = PredecessorsDoneDateUpdatingVariableListener.class,
 *         sources = {
 *                 @PlanningVariableReference(variableName = "executionMode"),
 *                 @PlanningVariableReference(variableName = "delay") })
 * </pre>
 *
 * becomes:
 *
 * <pre>
 * @ShadowVariable(
 *         variableListenerClass = PredecessorsDoneDateUpdatingVariableListener.class,
 *         sourceVariableName = "executionMode")
 * @ShadowVariable(
 *         variableListenerClass = PredecessorsDoneDateUpdatingVariableListener.class,
 *         sourceVariableName = "delay")
 * </pre>
 * <p>
 * If your {@code @CustomShadowVariable} uses the {@code variableListenerRef} attribute, then replace it with the
 * {@code @PiggybackShadowVariable} annotation.
 * <p>
 * For example,
 *
 * <pre>
 * @CustomShadowVariable(
 *         variableListenerRef = @PlanningVariableReference(variableName = "date"))
 * </pre>
 *
 * becomes:
 *
 * <pre>
 * @PiggybackShadowVariable(shadowVariableName = "date")
 * </pre>
 *
 * Specifies that a bean property (or a field) is a custom shadow variable of 1 or more {@link PlanningVariable}s.
 * <p>
 * It is specified on a getter of a java bean property (or a field) of a {@link PlanningEntity} class.
 *
 * @deprecated Deprecated in favor of {@link ShadowVariable} (normal shadow variable with {@link #variableListenerClass()})
 *             and {@link PiggybackShadowVariable} (if {@link #variableListenerRef()} is used).
 */
@Deprecated(forRemoval = true)
@Target({ METHOD, FIELD })
@Retention(RUNTIME)
public @interface CustomShadowVariable {

    /**
     * A {@link VariableListener} gets notified after a source planning variable has changed.
     * That listener changes the shadow variable (often recursively on multiple planning entities) accordingly.
     * Those shadow variables should make the score calculation more natural to write.
     * <p>
     * For example: VRP with time windows uses a {@link VariableListener} to update the arrival times
     * of all the trailing entities when an entity is changed.
     *
     * @return never null (unless {@link #variableListenerRef()} is not null)
     */
    Class<? extends VariableListener> variableListenerClass() default NullVariableListener.class;

    /**
     * Workaround for annotation limitation in {@link #variableListenerClass()}:
     * annotation attributes cannot default to {@code null}, so this marker type stands in for "not set".
     */
    interface NullVariableListener extends VariableListener {
    }

    /**
     * The source variables (leaders) that trigger a change to this shadow variable (follower).
     *
     * @return never null (unless {@link #variableListenerRef()} is not null), at least 1
     */
    PlanningVariableReference[] sources() default {};

    /**
     * Use this when this shadow variable is updated by the {@link VariableListener} of another {@link CustomShadowVariable}.
     *
     * @return null if (and only if) any of the other fields is non null.
     */
    PlanningVariableReference variableListenerRef() default @PlanningVariableReference(variableName = "");
}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain/variable/IndexShadowVariable.java | package ai.timefold.solver.core.api.domain.variable;
import static java.lang.annotation.ElementType.FIELD;
import static java.lang.annotation.ElementType.METHOD;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
import ai.timefold.solver.core.api.domain.entity.PlanningEntity;
import ai.timefold.solver.core.api.solver.Solver;
/**
 * Specifies that a bean property (or a field) is an index of this planning value in another entity's
 * {@link PlanningListVariable}.
 * <p>
 * It is specified on a getter of a java bean property (or a field) of a {@link PlanningEntity} class.
 * <p>
 * The source variable must be a {@link PlanningListVariable list variable}.
 */
@Target({ METHOD, FIELD })
@Retention(RUNTIME)
public @interface IndexShadowVariable {
/**
 * The name of the source variable, which must be a {@link PlanningListVariable list variable};
 * this shadow variable tracks this planning value's position in that list.
 * <p>
 * When the {@link Solver} changes a genuine variable, it adjusts the shadow variable accordingly.
 * In practice, the {@link Solver} ignores shadow variables (except for consistency housekeeping).
 *
 * @return property name of the list variable that contains instances of this planning value
 */
String sourceVariableName();
}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain/variable/InverseRelationShadowVariable.java | package ai.timefold.solver.core.api.domain.variable;
import static java.lang.annotation.ElementType.FIELD;
import static java.lang.annotation.ElementType.METHOD;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
import ai.timefold.solver.core.api.domain.entity.PlanningEntity;
import ai.timefold.solver.core.api.solver.Solver;
/**
 * Specifies that a bean property (or a field) is the inverse of a {@link PlanningVariable}, which implies it's a shadow
 * variable.
 * <p>
 * It is specified on a getter of a java bean property (or a field) of a {@link PlanningEntity} class.
 */
@Target({ METHOD, FIELD })
@Retention(RUNTIME)
public @interface InverseRelationShadowVariable {
/**
 * In a bidirectional relationship, the shadow side (= the follower side) uses this property
 * (and nothing else) to declare for which {@link PlanningVariable} (= the leader side) it is a shadow.
 * <p>
 * Both sides of a bidirectional relationship should be consistent: if A points to B, then B must point to A.
 * <p>
 * When the {@link Solver} changes a genuine variable, it adjusts the shadow variable accordingly.
 * In practice, the {@link Solver} ignores shadow variables (except for consistency housekeeping).
 *
 * @return the variable property name on the opposite (leader) end of this bidirectional relationship
 */
String sourceVariableName();
}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain/variable/ListVariableListener.java | package ai.timefold.solver.core.api.domain.variable;
import ai.timefold.solver.core.api.domain.entity.PlanningEntity;
import ai.timefold.solver.core.api.domain.solution.PlanningSolution;
import ai.timefold.solver.core.api.score.director.ScoreDirector;
import org.jspecify.annotations.NonNull;
/**
 * A listener sourced on a {@link PlanningListVariable}.
 * <p>
 * Changes shadow variables when a genuine source list variable changes.
 * <p>
 * Important: it must only change the shadow variable(s) for which it's configured!
 * It should never change a genuine variable or a problem fact.
 * It can change its shadow variable(s) on multiple entity instances
 * (for example: an arrivalTime change affects all trailing entities too).
 * <p>
 * It is recommended to keep implementations stateless.
 * If state must be implemented, implementations may need to override the default methods
 * ({@link #resetWorkingSolution(ScoreDirector)}, {@link #close()}).
 *
 * @param <Solution_> the solution type, the class with the {@link PlanningSolution} annotation
 * @param <Entity_> the {@link PlanningEntity} class on which the source list variable is declared
 * @param <Element_> the type of elements of the source list variable
 */
public interface ListVariableListener<Solution_, Entity_, Element_> extends AbstractVariableListener<Solution_, Entity_> {
/**
 * The listener must unset all shadow variables it is responsible for when an element is unassigned from the source list
 * variable. For example, a {@code Task}'s {@code startTime} shadow variable must be reset to {@code null} after a task
 * is unassigned from {@code Employee.tasks} when the move that assigned it there is undone during Construction Heuristic
 * phase.
 *
 * @param scoreDirector score director
 * @param element the unassigned element
 */
void afterListVariableElementUnassigned(@NonNull ScoreDirector<Solution_> scoreDirector, @NonNull Element_ element);
/**
 * Tells the listener that some elements within the range starting at {@code fromIndex} (inclusive) and ending at
 * {@code toIndex} (exclusive) will change.
 * Be aware that the {@link #afterListVariableChanged} call after the change is done often has a different
 * {@code fromIndex} and {@code toIndex} because the number of elements in the list variable can change.
 *
 * <p>
 * The list variable change includes:
 * <ul>
 * <li>Changing position (index) of one or more elements.</li>
 * <li>Removing one or more elements from the list variable.</li>
 * <li>Adding one or more elements to the list variable.</li>
 * <li>Any mix of the above.</li>
 * </ul>
 *
 * <p>
 * The range has the following properties:
 * <ol>
 * <li>{@code fromIndex} is greater than or equal to 0; {@code toIndex} is less than or equal to the list variable
 * size.</li>
 * <li>{@code toIndex} is greater than or equal to {@code fromIndex}.</li>
 * <li>The range contains all elements that are going to be changed.</li>
 * <li>The range may contain elements that are not going to be changed.</li>
 * <li>The range may be empty ({@code fromIndex} equals {@code toIndex}) if none of the existing list variable elements
 * are going to be changed.</li>
 * </ol>
 *
 * @param scoreDirector score director
 * @param entity entity with the changed list variable
 * @param fromIndex low endpoint (inclusive) of the changed range
 * @param toIndex high endpoint (exclusive) of the changed range
 */
void beforeListVariableChanged(@NonNull ScoreDirector<Solution_> scoreDirector, @NonNull Entity_ entity, int fromIndex,
int toIndex);
/**
 * Tells the listener that some elements within the range starting at {@code fromIndex} (inclusive) and ending at
 * {@code toIndex} (exclusive) changed.
 * <p>
 * The list variable change includes:
 * <ul>
 * <li>Changing position (index) of one or more elements.</li>
 * <li>Removing one or more elements from the list variable.</li>
 * <li>Adding one or more elements to the list variable.</li>
 * <li>Any mix of the above.</li>
 * </ul>
 *
 * <p>
 * The range has the following properties:
 * <ol>
 * <li>{@code fromIndex} is greater than or equal to 0; {@code toIndex} is less than or equal to the list variable
 * size.</li>
 * <li>{@code toIndex} is greater than or equal to {@code fromIndex}.</li>
 * <li>The range contains all elements that have changed.</li>
 * <li>The range may contain elements that have not changed.</li>
 * <li>The range may be empty ({@code fromIndex} equals {@code toIndex}) if none of the existing list variable elements
 * have changed.</li>
 * </ol>
 *
 * @param scoreDirector score director
 * @param entity entity with the changed list variable
 * @param fromIndex low endpoint (inclusive) of the changed range
 * @param toIndex high endpoint (exclusive) of the changed range
 */
void afterListVariableChanged(@NonNull ScoreDirector<Solution_> scoreDirector, @NonNull Entity_ entity, int fromIndex,
int toIndex);
}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain/variable/NextElementShadowVariable.java | package ai.timefold.solver.core.api.domain.variable;
import static java.lang.annotation.ElementType.FIELD;
import static java.lang.annotation.ElementType.METHOD;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
import ai.timefold.solver.core.api.domain.entity.PlanningEntity;
import ai.timefold.solver.core.api.solver.Solver;
/**
 * Specifies that a bean property (or a field) references the next element in the same {@link PlanningListVariable}.
 * The next element's index is 1 higher than this element's index.
 * It is {@code null} if this element is the last element in the list variable.
 * <p>
 * It is specified on a getter of a java bean property (or a field) of a {@link PlanningEntity} class.
 * <p>
 * The source variable must be a {@link PlanningListVariable list variable}.
 */
// TODO When a non-disjoint list variable is supported, specify that this annotation is only allowed on disjoint list variables.
@Target({ METHOD, FIELD })
@Retention(RUNTIME)
public @interface NextElementShadowVariable {
/**
 * The name of the source variable, which must be a {@link PlanningListVariable list variable}.
 * <p>
 * When the {@link Solver} changes a genuine variable, it adjusts the shadow variable accordingly.
 * In practice, the {@link Solver} ignores shadow variables (except for consistency housekeeping).
 *
 * @return property name of the list variable that contains instances of this planning value
 */
String sourceVariableName();
}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain/variable/PiggybackShadowVariable.java | package ai.timefold.solver.core.api.domain.variable;
import static java.lang.annotation.ElementType.FIELD;
import static java.lang.annotation.ElementType.METHOD;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
import ai.timefold.solver.core.api.domain.entity.PlanningEntity;
/**
 * Specifies that a bean property (or a field) is a custom shadow variable that is updated by another shadow variable's
 * variable listener.
 * <p>
 * It is specified on a getter of a java bean property (or a field) of a {@link PlanningEntity} class.
 */
@Target({ METHOD, FIELD })
@Retention(RUNTIME)
public @interface PiggybackShadowVariable {
/**
 * The {@link PlanningEntity} class of the shadow variable with a variable listener.
 * <p>
 * Specified if the referenced shadow variable is on a different {@link Class} than the class that uses this annotation.
 *
 * @return {@link NullEntityClass} when it is null (workaround for annotation limitation).
 *         Defaults to the same {@link Class} as the one that uses this annotation.
 */
Class<?> shadowEntityClass() default NullEntityClass.class;
/**
 * The name of the referenced shadow variable, which is the shadow variable whose variable listener
 * also updates this piggyback shadow variable.
 *
 * @return never null, the name of a shadow variable with a variable listener
 */
String shadowVariableName();
/** Workaround for annotation limitation in {@link #shadowEntityClass()}. */
interface NullEntityClass {
}
}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain/variable/PlanningListVariable.java | package ai.timefold.solver.core.api.domain.variable;
import static java.lang.annotation.ElementType.FIELD;
import static java.lang.annotation.ElementType.METHOD;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
import java.util.List;
import ai.timefold.solver.core.api.domain.entity.PlanningEntity;
import ai.timefold.solver.core.api.domain.entity.PlanningPin;
import ai.timefold.solver.core.api.domain.entity.PlanningPinToIndex;
/**
 * Specifies that a bean property (or a field) can be changed and should be optimized by the optimization algorithms.
 * It is specified on a getter of a java bean property (or directly on a field) of a {@link PlanningEntity} class.
 * The type of the {@link PlanningListVariable} annotated bean property (or a field) must be {@link List}.
 *
 * <h2>List variable</h2>
 * <p>
 * A planning entity's property annotated with {@code @PlanningListVariable} is referred to as a <strong>list variable</strong>.
 * The way solver optimizes a list variable is by adding, removing, or changing order of elements in the {@code List} object
 * held by the list variable.
 *
 * <h2>Disjoint lists</h2>
 * <p>
 * Furthermore, the current implementation works under the assumption that the list variables of all entity instances
 * are "disjoint lists":
 * <ul>
 * <li><strong>List</strong> means that the order of elements inside a list planning variable is significant.</li>
 * <li><strong>Disjoint</strong> means that any given pair of entities have no common elements in their list variables.
 * In other words, each element from the list variable's value range appears in exactly one entity's list variable.</li>
 * </ul>
 *
 * <p>
 * This makes sense for common use cases, for example the Vehicle Routing Problem or Task Assigning. In both cases
 * the <em>order</em> in which customers are visited and tasks are being worked on matters. Also, each customer
 * must be visited <em>once</em> and each task must be completed by <em>exactly one</em> employee.
 *
 * @see PlanningPin
 * @see PlanningPinToIndex
 */
@Target({ METHOD, FIELD })
@Retention(RUNTIME)
public @interface PlanningListVariable {
/**
 * If set to false (default), all elements must be assigned to some list.
 * If set to true, elements may be left unassigned.
 *
 * @see PlanningVariable#allowsUnassigned() Basic planning value equivalent.
 */
boolean allowsUnassignedValues() default false;
/**
 * The ids of the {@link ai.timefold.solver.core.api.domain.valuerange.ValueRangeProvider}s
 * that supply the elements for this list variable.
 *
 * @return 0 or more registered value range provider ids
 * @see PlanningVariable#valueRangeProviderRefs() Basic planning variable equivalent.
 */
String[] valueRangeProviderRefs() default {};
// TODO value comparison: https://issues.redhat.com/browse/PLANNER-2542
}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain/variable/PlanningVariable.java | package ai.timefold.solver.core.api.domain.variable;
import static java.lang.annotation.ElementType.FIELD;
import static java.lang.annotation.ElementType.METHOD;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
import java.util.Comparator;
import ai.timefold.solver.core.api.domain.entity.PlanningEntity;
import ai.timefold.solver.core.api.domain.solution.PlanningSolution;
import ai.timefold.solver.core.api.domain.valuerange.ValueRangeProvider;
import ai.timefold.solver.core.impl.heuristic.selector.common.decorator.SelectionSorterWeightFactory;
/**
 * Specifies that a bean property (or a field) can be changed and should be optimized by the optimization algorithms.
 * <p>
 * The property must be an object type. Primitive types (such as int, double, long) are not allowed.
 * <p>
 * It is specified on a getter of a java bean property (or directly on a field) of a {@link PlanningEntity} class.
 */
@Target({ METHOD, FIELD })
@Retention(RUNTIME)
public @interface PlanningVariable {
/**
 * Any {@link ValueRangeProvider} annotation on a {@link PlanningSolution} or {@link PlanningEntity}
 * will automatically be registered with its {@link ValueRangeProvider#id()}.
 * <p>
 * If no refs are provided, all {@link ValueRangeProvider}s without an id will be registered,
 * provided their return types match the type of this variable.
 *
 * @return 0 or more registered {@link ValueRangeProvider#id()}
 */
String[] valueRangeProviderRefs() default {};
/**
 * When enabled, the planning value null is automatically added
 * to the {@link ValueRangeProvider}'s range, so this variable may remain unassigned.
 * <p>
 * Allowing unassigned is not compatible with {@link PlanningVariableGraphType#CHAINED}.
 * Allowing unassigned is not compatible with a primitive property type.
 *
 * @see PlanningListVariable#allowsUnassignedValues()
 * @return true if null is a valid value for this planning variable
 */
boolean allowsUnassigned() default false;
/**
 * As defined by {@link #allowsUnassigned()}.
 *
 * @deprecated Use {@link #allowsUnassigned()} instead.
 * @return true if null is a valid value for this planning variable
 */
@Deprecated(forRemoval = true, since = "1.8.0")
boolean nullable() default false;
/**
 * In some use cases, such as Vehicle Routing, planning entities form a specific graph type,
 * as specified by {@link PlanningVariableGraphType}.
 *
 * @return never null, defaults to {@link PlanningVariableGraphType#NONE}
 */
PlanningVariableGraphType graphType() default PlanningVariableGraphType.NONE;
/**
 * Allows a collection of planning values for this variable to be sorted by strength.
 * A strengthWeight estimates how strong a planning value is.
 * Some algorithms benefit from planning on weaker planning values first or from focusing on them.
 * <p>
 * The {@link Comparator} should sort in ascending strength.
 * For example: sorting 3 computers on strength based on their RAM capacity:
 * Computer B (1GB RAM), Computer A (2GB RAM), Computer C (7GB RAM).
 * <p>
 * Do not use together with {@link #strengthWeightFactoryClass()}.
 *
 * @return {@link NullStrengthComparator} when it is null (workaround for annotation limitation)
 * @see #strengthWeightFactoryClass()
 */
Class<? extends Comparator> strengthComparatorClass() default NullStrengthComparator.class;
/** Workaround for annotation limitation in {@link #strengthComparatorClass()}. */
interface NullStrengthComparator extends Comparator {
}
/**
 * The {@link SelectionSorterWeightFactory} alternative for {@link #strengthComparatorClass()}.
 * <p>
 * Do not use together with {@link #strengthComparatorClass()}.
 *
 * @return {@link NullStrengthWeightFactory} when it is null (workaround for annotation limitation)
 * @see #strengthComparatorClass()
 */
Class<? extends SelectionSorterWeightFactory> strengthWeightFactoryClass() default NullStrengthWeightFactory.class;
/** Workaround for annotation limitation in {@link #strengthWeightFactoryClass()}. */
interface NullStrengthWeightFactory extends SelectionSorterWeightFactory {
}
}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain/variable/PlanningVariableGraphType.java | package ai.timefold.solver.core.api.domain.variable;
/**
 * Determines which graph semantics, if any, a {@link PlanningVariable} imposes on its planning entities.
 *
 * @see PlanningVariable#graphType()
 */
public enum PlanningVariableGraphType {
/**
 * No graph semantics apply. This is the default.
 */
NONE,
/**
 * Changes to this variable need to trigger chain correction.
 * <p>
 * In some use cases, such as Vehicle Routing, planning entities are chained.
 * A chained variable recursively points to a problem fact, which is called the anchor.
 * So either it points directly to the anchor (that problem fact)
 * or it points to another planning entity which recursively points to the anchor.
 * Chains always have exactly 1 anchor, thus they never loop and the tail is always open.
 * Chains never split into a tree: an anchor or planning entity has at most 1 trailing planning entity.
 * <p>
 * When a chained planning entity changes position, then chain correction must happen:
 * <ul>
 * <li>divert the chain link at the new position to go through the modified planning entity</li>
 * <li>close the missing chain link at the old position</li>
 * </ul>
 * For example: Given {@code A <- B <- C <- D <- X <- Y}, when B moves between X and Y, pointing to X,
 * then Y is also changed to point to B
 * and C is also changed to point to A,
 * giving the result {@code A <- C <- D <- X <- B <- Y}.
 * <p>
 * {@link PlanningVariable#allowsUnassigned()} true is not compatible with this.
 */
CHAINED;
// TODO TREE (DIRECTED_GRAPH)
}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain/variable/PlanningVariableReference.java | package ai.timefold.solver.core.api.domain.variable;
import ai.timefold.solver.core.api.domain.entity.PlanningEntity;
/**
 * A reference to a genuine {@link PlanningVariable} or a shadow variable.
 * Used as a member value of other annotations (for example, the deprecated {@code CustomShadowVariable}).
 */
public @interface PlanningVariableReference {
/**
 * The {@link PlanningEntity} class of the planning variable.
 * <p>
 * Specified if the planning variable is on a different {@link Class}
 * than the class that uses this referencing annotation.
 *
 * @return {@link NullEntityClass} when it is null (workaround for annotation limitation).
 *         Defaults to the same {@link Class} as the one that uses this annotation.
 */
Class<?> entityClass() default NullEntityClass.class;
/** Workaround for annotation limitation in {@link #entityClass()}. */
interface NullEntityClass {
}
/**
 * The name of the planning variable that is referenced.
 *
 * @return never null, a genuine or shadow variable name
 */
String variableName();
}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain/variable/PreviousElementShadowVariable.java | package ai.timefold.solver.core.api.domain.variable;
import static java.lang.annotation.ElementType.FIELD;
import static java.lang.annotation.ElementType.METHOD;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
import ai.timefold.solver.core.api.domain.entity.PlanningEntity;
import ai.timefold.solver.core.api.solver.Solver;
/**
 * Specifies that a bean property (or a field) references the previous element in the same {@link PlanningListVariable}.
 * The previous element's index is 1 lower than this element's index.
 * It is {@code null} if this element is the first element in the list variable.
 * <p>
 * It is specified on a getter of a java bean property (or a field) of a {@link PlanningEntity} class.
 * <p>
 * The source variable must be a {@link PlanningListVariable list variable}.
 */
// TODO When a non-disjoint list variable is supported, specify that this annotation is only allowed on disjoint list variables.
@Target({ METHOD, FIELD })
@Retention(RUNTIME)
public @interface PreviousElementShadowVariable {
/**
 * The name of the source variable, which must be a {@link PlanningListVariable list variable}.
 * <p>
 * When the {@link Solver} changes a genuine variable, it adjusts the shadow variable accordingly.
 * In practice, the {@link Solver} ignores shadow variables (except for consistency housekeeping).
 *
 * @return property name of the list variable that contains instances of this planning value
 */
String sourceVariableName();
}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain/variable/ShadowSources.java | package ai.timefold.solver.core.api.domain.variable;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Specifies the paths to variables that a method referenced by {@link ShadowVariable#supplierName()}
 * uses to compute the value of a {@link ShadowVariable}.
 */
@Target(ElementType.METHOD)
@Retention(RetentionPolicy.RUNTIME)
public @interface ShadowSources {
/**
 * The paths to variables the method uses to compute the value of a {@link ShadowVariable#supplierName() declarative shadow
 * variable}.
 * <p>
 * Each path is a {@link String} that is one of the following three forms:
 *
 * <ul>
 * <li>
 * "variableName", for referring to any variable on the same planning entity.
 * </li>
 * <li>
 * A list of names separated by ".", such as "variableOrFact.fact.entity.variable",
 * for referencing a variable accessible from the planning entity.
 * The first property may be a fact or any non-declarative variable; the remaining properties before the end
 * must be facts, and the final property must be a variable.
 * For the path "a.b", it refers to the variable "b"
 * on the property/variable "a" on the planning entity.
 * In general, if you access a variable in your method using a chain like {@code a.b.c}, that
 * chain should be included as a source.
 * </li>
 * <li>
 * A list of names separated by ".", followed by a name with the suffix "[].",
 * followed by either of the forms above.
 * For example, "group[].previous".
 * In this case, "group" is a {@link java.util.Collection} on the planning entity,
 * and the annotated method uses the "previous" variable of each element in the
 * collection.
 * The collection must not change during solving and may be null.
 * </li>
 * </ul>
 *
 * For example, for this method
 *
 * <pre>
 * {@code
 * @InverseRelationShadowVariable
 * Entity entity;
 *
 * @PreviousElementShadowVariable
 * Value previous;
 *
 * @ShadowVariable(supplierName="startTimeSupplier")
 * LocalDateTime startTime;
 *
 * @ShadowVariable(supplierName="endTimeSupplier")
 * LocalDateTime endTime;
 *
 * Collection<Value> dependencies;
 *
 * @ShadowSources({"previous.endTime", "entity", "dependencies[].endTime"})
 * public LocalDateTime startTimeSupplier() {
 *     LocalDateTime readyTime = null;
 *     if (previous != null) {
 *         readyTime = previous.endTime;
 *     } else if (entity != null) {
 *         readyTime = entity.startTime;
 *     } else {
 *         return null;
 *     }
 *     if (dependencies != null) {
 *         for (var dependency : dependencies) {
 *             if (dependency.endTime == null) {
 *                 return null;
 *             }
 *             readyTime = readyTime.isBefore(dependency.endTime) ? dependency.endTime : readyTime;
 *         }
 *     }
 *     return readyTime;
 * }
 * }
 * </pre>
 *
 * The value {@code {"previous.endTime", "entity", "dependencies[].endTime"}} is used
 * for {@link ShadowSources} since it accesses
 * the end time declarative shadow variable of its previous element variable ("previous.endTime"),
 * a fact on its inverse relation variable ("entity"),
 * and the end time declarative shadow variable on each element in its dependencies ("dependencies[].endTime").
 *
 * @return A non-empty list of variables the supplier method accesses.
 */
String[] value();
/**
 * If non-empty, this is the name of a property on the entity that will be used
 * to define a group of entities to align values for.
 * <p>
 * When the alignment key is unspecified or null, the entity is considered independent:
 * it is not part of any alignment group and does not share variable calculations with other entities.
 * When the alignment key is non-null, the shadow variable will only be calculated
 * for one entity with that alignment key, and all other entities with that alignment key will
 * have their shadows set to that value.
 * <p>
 * Important: the alignment key must not point to a planning variable and must not change during solving.
 *
 * @return The name of a property on the entity to use for value alignment, or the empty string to not align
 *         values with any other entity.
 */
String alignmentKey() default "";
}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain/variable/ShadowVariable.java | package ai.timefold.solver.core.api.domain.variable;
import static java.lang.annotation.ElementType.FIELD;
import static java.lang.annotation.ElementType.METHOD;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
import java.lang.annotation.Repeatable;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
import ai.timefold.solver.core.api.domain.entity.PlanningEntity;
import ai.timefold.solver.core.api.domain.variable.ShadowVariable.List;
/**
* Specifies that a bean property (or a field) is a custom shadow variable of 1 or more source variables.
* The source variable may be a genuine {@link PlanningVariable}, {@link PlanningListVariable}, or another shadow variable.
* <p>
* It is specified on a getter of a java bean property (or a field) of a {@link PlanningEntity} class.
*/
@Target({ METHOD, FIELD })
@Retention(RUNTIME)
@Repeatable(List.class)
public @interface ShadowVariable {
/**
* {@link ai.timefold.solver.core.config.solver.PreviewFeature Preview feature}.
* <p>
* If set, this {@link ShadowVariable} is a supplier variable, and it
* is a name of a method annotated with
* {@link ShadowSources} that computes the value of this
* {@link ShadowVariable}.
* <p>
* If set, {@link #variableListenerClass()}, {@link #sourceEntityClass()}
* and {@link #sourceVariableName()} must all be unset.
*
* @return the method that computes the value of this {@link ShadowVariable}.
*/
String supplierName() default "";
/**
* A {@link VariableListener} or {@link ListVariableListener} gets notified after a source planning variable has changed.
* That listener changes the shadow variable (often recursively on multiple planning entities) accordingly.
* Those shadow variables should make the score calculation more natural to write.
* <p>
* For example: VRP with time windows uses a {@link VariableListener} to update the arrival times
* of all the trailing entities when an entity is changed.
*
* Must not be set if {@link #supplierName()} is set.
*
* @return {@link NullVariableListener} when the attribute is omitted (workaround for annotation limitation).
* The variable listener class that computes the value of this shadow variable.
*/
Class<? extends AbstractVariableListener> variableListenerClass() default NullVariableListener.class;
/**
* The {@link PlanningEntity} class of the source variable.
* <p>
* Specified if the source variable is on a different {@link Class} than the class that uses this referencing annotation.
* <p>
* Must not be set if {@link #supplierName()} is set.
*
* @return {@link NullEntityClass} when the attribute is omitted (workaround for annotation limitation).
* Defaults to the same {@link Class} as the one that uses this annotation.
*/
Class<?> sourceEntityClass() default NullEntityClass.class;
/**
* The source variable name.
* <p>
* Must not be set if {@link #supplierName()} is set.
*
* @return never null, a genuine or shadow variable name
*/
String sourceVariableName() default "";
/**
 * Defines several {@link ShadowVariable} annotations on the same element.
 */
@Target({ METHOD, FIELD })
@Retention(RUNTIME)
@interface List {

    /**
     * @return the {@link ShadowVariable} annotations grouped on the annotated member
     */
    ShadowVariable[] value();
}
/**
 * Workaround for annotation limitation in {@link #variableListenerClass()}:
 * annotation attributes cannot default to {@code null}, so this type acts as the "unset" marker.
 */
interface NullVariableListener extends AbstractVariableListener<Object, Object> {
}
/**
 * Workaround for annotation limitation in {@link #sourceEntityClass()}:
 * annotation attributes cannot default to {@code null}, so this type acts as the "unset" marker.
 */
interface NullEntityClass {
}
}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain/variable/ShadowVariablesInconsistent.java | package ai.timefold.solver.core.api.domain.variable;
import static java.lang.annotation.ElementType.FIELD;
import static java.lang.annotation.ElementType.METHOD;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
import ai.timefold.solver.core.api.domain.entity.PlanningEntity;
import ai.timefold.solver.core.api.domain.solution.PlanningSolution;
import ai.timefold.solver.core.api.score.stream.Constraint;
/**
* Specifies that a boolean property (or field) of a {@link PlanningEntity}
* tracks if any of its {@link ShadowVariable#supplierName() supplier variables}
* are inconsistent.
* <p>
* A supplier variable is inconsistent if:
* <ul>
* <li>
* One of its source variables includes it as a source (for example,
* `a` depends on `b` and `b` depends on `a`).
* </li>
* <li>
* One of its source variables is inconsistent (for example,
* `c` depends on `a`, which depends on `b`, and `b` depends on `a`).
* </li>
* </ul>
* <p>
* Should be used in a filter for a hard {@link Constraint} to penalize
* inconsistent entities, since {@link PlanningSolution} with inconsistent entities are
* typically not valid.
* <p>
* There are three ways an inconsistency may be introduced:
*
* <ul>
* <li>
* Source-induced, when two declarative shadow variables' sources refer to each other:
*
* <pre>
* @PlanningEntity
* public class Entity {
* @ShadowVariable(supplierName = "variable1Supplier")
* String variable1;
*
* @ShadowVariable(supplierName = "variable2Supplier")
* String variable2;
*
* // ...
*
* @ShadowSources("variable2")
* String variable1Supplier() {
* // ...
* }
*
* @ShadowSources("variable1")
* String variable2Supplier() {
* // ...
* }
* }
* </pre>
*
* </li>
*
* <li>
* Fact-induced, when a shadow variable has itself as a direct or transitive dependency via a fact:
*
* <pre>
* @PlanningEntity
* public class Entity {
* Entity dependency;
*
* @ShadowVariable(supplierName = "variableSupplier")
* String variable;
*
* @ShadowSources("dependency.variable")
* String variableSupplier() {
* // ...
* }
* // ...
* }
*
* Entity a = new Entity();
* Entity b = new Entity();
* a.setDependency(b);
* b.setDependency(a);
* // a depends on b, and b depends on a, which is invalid.
* </pre>
*
* </li>
*
* <li>
* Variable-induced, when a shadow variable has itself as a direct or transitive dependency via a variable:
*
* <pre>
* @PlanningEntity
* public class Entity {
* Entity dependency;
*
* @PreviousElementShadowVariable()
* Entity previous;
*
* @ShadowVariable(supplierName = "variableSupplier")
* String variable;
*
* @ShadowSources({ "previous.variable", "dependency.variable" })
* String variableSupplier() {
* // ...
* }
* // ...
* }
*
* Entity a = new Entity();
* Entity b = new Entity();
* b.setDependency(a);
* a.setPrevious(b);
* // b depends on a via a fact, and a depends on b via a variable
* // The solver can break this loop by moving a after b.
* </pre>
*
* </li>
* </ul>
* Source-induced and fact-induced loops cannot be broken by the solver,
* and represent an issue in either the input problem or the domain model.
* The solver will fail-fast if it detects a source-induced or fact-induced loop.
* <p>
* Important:
* Do not use a {@link ShadowVariablesInconsistent} property in a method annotated
* with {@link ShadowSources}. {@link ShadowSources} marked methods do not need to check
* {@link ShadowVariablesInconsistent} properties, since they are only called if all
* their dependencies are consistent.
*/
@Target({ METHOD, FIELD })
@Retention(RUNTIME)
public @interface ShadowVariablesInconsistent {
    // Marker annotation: it declares no attributes; annotating the boolean member is all that is required.
}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain/variable/VariableListener.java | package ai.timefold.solver.core.api.domain.variable;
import ai.timefold.solver.core.api.domain.entity.PlanningEntity;
import ai.timefold.solver.core.api.domain.solution.PlanningSolution;
import ai.timefold.solver.core.api.score.director.ScoreDirector;
import org.jspecify.annotations.NonNull;
/**
 * A listener sourced on a basic {@link PlanningVariable}.
 * <p>
 * Changes shadow variables when a source basic planning variable changes.
 * The source variable can be either a genuine or a shadow variable.
 * <p>
 * Important: it must only change the shadow variable(s) for which it's configured!
 * It should never change a genuine variable or a problem fact.
 * It can change its shadow variable(s) on multiple entity instances
 * (for example: an arrivalTime change affects all trailing entities too).
 * <p>
 * It is recommended to keep implementations stateless.
 * If state must be implemented, implementations may need to override the default methods
 * ({@link #resetWorkingSolution(ScoreDirector)}, {@link #close()}).
 *
 * @param <Solution_> the solution type, the class with the {@link PlanningSolution} annotation
 * @param <Entity_> {@link PlanningEntity} on which the source variable is declared
 */
public interface VariableListener<Solution_, Entity_> extends AbstractVariableListener<Solution_, Entity_> {

    /**
     * When set to {@code true}, this has a performance loss.
     * When set to {@code false}, it's easier to make the listener implementation correct and fast.
     *
     * @return true to guarantee that each of the before/after methods is only called once per entity instance
     *         per operation type (add, change or remove).
     */
    default boolean requiresUniqueEntityEvents() {
        return false;
    }

    /**
     * Called when the source variable is about to change on the given entity.
     *
     * @param scoreDirector the {@link ScoreDirector} of the working solution; never null
     * @param entity the entity whose source variable is about to change; never null
     */
    void beforeVariableChanged(@NonNull ScoreDirector<Solution_> scoreDirector, @NonNull Entity_ entity);

    /**
     * Called after the source variable has changed on the given entity.
     *
     * @param scoreDirector the {@link ScoreDirector} of the working solution; never null
     * @param entity the entity whose source variable has changed; never null
     */
    void afterVariableChanged(@NonNull ScoreDirector<Solution_> scoreDirector, @NonNull Entity_ entity);
}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/domain/variable/package-info.java | /**
* Domain annotations and support classes for a planning variable.
*/
package ai.timefold.solver.core.api.domain.variable;
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/function/PentaFunction.java | package ai.timefold.solver.core.api.function;
import java.util.function.Function;
/**
* Represents a function that accepts five arguments and produces a result.
* This is the five-arity specialization of {@link Function}.
*
* <p>
* This is a <a href="package-summary.html">functional interface</a>
* whose functional method is {@link #apply(Object, Object, Object, Object, Object)}.
*
* @param <A> the type of the first argument to the function
* @param <B> the type of the second argument to the function
* @param <C> the type of the third argument to the function
* @param <D> the type of the fourth argument to the function
* @param <E> the type of the fifth argument to the function
* @param <R> the type of the result of the function
*
* @see Function
*/
@FunctionalInterface
public interface PentaFunction<A, B, C, D, E, R> {
/**
* Applies this function to the given arguments.
*
* @param a the first function argument
* @param b the second function argument
* @param c the third function argument
* @param d the fourth function argument
* @param e the fifth function argument
* @return the function result
*/
R apply(A a, B b, C c, D d, E e);
}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/function/PentaPredicate.java | package ai.timefold.solver.core.api.function;
import java.util.Objects;
import java.util.function.Predicate;
import org.jspecify.annotations.NonNull;
/**
 * A boolean-valued function of five arguments;
 * the five-argument analogue of {@link Predicate}.
 *
 * <p>
 * This is a <a href="package-summary.html">functional interface</a>
 * whose functional method is {@link #test(Object, Object, Object, Object, Object)}.
 *
 * @param <A> the type of the first argument to the predicate
 * @param <B> the type of the second argument to the predicate
 * @param <C> the type of the third argument to the predicate
 * @param <D> the type of the fourth argument to the predicate
 * @param <E> the type of the fifth argument to the predicate
 *
 * @see Predicate
 */
@FunctionalInterface
public interface PentaPredicate<A, B, C, D, E> {

    /**
     * Evaluates this predicate for the five supplied arguments.
     *
     * @param a the first input argument
     * @param b the second input argument
     * @param c the third input argument
     * @param d the fourth input argument
     * @param e the fifth input argument
     * @return {@code true} if the arguments match this predicate, {@code false} otherwise
     */
    boolean test(A a, B b, C c, D d, E e);

    /**
     * Combines this predicate with {@code other} into a short-circuiting logical AND.
     * The {@code other} predicate is only evaluated when this predicate returns {@code true}.
     *
     * <p>
     * Exceptions thrown by either predicate propagate to the caller;
     * if this predicate throws, {@code other} is never evaluated.
     *
     * @param other a predicate that will be logically-ANDed with this predicate
     * @return a composed predicate that represents the short-circuiting logical
     *         AND of this predicate and the {@code other} predicate
     * @throws NullPointerException if other is null
     */
    default @NonNull PentaPredicate<A, B, C, D, E> and(
            @NonNull PentaPredicate<? super A, ? super B, ? super C, ? super D, ? super E> other) {
        Objects.requireNonNull(other);
        return (a, b, c, d, e) -> test(a, b, c, d, e) && other.test(a, b, c, d, e);
    }

    /**
     * Produces the logical complement of this predicate.
     *
     * @return a predicate that returns {@code true} exactly when this predicate returns {@code false}
     */
    default @NonNull PentaPredicate<A, B, C, D, E> negate() {
        return (a, b, c, d, e) -> !test(a, b, c, d, e);
    }

    /**
     * Combines this predicate with {@code other} into a short-circuiting logical OR.
     * The {@code other} predicate is only evaluated when this predicate returns {@code false}.
     *
     * <p>
     * Exceptions thrown by either predicate propagate to the caller;
     * if this predicate throws, {@code other} is never evaluated.
     *
     * @param other a predicate that will be logically-ORed with this predicate
     * @return a composed predicate that represents the short-circuiting logical
     *         OR of this predicate and the {@code other} predicate
     * @throws NullPointerException if other is null
     */
    default @NonNull PentaPredicate<A, B, C, D, E> or(
            @NonNull PentaPredicate<? super A, ? super B, ? super C, ? super D, ? super E> other) {
        Objects.requireNonNull(other);
        return (a, b, c, d, e) -> test(a, b, c, d, e) || other.test(a, b, c, d, e);
    }
}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/function/QuadConsumer.java | package ai.timefold.solver.core.api.function;
import java.util.Objects;
import java.util.function.Consumer;
import java.util.function.Function;
import org.jspecify.annotations.NonNull;
/**
 * Represents a function that accepts four arguments and returns no result.
 * This is the four-arity specialization of {@link Consumer}.
 *
 * <p>
 * This is a <a href="package-summary.html">functional interface</a>
 * whose functional method is {@link #accept(Object, Object, Object, Object)}.
 *
 * @param <A> the type of the first argument to the function
 * @param <B> the type of the second argument to the function
 * @param <C> the type of the third argument to the function
 * @param <D> the type of the fourth argument to the function
 *
 * @see Function
 */
@FunctionalInterface
public interface QuadConsumer<A, B, C, D> {

    /**
     * Performs this operation on the given arguments.
     *
     * @param a the first function argument
     * @param b the second function argument
     * @param c the third function argument
     * @param d the fourth function argument
     */
    void accept(A a, B b, C c, D d);

    /**
     * Returns a composed {@link QuadConsumer} that performs, in sequence, this operation
     * followed by the {@code after} operation.
     * If performing either operation throws an exception, it is relayed to the caller of the
     * composed operation; if performing this operation throws an exception,
     * the {@code after} operation will not be performed.
     *
     * @param after the operation to perform after this operation
     * @return a composed {@link QuadConsumer} that performs in sequence this operation
     *         followed by the {@code after} operation
     * @throws NullPointerException if {@code after} is null
     */
    default @NonNull QuadConsumer<A, B, C, D> andThen(@NonNull QuadConsumer<? super A, ? super B, ? super C, ? super D> after) {
        Objects.requireNonNull(after);
        return (a, b, c, d) -> {
            accept(a, b, c, d);
            after.accept(a, b, c, d);
        };
    }
}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/function/QuadFunction.java | package ai.timefold.solver.core.api.function;
import java.util.function.Function;
/**
* Represents a function that accepts four arguments and produces a result.
* This is the four-arity specialization of {@link Function}.
*
* <p>
* This is a <a href="package-summary.html">functional interface</a>
* whose functional method is {@link #apply(Object, Object, Object, Object)}.
*
* @param <A> the type of the first argument to the function
* @param <B> the type of the second argument to the function
* @param <C> the type of the third argument to the function
* @param <D> the type of the fourth argument to the function
* @param <R> the type of the result of the function
*
* @see Function
*/
@FunctionalInterface
public interface QuadFunction<A, B, C, D, R> {
/**
* Applies this function to the given arguments.
*
* @param a the first function argument
* @param b the second function argument
* @param c the third function argument
* @param d the fourth function argument
* @return the function result
*/
R apply(A a, B b, C c, D d);
}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/function/QuadPredicate.java | package ai.timefold.solver.core.api.function;
import java.util.Objects;
import java.util.function.Predicate;
import org.jspecify.annotations.NonNull;
/**
 * Represents a predicate (boolean-valued function) of four arguments.
 * This is the four-arity specialization of {@link Predicate}.
 *
 * <p>
 * This is a <a href="package-summary.html">functional interface</a>
 * whose functional method is {@link #test(Object, Object, Object, Object)}.
 *
 * @param <A> the type of the first argument to the predicate
 * @param <B> the type of the second argument to the predicate
 * @param <C> the type of the third argument to the predicate
 * @param <D> the type of the fourth argument to the predicate
 *
 * @see Predicate
 */
@FunctionalInterface
public interface QuadPredicate<A, B, C, D> {

    /**
     * Evaluates this predicate on the given arguments.
     *
     * @param a the first input argument
     * @param b the second input argument
     * @param c the third input argument
     * @param d the fourth input argument
     * @return {@code true} if the input arguments match the predicate,
     *         otherwise {@code false}
     */
    boolean test(A a, B b, C c, D d);

    /**
     * Returns a composed predicate that represents a short-circuiting logical
     * AND of this predicate and another. When evaluating the composed
     * predicate, if this predicate is {@code false}, then the {@code other}
     * predicate is not evaluated.
     *
     * <p>
     * Any exceptions thrown during evaluation of either predicate are relayed
     * to the caller; if evaluation of this predicate throws an exception, the
     * {@code other} predicate will not be evaluated.
     *
     * @param other a predicate that will be logically-ANDed with this predicate
     * @return a composed predicate that represents the short-circuiting logical
     *         AND of this predicate and the {@code other} predicate
     * @throws NullPointerException if other is null
     */
    default @NonNull QuadPredicate<A, B, C, D> and(@NonNull QuadPredicate<? super A, ? super B, ? super C, ? super D> other) {
        Objects.requireNonNull(other);
        return (A a, B b, C c, D d) -> test(a, b, c, d) && other.test(a, b, c, d);
    }

    /**
     * Returns a predicate that represents the logical negation of this
     * predicate.
     *
     * @return a predicate that represents the logical negation of this
     *         predicate
     */
    // @NonNull added for consistency with and()/or() above and with PentaPredicate.negate()/TriPredicate siblings.
    default @NonNull QuadPredicate<A, B, C, D> negate() {
        return (A a, B b, C c, D d) -> !test(a, b, c, d);
    }

    /**
     * Returns a composed predicate that represents a short-circuiting logical
     * OR of this predicate and another. When evaluating the composed
     * predicate, if this predicate is {@code true}, then the {@code other}
     * predicate is not evaluated.
     *
     * <p>
     * Any exceptions thrown during evaluation of either predicate are relayed
     * to the caller; if evaluation of this predicate throws an exception, the
     * {@code other} predicate will not be evaluated.
     *
     * @param other a predicate that will be logically-ORed with this predicate
     * @return a composed predicate that represents the short-circuiting logical
     *         OR of this predicate and the {@code other} predicate
     * @throws NullPointerException if other is null
     */
    default @NonNull QuadPredicate<A, B, C, D> or(@NonNull QuadPredicate<? super A, ? super B, ? super C, ? super D> other) {
        Objects.requireNonNull(other);
        return (A a, B b, C c, D d) -> test(a, b, c, d) || other.test(a, b, c, d);
    }
}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/function/ToIntQuadFunction.java | package ai.timefold.solver.core.api.function;
/**
* Represents a function that accepts four arguments and produces an int-valued result.
* This is the {@code int}-producing primitive specialization for {@link QuadFunction}.
*
* <p>
* This is a <a href="package-summary.html">functional interface</a>
* whose functional method is {@link #applyAsInt(Object, Object, Object, Object)}.
*
* @param <A> the type of the first argument to the function
* @param <B> the type of the second argument to the function
* @param <C> the type of the third argument to the function
* @param <D> the type of the fourth argument to the function
*
* @see QuadFunction
*/
@FunctionalInterface
public interface ToIntQuadFunction<A, B, C, D> {
/**
* Applies this function to the given arguments.
*
* @param a the first function argument
* @param b the second function argument
* @param c the third function argument
* @param d the fourth function argument
* @return the function result
*/
int applyAsInt(A a, B b, C c, D d);
}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/function/ToIntTriFunction.java | package ai.timefold.solver.core.api.function;
/**
* Represents a function that accepts three arguments and produces an int-valued result.
* This is the {@code int}-producing primitive specialization for {@link TriFunction}.
*
* <p>
* This is a <a href="package-summary.html">functional interface</a>
* whose functional method is {@link #applyAsInt(Object, Object, Object)}.
*
* @param <A> the type of the first argument to the function
* @param <B> the type of the second argument to the function
* @param <C> the type of the third argument to the function
*
* @see TriFunction
*/
@FunctionalInterface
public interface ToIntTriFunction<A, B, C> {
/**
* Applies this function to the given arguments.
*
* @param a the first function argument
* @param b the second function argument
* @param c the third function argument
* @return the function result
*/
int applyAsInt(A a, B b, C c);
}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/function/ToLongQuadFunction.java | package ai.timefold.solver.core.api.function;
/**
* Represents a function that accepts four arguments and produces a long-valued result.
* This is the {@code long}-producing primitive specialization for {@link QuadFunction}.
*
* <p>
* This is a <a href="package-summary.html">functional interface</a>
* whose functional method is {@link #applyAsLong(Object, Object, Object, Object)}.
*
* @param <A> the type of the first argument to the function
* @param <B> the type of the second argument to the function
* @param <C> the type of the third argument to the function
* @param <D> the type of the fourth argument to the function
*
* @see QuadFunction
*/
@FunctionalInterface
public interface ToLongQuadFunction<A, B, C, D> {
/**
* Applies this function to the given arguments.
*
* @param a the first function argument
* @param b the second function argument
* @param c the third function argument
* @param d the fourth function argument
* @return the function result
*/
long applyAsLong(A a, B b, C c, D d);
}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/function/ToLongTriFunction.java | package ai.timefold.solver.core.api.function;
/**
* Represents a function that accepts three arguments and produces a long-valued result.
* This is the {@code long}-producing primitive specialization for {@link TriFunction}.
*
* <p>
* This is a <a href="package-summary.html">functional interface</a>
* whose functional method is {@link #applyAsLong(Object, Object, Object)}.
*
* @param <A> the type of the first argument to the function
* @param <B> the type of the second argument to the function
* @param <C> the type of the third argument to the function
*
* @see TriFunction
*/
@FunctionalInterface
public interface ToLongTriFunction<A, B, C> {
/**
* Applies this function to the given arguments.
*
* @param a the first function argument
* @param b the second function argument
* @param c the third function argument
* @return the function result
*/
long applyAsLong(A a, B b, C c);
}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/function/TriConsumer.java | package ai.timefold.solver.core.api.function;
import java.util.Objects;
import java.util.function.Consumer;
import java.util.function.Function;
import org.jspecify.annotations.NonNull;
/**
 * Represents a function that accepts three arguments and returns no result.
 * This is the three-arity specialization of {@link Consumer}.
 *
 * <p>
 * This is a <a href="package-summary.html">functional interface</a>
 * whose functional method is {@link #accept(Object, Object, Object)}.
 *
 * @param <A> the type of the first argument to the function
 * @param <B> the type of the second argument to the function
 * @param <C> the type of the third argument to the function
 *
 * @see Function
 */
@FunctionalInterface
public interface TriConsumer<A, B, C> {

    /**
     * Performs this operation on the given arguments.
     *
     * @param a the first function argument
     * @param b the second function argument
     * @param c the third function argument
     */
    void accept(A a, B b, C c);

    /**
     * Returns a composed {@link TriConsumer} that performs, in sequence, this operation
     * followed by the {@code after} operation.
     * If performing either operation throws an exception, it is relayed to the caller of the
     * composed operation; if performing this operation throws an exception,
     * the {@code after} operation will not be performed.
     *
     * @param after the operation to perform after this operation
     * @return a composed {@link TriConsumer} that performs in sequence this operation
     *         followed by the {@code after} operation
     * @throws NullPointerException if {@code after} is null
     */
    default @NonNull TriConsumer<A, B, C> andThen(@NonNull TriConsumer<? super A, ? super B, ? super C> after) {
        Objects.requireNonNull(after);
        return (a, b, c) -> {
            accept(a, b, c);
            after.accept(a, b, c);
        };
    }
}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/function/TriFunction.java | package ai.timefold.solver.core.api.function;
import java.util.function.Function;
/**
* Represents a function that accepts three arguments and produces a result.
* This is the three-arity specialization of {@link Function}.
*
* <p>
* This is a <a href="package-summary.html">functional interface</a>
* whose functional method is {@link #apply(Object, Object, Object)}.
*
* @param <A> the type of the first argument to the function
* @param <B> the type of the second argument to the function
* @param <C> the type of the third argument to the function
* @param <R> the type of the result of the function
*
* @see Function
*/
@FunctionalInterface
public interface TriFunction<A, B, C, R> {
/**
* Applies this function to the given arguments.
*
* @param a the first function argument
* @param b the second function argument
* @param c the third function argument
* @return the function result
*/
R apply(A a, B b, C c);
}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/function/TriPredicate.java | package ai.timefold.solver.core.api.function;
import java.util.Objects;
import java.util.function.Predicate;
import org.jspecify.annotations.NonNull;
/**
 * Represents a predicate (boolean-valued function) of three arguments.
 * This is the three-arity specialization of {@link Predicate}.
 *
 * <p>
 * This is a <a href="package-summary.html">functional interface</a>
 * whose functional method is {@link #test(Object, Object, Object)}.
 *
 * @param <A> the type of the first argument to the predicate
 * @param <B> the type of the second argument to the predicate
 * @param <C> the type of the third argument to the predicate
 *
 * @see Predicate
 */
@FunctionalInterface
public interface TriPredicate<A, B, C> {

    /**
     * Evaluates this predicate on the given arguments.
     *
     * @param a the first input argument
     * @param b the second input argument
     * @param c the third input argument
     * @return {@code true} if the input arguments match the predicate,
     *         otherwise {@code false}
     */
    boolean test(A a, B b, C c);

    /**
     * Returns a composed predicate that represents a short-circuiting logical
     * AND of this predicate and another. When evaluating the composed
     * predicate, if this predicate is {@code false}, then the {@code other}
     * predicate is not evaluated.
     *
     * <p>
     * Any exceptions thrown during evaluation of either predicate are relayed
     * to the caller; if evaluation of this predicate throws an exception, the
     * {@code other} predicate will not be evaluated.
     *
     * @param other a predicate that will be logically-ANDed with this predicate
     * @return a composed predicate that represents the short-circuiting logical
     *         AND of this predicate and the {@code other} predicate
     * @throws NullPointerException if other is null
     */
    default @NonNull TriPredicate<A, B, C> and(@NonNull TriPredicate<? super A, ? super B, ? super C> other) {
        Objects.requireNonNull(other);
        return (A a, B b, C c) -> test(a, b, c) && other.test(a, b, c);
    }

    /**
     * Returns a predicate that represents the logical negation of this
     * predicate.
     *
     * @return a predicate that represents the logical negation of this
     *         predicate
     */
    // @NonNull added for consistency with and()/or() above and with PentaPredicate.negate().
    default @NonNull TriPredicate<A, B, C> negate() {
        return (A a, B b, C c) -> !test(a, b, c);
    }

    /**
     * Returns a composed predicate that represents a short-circuiting logical
     * OR of this predicate and another. When evaluating the composed
     * predicate, if this predicate is {@code true}, then the {@code other}
     * predicate is not evaluated.
     *
     * <p>
     * Any exceptions thrown during evaluation of either predicate are relayed
     * to the caller; if evaluation of this predicate throws an exception, the
     * {@code other} predicate will not be evaluated.
     *
     * @param other a predicate that will be logically-ORed with this predicate
     * @return a composed predicate that represents the short-circuiting logical
     *         OR of this predicate and the {@code other} predicate
     * @throws NullPointerException if other is null
     */
    default @NonNull TriPredicate<A, B, C> or(@NonNull TriPredicate<? super A, ? super B, ? super C> other) {
        Objects.requireNonNull(other);
        return (A a, B b, C c) -> test(a, b, c) || other.test(a, b, c);
    }
}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/function/package-info.java | /**
* Functions that are not available in {@link java.util.function}.
*/
package ai.timefold.solver.core.api.function;
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/score/AbstractBendableScore.java | package ai.timefold.solver.core.api.score;
import java.util.function.Predicate;
import ai.timefold.solver.core.impl.score.ScoreUtil;
/**
 * Abstract superclass for bendable {@link Score} types.
 * <p>
 * Subclasses must be immutable.
 *
 * @deprecated Implement {@link IBendableScore} instead.
 */
@Deprecated(forRemoval = true)
public abstract class AbstractBendableScore<Score_ extends AbstractBendableScore<Score_>>
        extends AbstractScore<Score_>
        implements IBendableScore<Score_> {

    // Aliases of the ScoreUtil constants, exposed here so existing subclasses keep compiling.
    protected static final String HARD_LABEL = ScoreUtil.HARD_LABEL;
    protected static final String SOFT_LABEL = ScoreUtil.SOFT_LABEL;
    protected static final String[] LEVEL_SUFFIXES = ScoreUtil.LEVEL_SUFFIXES;

    /**
     * Splits a bendable score string into its hard and soft level tokens.
     * Delegates to {@link ScoreUtil#parseBendableScoreTokens(Class, String)}.
     *
     * @param scoreClass the score class being parsed; used in error messages
     * @param scoreString the score string to split
     * @return the tokens, grouped per hard/soft section
     */
    protected static String[][] parseBendableScoreTokens(Class<? extends AbstractBendableScore<?>> scoreClass,
            String scoreString) {
        return ScoreUtil.parseBendableScoreTokens(scoreClass, scoreString);
    }

    /**
     * @param initScore see {@link AbstractScore#initScore()}
     */
    protected AbstractBendableScore(int initScore) {
        super(initScore);
    }

    /**
     * Builds a short string representation of this score, skipping levels for which
     * {@code notZero} returns {@code false}.
     * Delegates to {@link ScoreUtil#buildBendableShortString(IBendableScore, Predicate)}.
     *
     * @param notZero decides which level values are worth printing
     * @return the short string representation
     */
    protected String buildBendableShortString(Predicate<Number> notZero) {
        return ScoreUtil.buildBendableShortString(this, notZero);
    }
}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/score/AbstractScore.java | package ai.timefold.solver.core.api.score;
import java.io.Serializable;
import java.math.BigDecimal;
import java.util.function.Predicate;
import ai.timefold.solver.core.api.score.buildin.hardsoft.HardSoftScore;
import ai.timefold.solver.core.impl.score.ScoreUtil;
/**
 * Abstract superclass for {@link Score}.
 * <p>
 * Subclasses must be immutable.
 *
 * @param <Score_> the actual score type
 * @see Score
 * @see HardSoftScore
 * @deprecated Implement {@link Score} instead.
 */
@Deprecated(forRemoval = true)
public abstract class AbstractScore<Score_ extends AbstractScore<Score_>> implements Score<Score_>,
        Serializable {

    // Label used for the init part of a score string (see getInitPrefix()).
    protected static final String INIT_LABEL = "init";

    /**
     * Splits a score string into one token per score level.
     * Delegates to {@link ScoreUtil#parseScoreTokens(Class, String, String...)}.
     *
     * @param scoreClass the score class being parsed; used in error messages
     * @param scoreString the score string to split
     * @param levelSuffixes the suffix per score level
     * @return one token per level
     */
    protected static String[] parseScoreTokens(Class<? extends AbstractScore<?>> scoreClass,
            String scoreString, String... levelSuffixes) {
        return ScoreUtil.parseScoreTokens(scoreClass, scoreString, levelSuffixes);
    }

    /**
     * Parses the init part of a score string into an {@code int}.
     *
     * @param scoreClass the score class being parsed; only used to build the error message
     * @param scoreString the full score string; only used to build the error message
     * @param initScoreString the init substring to parse
     * @return the parsed init score
     * @throws IllegalArgumentException if {@code initScoreString} is not a valid integer
     */
    protected static int parseInitScore(Class<? extends AbstractScore<?>> scoreClass, String scoreString,
            String initScoreString) {
        try {
            return Integer.parseInt(initScoreString);
        } catch (NumberFormatException e) {
            throw new IllegalArgumentException("The scoreString (" + scoreString
                    + ") for the scoreClass (" + scoreClass.getSimpleName() + ") has a initScoreString ("
                    + initScoreString + ") which is not a valid integer.", e);
        }
    }

    /**
     * Parses one score level token as an {@code int}.
     * Delegates to {@link ScoreUtil#parseLevelAsInt(Class, String, String)}.
     */
    protected static int parseLevelAsInt(Class<? extends AbstractScore<?>> scoreClass,
            String scoreString, String levelString) {
        return ScoreUtil.parseLevelAsInt(scoreClass, scoreString, levelString);
    }

    /**
     * Parses one score level token as a {@code long}.
     * Delegates to {@link ScoreUtil#parseLevelAsLong(Class, String, String)}.
     */
    protected static long parseLevelAsLong(Class<? extends AbstractScore<?>> scoreClass,
            String scoreString, String levelString) {
        return ScoreUtil.parseLevelAsLong(scoreClass, scoreString, levelString);
    }

    /**
     * Parses one score level token as a {@link BigDecimal}.
     * Delegates to {@link ScoreUtil#parseLevelAsBigDecimal(Class, String, String)}.
     */
    protected static BigDecimal parseLevelAsBigDecimal(Class<? extends AbstractScore<?>> scoreClass,
            String scoreString, String levelString) {
        return ScoreUtil.parseLevelAsBigDecimal(scoreClass, scoreString, levelString);
    }

    /**
     * Builds a regular expression pattern matching a score string with the given level suffixes.
     * Delegates to {@link ScoreUtil#buildScorePattern(boolean, String...)}.
     */
    protected static String buildScorePattern(boolean bendable, String... levelSuffixes) {
        return ScoreUtil.buildScorePattern(bendable, levelSuffixes);
    }

    // ************************************************************************
    // Fields
    // ************************************************************************

    protected final int initScore;

    protected AbstractScore(int initScore) {
        this.initScore = initScore;
        // The initScore can be positive during statistical calculations.
    }

    @Override
    public int initScore() {
        return initScore;
    }

    // ************************************************************************
    // Worker methods
    // ************************************************************************

    /**
     * @return an empty string when {@link #initScore} is 0,
     *         otherwise the init score followed by {@code "init/"} (for example {@code "-7init/"})
     */
    protected String getInitPrefix() {
        if (initScore == 0) {
            return "";
        }
        return initScore + INIT_LABEL + "/";
    }

    /**
     * Builds a short string representation of this score, skipping levels for which
     * {@code notZero} returns {@code false}.
     * Delegates to {@link ScoreUtil#buildShortString(Score, Predicate, String...)}.
     *
     * @param notZero decides which level values are worth printing
     * @param levelLabels the label per score level
     * @return the short string representation
     */
    protected String buildShortString(Predicate<Number> notZero, String... levelLabels) {
        return ScoreUtil.buildShortString(this, notZero, levelLabels);
    }
}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/score/IBendableScore.java | package ai.timefold.solver.core.api.score;
import java.io.Serializable;
import ai.timefold.solver.core.api.score.buildin.bendable.BendableScore;
import org.jspecify.annotations.NullMarked;
/**
* Bendable score is a {@link Score} whose {@link #hardLevelsSize()} and {@link #softLevelsSize()}
* are only known at runtime.
*
* <p>
* Interfaces in Timefold are usually not prefixed with "I".
* However, the conflict in name with its implementation ({@link BendableScore}) made this necessary.
* All the other options were considered worse, some even harmful.
* This is a minor issue, as users will access the implementation and not the interface anyway.
*
* @param <Score_> the actual score type to allow addition, subtraction and other arithmetic
*/
@NullMarked
public interface IBendableScore<Score_ extends IBendableScore<Score_>>
        extends Score<Score_>, Serializable {

    /**
     * Number of hard score levels.
     * Together with {@link #softLevelsSize()} it sums to {@link #levelsSize()}.
     *
     * @return {@code >= 0} and {@code <} {@link #levelsSize()}
     */
    int hardLevelsSize();

    /**
     * As defined by {@link #hardLevelsSize()}.
     *
     * @deprecated Use {@link #hardLevelsSize()} instead.
     */
    @Deprecated(forRemoval = true)
    default int getHardLevelsSize() {
        return hardLevelsSize();
    }

    /**
     * Number of soft score levels.
     * Together with {@link #hardLevelsSize()} it sums to {@link #levelsSize()}.
     *
     * @return {@code >= 0} and {@code <} {@link #levelsSize()}
     */
    int softLevelsSize();

    /**
     * As defined by {@link #softLevelsSize()}.
     *
     * @deprecated Use {@link #softLevelsSize()} instead.
     */
    @Deprecated(forRemoval = true)
    default int getSoftLevelsSize() {
        return softLevelsSize();
    }

    /**
     * @return {@link #hardLevelsSize()} + {@link #softLevelsSize()}
     */
    default int levelsSize() {
        return hardLevelsSize() + softLevelsSize();
    }

    /**
     * As defined by {@link #levelsSize()}.
     *
     * @deprecated Use {@link #levelsSize()} instead.
     */
    @Deprecated(forRemoval = true)
    default int getLevelsSize() {
        return levelsSize();
    }
}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/score/Score.java | package ai.timefold.solver.core.api.score;
import java.io.Serializable;
import ai.timefold.solver.core.api.domain.solution.PlanningSolution;
import ai.timefold.solver.core.api.score.buildin.hardsoft.HardSoftScore;
import ai.timefold.solver.core.api.score.buildin.simple.SimpleScore;
import ai.timefold.solver.core.api.score.buildin.simplebigdecimal.SimpleBigDecimalScore;
import ai.timefold.solver.core.api.score.buildin.simplelong.SimpleLongScore;
import org.jspecify.annotations.NullMarked;
/**
* A Score is the result of the score function (AKA fitness function) on a single possible solution.
*
* Implementations must be immutable,
* preferably a Java record or even a primitive record,
* if the target JDK permits that.
*
* @param <Score_> the actual score type to allow addition, subtraction and other arithmetic
* @see HardSoftScore
*/
@NullMarked
public interface Score<Score_ extends Score<Score_>>
        extends Comparable<Score_>, Serializable {

    /**
     * @return Always zero.
     * @deprecated No point in using this method anymore.
     */
    @Deprecated(forRemoval = true, since = "1.22.0")
    default int initScore() {
        return 0;
    }

    /**
     * @return Always zero.
     * @deprecated No point in using this method anymore.
     */
    @Deprecated(forRemoval = true)
    default int getInitScore() {
        return 0;
    }

    /**
     * @return this, init score always zero.
     * @deprecated No point in using this method anymore.
     */
    @Deprecated(forRemoval = true, since = "1.22.0")
    @SuppressWarnings("unchecked")
    default Score_ withInitScore(int newInitScore) {
        return (Score_) this;
    }

    /**
     * Returns a Score whose value is (this + addend).
     *
     * @param addend value to be added to this Score
     * @return this + addend
     */
    Score_ add(Score_ addend);

    /**
     * Returns a Score whose value is (this - subtrahend).
     *
     * @param subtrahend value to be subtracted from this Score
     * @return this - subtrahend, rounded as necessary
     */
    Score_ subtract(Score_ subtrahend);

    /**
     * Returns a Score whose value is (this * multiplicand).
     * When rounding is needed, it should be floored (as defined by {@link Math#floor(double)}).
     * <p>
     * If the implementation has a scale/precision, the unspecified scale/precision of the double
     * multiplicand should have no impact on the returned scale/precision.
     *
     * @param multiplicand value to be multiplied by this Score.
     * @return this * multiplicand
     */
    Score_ multiply(double multiplicand);

    /**
     * Returns a Score whose value is (this / divisor).
     * When rounding is needed, it should be floored (as defined by {@link Math#floor(double)}).
     * <p>
     * If the implementation has a scale/precision, the unspecified scale/precision of the double
     * divisor should have no impact on the returned scale/precision.
     *
     * @param divisor value by which this Score is to be divided
     * @return this / divisor
     */
    Score_ divide(double divisor);

    /**
     * Returns a Score whose value is (this ^ exponent).
     * When rounding is needed, it should be floored (as defined by {@link Math#floor(double)}).
     * <p>
     * If the implementation has a scale/precision, the unspecified scale/precision of the double
     * exponent should have no impact on the returned scale/precision.
     *
     * @param exponent value by which this Score is to be powered
     * @return this ^ exponent
     */
    Score_ power(double exponent);

    /**
     * Returns a Score whose value is (- this).
     *
     * @return - this
     */
    @SuppressWarnings("unchecked")
    default Score_ negate() {
        var self = (Score_) this;
        var zeroScore = zero();
        // Negating zero would allocate a new instance for no benefit; return this instead.
        return zeroScore.equals(self) ? self : zeroScore.subtract(self);
    }

    /**
     * Returns a Score whose value is the absolute value of the score, i.e. |this|.
     */
    Score_ abs();

    /**
     * Returns a Score, all levels of which are zero.
     */
    Score_ zero();

    /**
     * @return true when this {@link Object#equals(Object) is equal to} {@link #zero()}.
     */
    default boolean isZero() {
        return equals(zero());
    }

    /**
     * Returns an array of numbers representing the Score. Each number represents 1 score level.
     * A greater score level uses a lower array index than a lesser score level.
     * <p>
     * When rounding is needed, each rounding should be floored (as defined by {@link Math#floor(double)}).
     * The length of the returned array must be stable for a specific {@link Score} implementation.
     * <p>
     * For example: {@code -0hard/-7soft} returns {@code new int[] {-0, -7}}
     */
    Number[] toLevelNumbers();

    /**
     * As defined by {@link #toLevelNumbers()}, only returns double[] instead of Number[].
     */
    default double[] toLevelDoubles() {
        var numbers = toLevelNumbers();
        var result = new double[numbers.length];
        for (var index = 0; index < numbers.length; index++) {
            result[index] = numbers[index].doubleValue();
        }
        return result;
    }

    /**
     * @return always true
     * @deprecated No point in using this method anymore.
     */
    @Deprecated(forRemoval = true, since = "1.22.0")
    default boolean isSolutionInitialized() {
        return true;
    }

    /**
     * A {@link PlanningSolution} is feasible if it has no broken hard constraints.
     * Simple scores ({@link SimpleScore}, {@link SimpleLongScore}, {@link SimpleBigDecimalScore}) are always feasible.
     *
     * @return true if the hard score is 0 or higher.
     */
    boolean isFeasible();

    /**
     * Like {@link Object#toString()}, but trims score levels which have a zero weight.
     * For example {@literal 0hard/-258soft} returns {@literal -258soft}.
     * <p>
     * Do not use this format to persist information as text, use {@link Object#toString()} instead,
     * so it can be parsed reliably.
     */
    String toShortString();
}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/score/ScoreExplanation.java | package ai.timefold.solver.core.api.score;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import ai.timefold.solver.core.api.domain.entity.PlanningEntity;
import ai.timefold.solver.core.api.domain.solution.PlanningSolution;
import ai.timefold.solver.core.api.domain.solution.ProblemFactCollectionProperty;
import ai.timefold.solver.core.api.score.analysis.ScoreAnalysis;
import ai.timefold.solver.core.api.score.calculator.ConstraintMatchAwareIncrementalScoreCalculator;
import ai.timefold.solver.core.api.score.constraint.ConstraintMatch;
import ai.timefold.solver.core.api.score.constraint.ConstraintMatchTotal;
import ai.timefold.solver.core.api.score.constraint.ConstraintRef;
import ai.timefold.solver.core.api.score.constraint.Indictment;
import ai.timefold.solver.core.api.score.stream.Constraint;
import ai.timefold.solver.core.api.score.stream.ConstraintJustification;
import ai.timefold.solver.core.api.score.stream.DefaultConstraintJustification;
import ai.timefold.solver.core.api.solver.SolutionManager;
import org.jspecify.annotations.NonNull;
/**
* Built by {@link SolutionManager#explain(Object)} to hold {@link ConstraintMatchTotal}s and {@link Indictment}s
* necessary to explain the quality of a particular {@link Score}.
* <p>
* For a simplified, faster and JSON-friendly alternative, see {@link ScoreAnalysis}.
*
* @param <Solution_> the solution type, the class with the {@link PlanningSolution} annotation
* @param <Score_> the actual score type
*/
public interface ScoreExplanation<Solution_, Score_ extends Score<Score_>> {

    /**
     * Retrieve the {@link PlanningSolution} that the score being explained comes from.
     */
    @NonNull
    Solution_ getSolution();

    /**
     * Return the {@link Score} being explained.
     * If the specific {@link Score} type used by the {@link PlanningSolution} is required,
     * call {@link #getSolution()} and retrieve it from there.
     */
    @NonNull
    Score_ getScore();

    /**
     * Whether {@link #getSolution()} is initialized or not.
     *
     * @return true if initialized
     */
    boolean isInitialized();

    /**
     * Returns a diagnostic text that explains the solution through the {@link ConstraintMatch} API to identify which
     * constraints or planning entities cause that score quality.
     * <p>
     * In case of an {@link Score#isFeasible() infeasible} solution, this can help diagnose the cause of that.
     *
     * <p>
     * Do not parse the return value, its format may change without warning.
     * Instead, to provide this information in a UI or a service,
     * use {@link ScoreExplanation#getConstraintMatchTotalMap()} and {@link ScoreExplanation#getIndictmentMap()}
     * and convert those into a domain-specific API.
     */
    @NonNull
    String getSummary();

    /**
     * Explains the {@link Score} of {@link #getScore()} by splitting it up per {@link Constraint}.
     * <p>
     * The sum of {@link ConstraintMatchTotal#getScore()} equals {@link #getScore()}.
     *
     * @return the key is the constraintId
     *         (to create one, use {@link ConstraintRef#composeConstraintId(String, String)}).
     * @see #getIndictmentMap()
     */
    @NonNull
    Map<String, ConstraintMatchTotal<Score_>> getConstraintMatchTotalMap();

    /**
     * Explains the {@link Score} of {@link #getScore()} for all constraints.
     * The return value of this method is determined by several factors:
     *
     * <ul>
     * <li>
     * With Constraint Streams, the user has an option to provide a custom justification mapping,
     * implementing {@link ConstraintJustification}.
     * If provided, every {@link ConstraintMatch} of such constraint will be associated with this custom justification class.
     * Every constraint not associated with a custom justification class
     * will be associated with {@link DefaultConstraintJustification}.
     * </li>
     * <li>
     * With {@link ConstraintMatchAwareIncrementalScoreCalculator},
     * every {@link ConstraintMatch} will be associated with the justification class that the user created it with.
     * </li>
     * </ul>
     *
     * @return all constraint matches
     * @see #getIndictmentMap()
     */
    @NonNull
    List<ConstraintJustification> getJustificationList();

    /**
     * Explains the {@link Score} of {@link #getScore()} for all constraints
     * justified with a given {@link ConstraintJustification} type.
     * Otherwise, as defined by {@link #getJustificationList()}.
     * May be empty, if the score explanation ran with justification support disabled.
     *
     * @return all constraint matches associated with the given justification class
     * @see #getIndictmentMap()
     */
    @SuppressWarnings("unchecked") // Safe: isInstance() guarantees every surviving element has the requested type.
    default <ConstraintJustification_ extends ConstraintJustification> @NonNull List<ConstraintJustification_>
            getJustificationList(@NonNull Class<? extends ConstraintJustification_> constraintJustificationClass) {
        return getJustificationList()
                .stream()
                .filter(constraintJustificationClass::isInstance)
                .map(constraintJustification -> (ConstraintJustification_) constraintJustification)
                .collect(Collectors.toList());
    }

    /**
     * Explains the impact of each planning entity or problem fact on the {@link Score}.
     * An {@link Indictment} is basically the inverse of a {@link ConstraintMatchTotal}:
     * it is a {@link Score} total for any of the {@link ConstraintMatch#getIndictedObjectList() indicted objects}.
     * <p>
     * The sum of {@link ConstraintMatchTotal#getScore()} differs from {@link #getScore()}
     * because each {@link ConstraintMatch#getScore()} is counted
     * for each of the {@link ConstraintMatch#getIndictedObjectList() indicted objects}.
     *
     * @return the key is a {@link ProblemFactCollectionProperty problem fact} or a
     *         {@link PlanningEntity planning entity}
     * @see #getConstraintMatchTotalMap()
     */
    @NonNull
    Map<Object, Indictment<Score_>> getIndictmentMap();
}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/score/ScoreManager.java | package ai.timefold.solver.core.api.score;
import static ai.timefold.solver.core.api.solver.SolutionUpdatePolicy.UPDATE_ALL;
import java.util.UUID;
import ai.timefold.solver.core.api.domain.solution.PlanningScore;
import ai.timefold.solver.core.api.domain.solution.PlanningSolution;
import ai.timefold.solver.core.api.score.calculator.EasyScoreCalculator;
import ai.timefold.solver.core.api.score.constraint.ConstraintMatch;
import ai.timefold.solver.core.api.score.constraint.ConstraintMatchTotal;
import ai.timefold.solver.core.api.score.constraint.Indictment;
import ai.timefold.solver.core.api.solver.SolutionManager;
import ai.timefold.solver.core.api.solver.SolutionUpdatePolicy;
import ai.timefold.solver.core.api.solver.SolverFactory;
import ai.timefold.solver.core.api.solver.SolverManager;
import ai.timefold.solver.core.impl.score.DefaultScoreManager;
/**
* A stateless service to help calculate {@link Score}, {@link ConstraintMatchTotal}, {@link Indictment}, etc.
* <p>
* To create a ScoreManager, use {@link #create(SolverFactory)}.
* <p>
* These methods are thread-safe unless explicitly stated otherwise.
*
* @param <Solution_> the solution type, the class with the {@link PlanningSolution} annotation
* @param <Score_> the actual score type
* @deprecated Use {@link SolutionManager} instead.
*/
@Deprecated(forRemoval = true)
public interface ScoreManager<Solution_, Score_ extends Score<Score_>> {

    // ************************************************************************
    // Static creation methods: SolverFactory
    // ************************************************************************

    /**
     * Uses a {@link SolverFactory} to build a {@link ScoreManager}.
     *
     * @param solverFactory never null
     * @return never null
     * @param <Solution_> the solution type, the class with the {@link PlanningSolution} annotation
     * @param <Score_> the actual score type
     */
    static <Solution_, Score_ extends Score<Score_>> ScoreManager<Solution_, Score_> create(
            SolverFactory<Solution_> solverFactory) {
        return new DefaultScoreManager<>(SolutionManager.<Solution_, Score_> create(solverFactory));
    }

    /**
     * Uses a {@link SolverManager} to build a {@link ScoreManager}.
     *
     * @param solverManager never null
     * @return never null
     * @param <Solution_> the solution type, the class with the {@link PlanningSolution} annotation
     * @param <Score_> the actual score type
     * @param <ProblemId_> the ID type of a submitted problem, such as {@link Long} or {@link UUID}
     */
    static <Solution_, Score_ extends Score<Score_>, ProblemId_> ScoreManager<Solution_, Score_> create(
            SolverManager<Solution_, ProblemId_> solverManager) {
        return new DefaultScoreManager<>(SolutionManager.<Solution_, Score_, ProblemId_> create(solverManager));
    }

    // ************************************************************************
    // Interface methods
    // ************************************************************************

    /**
     * Calculates the {@link Score} of a {@link PlanningSolution} and updates its {@link PlanningScore} member.
     * <p>
     * Equivalent to calling {@link #update(Object, SolutionUpdatePolicy)}
     * with {@link SolutionUpdatePolicy#UPDATE_SCORE_ONLY}.
     * That policy doesn't update shadow variables, which carries a performance advantage
     * but also brings additional limitations;
     * review the {@link SolutionUpdatePolicy} documentation for details.
     *
     * @param solution never null
     */
    Score_ updateScore(Solution_ solution);

    /**
     * Returns a diagnostic text that explains the solution through the {@link ConstraintMatch} API to identify which
     * constraints or planning entities cause that score quality.
     * In case of an {@link Score#isFeasible() infeasible} solution, this can help diagnose the cause of that.
     * <p>
     * Don't parse this string.
     * Instead, to provide this information in a UI or a service, use {@link #explainScore(Object)}
     * to retrieve {@link ScoreExplanation#getConstraintMatchTotalMap()} and {@link ScoreExplanation#getIndictmentMap()}
     * and convert those into a domain specific API.
     *
     * @param solution never null
     * @return null if {@link #updateScore(Object)} returns null with the same solution
     * @throws IllegalStateException when constraint matching is disabled or not supported by the underlying score
     *         calculator, such as {@link EasyScoreCalculator}.
     */
    String getSummary(Solution_ solution);

    /**
     * Calculates and retrieves {@link ConstraintMatchTotal}s and {@link Indictment}s necessary for describing the
     * quality of a particular solution.
     * <p>
     * Equivalent to calling {@link #explain(Object, SolutionUpdatePolicy)}
     * with {@link SolutionUpdatePolicy#UPDATE_SCORE_ONLY}.
     * That policy doesn't update shadow variables, which carries a performance advantage
     * but also brings additional limitations;
     * review the {@link SolutionUpdatePolicy} documentation for details.
     *
     * @param solution never null
     * @return never null
     * @throws IllegalStateException when constraint matching is disabled or not supported by the underlying score
     *         calculator, such as {@link EasyScoreCalculator}.
     */
    ScoreExplanation<Solution_, Score_> explainScore(Solution_ solution);

    /**
     * As defined by {@link #update(Object, SolutionUpdatePolicy)},
     * using {@link SolutionUpdatePolicy#UPDATE_ALL}.
     */
    default Score_ update(Solution_ solution) {
        return update(solution, SolutionUpdatePolicy.UPDATE_ALL);
    }

    /**
     * Updates the given solution according to the {@link SolutionUpdatePolicy}.
     *
     * @param solution never null
     * @param solutionUpdatePolicy never null; if unsure, pick {@link SolutionUpdatePolicy#UPDATE_ALL}
     * @return possibly null if already null and {@link SolutionUpdatePolicy} didn't cause its update
     * @see SolutionUpdatePolicy Description of individual policies with respect to performance trade-offs.
     */
    Score_ update(Solution_ solution, SolutionUpdatePolicy solutionUpdatePolicy);

    /**
     * As defined by {@link #explain(Object)},
     * using {@link SolutionUpdatePolicy#UPDATE_ALL}.
     */
    default ScoreExplanation<Solution_, Score_> explain(Solution_ solution) {
        return explain(solution, SolutionUpdatePolicy.UPDATE_ALL);
    }

    /**
     * Calculates and retrieves {@link ConstraintMatchTotal}s and {@link Indictment}s necessary for describing the
     * quality of a particular solution.
     *
     * @param solution never null
     * @param solutionUpdatePolicy never null; if unsure, pick {@link SolutionUpdatePolicy#UPDATE_ALL}
     * @return never null
     * @throws IllegalStateException when constraint matching is disabled or not supported by the underlying score
     *         calculator, such as {@link EasyScoreCalculator}.
     * @see SolutionUpdatePolicy Description of individual policies with respect to performance trade-offs.
     */
    ScoreExplanation<Solution_, Score_> explain(Solution_ solution, SolutionUpdatePolicy solutionUpdatePolicy);
}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/score/package-info.java | /**
* Classes used for {@link ai.timefold.solver.core.api.score.Score} calculation.
*/
package ai.timefold.solver.core.api.score;
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/score | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/score/analysis/ConstraintAnalysis.java | package ai.timefold.solver.core.api.score.analysis;
import static ai.timefold.solver.core.api.score.analysis.ScoreAnalysis.DEFAULT_SUMMARY_CONSTRAINT_MATCH_LIMIT;
import static java.util.Comparator.comparing;
import java.util.Comparator;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.stream.Stream;
import ai.timefold.solver.core.api.score.Score;
import ai.timefold.solver.core.api.score.calculator.ConstraintMatchAwareIncrementalScoreCalculator;
import ai.timefold.solver.core.api.score.constraint.ConstraintRef;
import ai.timefold.solver.core.api.score.stream.ConstraintJustification;
import ai.timefold.solver.core.api.solver.SolutionManager;
import ai.timefold.solver.core.impl.score.constraint.DefaultConstraintMatchTotal;
import ai.timefold.solver.core.impl.util.CollectionUtils;
import org.jspecify.annotations.NonNull;
import org.jspecify.annotations.Nullable;
/**
* Note: Users should never create instances of this type directly.
* It is available transitively via {@link SolutionManager#analyze(Object)}.
*
* @param <Score_>
* @param matches null if analysis not available;
* empty if constraint has no matches, but still non-zero constraint weight;
* non-empty if constraint has matches.
* This is a {@link List} to simplify access to individual elements,
* but it contains no duplicates just like {@link HashSet} wouldn't.
* @param matchCount
* <ul>
* <li>For regular constraint analysis:
* -1 if analysis not available,
* 0 if constraint has no matches,
* positive if constraint has matches.
* Equal to the size of the {@link #matches} list.</li>
* <li>For a {@link ScoreAnalysis#diff(ScoreAnalysis) diff of constraint analyses}:
* positive if the constraint has more matches in the new analysis,
* zero if the number of matches is the same in both,
* negative otherwise.
* Need not be equal to the size of the {@link #matches} list.</li>
* </ul>
*/
public record ConstraintAnalysis<Score_ extends Score<Score_>>(@NonNull ConstraintRef constraintRef, @NonNull Score_ weight,
@NonNull Score_ score, @Nullable List<MatchAnalysis<Score_>> matches, int matchCount) {
public ConstraintAnalysis(@NonNull ConstraintRef constraintRef, @NonNull Score_ weight, @NonNull Score_ score,
@Nullable List<MatchAnalysis<Score_>> matches) {
this(constraintRef, weight, score, matches, matches == null ? -1 : matches.size());
}
public ConstraintAnalysis {
Objects.requireNonNull(constraintRef);
/*
* Null only possible in ConstraintMatchAwareIncrementalScoreCalculator and/or tests.
* Easy doesn't support constraint analysis at all.
* CS always provides constraint weights.
*/
Objects.requireNonNull(weight, () -> """
The constraint weight must be non-null.
Maybe use a non-deprecated %s constructor in your %s implementation?"""
.formatted(DefaultConstraintMatchTotal.class.getSimpleName(),
ConstraintMatchAwareIncrementalScoreCalculator.class.getSimpleName()));
Objects.requireNonNull(score);
}
@NonNull
ConstraintAnalysis<Score_> negate() {
// Only used to compute diff; use semantics for non-diff.
// A negative match count is only allowed within these semantics when matches == null.
if (matches == null) {
// At this point, matchCount is already negative, as matches == null.
return new ConstraintAnalysis<>(constraintRef, weight.negate(), score.negate(), null, matchCount);
} else {
// Within these semantics, match count == list size.
var negatedMatchAnalysesList = matches.stream()
.map(MatchAnalysis::negate)
.toList();
return new ConstraintAnalysis<>(constraintRef, weight.negate(), score.negate(), negatedMatchAnalysesList,
matchCount);
}
}
static <Score_ extends Score<Score_>> @NonNull ConstraintAnalysis<Score_> diff(
@NonNull ConstraintRef constraintRef, @Nullable ConstraintAnalysis<Score_> constraintAnalysis,
@Nullable ConstraintAnalysis<Score_> otherConstraintAnalysis) {
if (constraintAnalysis == null) {
if (otherConstraintAnalysis == null) {
throw new IllegalStateException(
"Impossible state: none of the score explanations provided constraint matches for a constraint (%s)."
.formatted(constraintRef));
}
// No need to compute diff; this constraint is not present in this score explanation.
return otherConstraintAnalysis.negate();
} else if (otherConstraintAnalysis == null) {
// No need to compute diff; this constraint is not present in the other score explanation.
return constraintAnalysis;
}
var matchAnalyses = constraintAnalysis.matches();
var otherMatchAnalyses = otherConstraintAnalysis.matches();
if ((matchAnalyses == null && otherMatchAnalyses != null) || (matchAnalyses != null && otherMatchAnalyses == null)) {
throw new IllegalStateException(
"Impossible state: One of the score analyses (%s, %s) provided no match analysis for a constraint (%s)."
.formatted(constraintAnalysis, otherConstraintAnalysis, constraintRef));
}
// Compute the diff.
var constraintWeightDifference = constraintAnalysis.weight().subtract(otherConstraintAnalysis.weight());
var scoreDifference = constraintAnalysis.score().subtract(otherConstraintAnalysis.score());
if (matchAnalyses == null) {
var leftHasMatchCount = hasMatchCount(constraintAnalysis);
var rightHasMatchCount = hasMatchCount(otherConstraintAnalysis);
if ((!leftHasMatchCount && rightHasMatchCount) || (leftHasMatchCount && !rightHasMatchCount)) {
throw new IllegalStateException(
"Impossible state: One of the score analyses (%s, %s) provided no match count for a constraint (%s)."
.formatted(constraintAnalysis, otherConstraintAnalysis, constraintRef));
}
return new ConstraintAnalysis<>(constraintRef, constraintWeightDifference, scoreDifference, null,
getMatchCount(constraintAnalysis, otherConstraintAnalysis));
}
var matchAnalysisMap = mapMatchesToJustifications(matchAnalyses);
var otherMatchAnalysisMap = mapMatchesToJustifications(otherMatchAnalyses);
var matchAnalysesList = Stream.concat(matchAnalysisMap.keySet().stream(), otherMatchAnalysisMap.keySet().stream())
.distinct()
.flatMap(justification -> {
var matchAnalysis = matchAnalysisMap.get(justification);
var otherMatchAnalysis = otherMatchAnalysisMap.get(justification);
if (matchAnalysis == null) {
if (otherMatchAnalysis == null) {
throw new IllegalStateException(
"Impossible state: none of the match analyses provided for a constraint (%s)."
.formatted(constraintRef));
}
// No need to compute diff; this match is not present in this score explanation.
return Stream.of(otherMatchAnalysis.negate());
} else if (otherMatchAnalysis == null) {
// No need to compute diff; this match is not present in the other score explanation.
return Stream.of(matchAnalysis);
} else if (!matchAnalysis.equals(otherMatchAnalysis)) { // Compute the diff.
return Stream.of(new MatchAnalysis<>(constraintRef,
matchAnalysis.score().subtract(otherMatchAnalysis.score()), justification));
} else { // There is no difference; skip entirely.
return Stream.empty();
}
}).toList();
return new ConstraintAnalysis<>(constraintRef, constraintWeightDifference, scoreDifference, matchAnalysesList,
getMatchCount(constraintAnalysis, otherConstraintAnalysis));
}
private static boolean hasMatchCount(ConstraintAnalysis<?> analysis) {
return analysis.matchCount >= 0;
}
private static int getMatchCount(ConstraintAnalysis<?> analysis, ConstraintAnalysis<?> otherAnalysis) {
return analysis.matchCount() - otherAnalysis.matchCount();
}
private static <Score_ extends Score<Score_>> Map<ConstraintJustification, MatchAnalysis<Score_>>
mapMatchesToJustifications(List<MatchAnalysis<Score_>> matchAnalyses) {
Map<ConstraintJustification, MatchAnalysis<Score_>> matchAnalysisMap =
CollectionUtils.newLinkedHashMap(matchAnalyses.size());
for (var matchAnalysis : matchAnalyses) {
var previous = matchAnalysisMap.put(matchAnalysis.justification(), matchAnalysis);
if (previous != null) {
// Match analysis for the same justification should have been merged already.
throw new IllegalStateException(
"Impossible state: multiple constraint matches (%s, %s) have the same justification (%s)."
.formatted(previous, matchAnalysis, matchAnalysis.justification()));
}
}
return matchAnalysisMap;
}
/**
* Return package name of the constraint that this analysis is for.
*
* @return equal to {@code constraintRef.packageName()}
* @deprecated Do not rely on constraint package in user code.
*/
@Deprecated(forRemoval = true, since = "1.13.0")
public String constraintPackage() {
return constraintRef.packageName();
}
/**
* Return name of the constraint that this analysis is for.
*
* @return equal to {@code constraintRef.constraintName()}
*/
public @NonNull String constraintName() {
return constraintRef.constraintName();
}
/**
* Returns a diagnostic text that explains part of the score quality through the {@link ConstraintAnalysis} API.
* The string is built fresh every time the method is called.
*/
@SuppressWarnings("java:S3457")
public @NonNull String summarize() {
var summary = new StringBuilder();
summary.append("""
Explanation of score (%s):
Constraint matches:
""".formatted(score));
Comparator<MatchAnalysis<Score_>> matchScoreComparator = comparing(MatchAnalysis::score);
var constraintMatches = matches();
if (constraintMatches == null) {
throw new IllegalArgumentException("""
The constraint matches must be non-null.
Maybe use ScoreAnalysisFetchPolicy.FETCH_ALL to request the score analysis
""");
}
if (constraintMatches.isEmpty()) {
summary.append(
"%8s%s: constraint (%s) has no matches.\n".formatted(" ", score().toShortString(),
constraintRef().constraintName()));
} else {
summary.append("%8s%s: constraint (%s) has %s matches:\n".formatted(" ", score().toShortString(),
constraintRef().constraintName(), constraintMatches.size()));
}
constraintMatches.stream()
.sorted(matchScoreComparator)
.limit(DEFAULT_SUMMARY_CONSTRAINT_MATCH_LIMIT)
.forEach(match -> summary.append("%12S%s: justified with (%s)\n".formatted(" ", match.score().toShortString(),
match.justification())));
if (constraintMatches.size() > DEFAULT_SUMMARY_CONSTRAINT_MATCH_LIMIT) {
summary.append("%12s%s\n".formatted(" ", "..."));
}
return summary.toString();
}
@Override
public String toString() {
    // The textual form depends on how much match information was fetched.
    if (matches != null) {
        return "(%s at %s, %d matches with justifications)"
                .formatted(score, weight, matches.size());
    }
    if (matchCount == -1) {
        return "(%s at %s, constraint matching disabled)"
                .formatted(score, weight);
    }
    return "(%s at %s, %d matches, justifications disabled)"
            .formatted(score, weight, matchCount);
}
}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/score | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/score/analysis/MatchAnalysis.java | package ai.timefold.solver.core.api.score.analysis;
import java.util.Objects;
import ai.timefold.solver.core.api.score.Score;
import ai.timefold.solver.core.api.score.constraint.ConstraintRef;
import ai.timefold.solver.core.api.score.stream.ConstraintJustification;
import ai.timefold.solver.core.api.score.stream.ConstraintProvider;
import ai.timefold.solver.core.api.solver.SolutionManager;
import org.jspecify.annotations.NonNull;
/**
* Note: Users should never create instances of this type directly.
* It is available transitively via {@link SolutionManager#analyze(Object)}.
*
* @param <Score_>
*/
public record MatchAnalysis<Score_ extends Score<Score_>>(@NonNull ConstraintRef constraintRef, @NonNull Score_ score,
        @NonNull ConstraintJustification justification) implements Comparable<MatchAnalysis<Score_>> {

    public MatchAnalysis {
        Objects.requireNonNull(constraintRef);
        Objects.requireNonNull(score);
        // Null justification is impossible;
        // if the fetch policy doesn't require match analysis, the code shouldn't even get here.
        Objects.requireNonNull(justification, () -> """
                Impossible state: Received a null justification.
                Maybe check your %s's justifyWith() implementation for that constraint?"""
                .formatted(ConstraintProvider.class));
    }

    /**
     * Returns a copy of this analysis with the score negated;
     * constraint reference and justification are carried over unchanged.
     */
    MatchAnalysis<Score_> negate() {
        return new MatchAnalysis<>(constraintRef, score.negate(), justification);
    }

    /**
     * Orders by constraint reference first, then by score, then by justification (if comparable).
     * Note: if the justifications are not {@link Comparable}, two matches that only differ
     * in their justification compare as equal, which makes this ordering
     * potentially inconsistent with {@link #equals(Object)}.
     */
    @Override
    @SuppressWarnings({ "unchecked", "rawtypes" }) // Justification comparability is only known at runtime.
    public int compareTo(MatchAnalysis<Score_> other) {
        int constraintRefComparison = this.constraintRef.compareTo(other.constraintRef);
        if (constraintRefComparison != 0) {
            return constraintRefComparison;
        }
        int scoreComparison = this.score.compareTo(other.score);
        if (scoreComparison != 0) {
            return scoreComparison;
        }
        // Justifications are not required to implement Comparable;
        // fall back to treating the matches as equal if either side doesn't.
        if (this.justification instanceof Comparable comparableJustification
                && other.justification instanceof Comparable otherComparableJustification) {
            return comparableJustification.compareTo(otherComparableJustification);
        }
        return 0;
    }
}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/score | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/score/analysis/ScoreAnalysis.java | package ai.timefold.solver.core.api.score.analysis;
import static java.util.Comparator.comparing;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Objects;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import ai.timefold.solver.core.api.score.Score;
import ai.timefold.solver.core.api.score.ScoreExplanation;
import ai.timefold.solver.core.api.score.constraint.ConstraintRef;
import ai.timefold.solver.core.api.score.stream.Constraint;
import ai.timefold.solver.core.api.score.stream.ConstraintJustification;
import ai.timefold.solver.core.api.solver.ScoreAnalysisFetchPolicy;
import ai.timefold.solver.core.api.solver.SolutionManager;
import org.jspecify.annotations.NonNull;
import org.jspecify.annotations.Nullable;
/**
* Represents the breakdown of a {@link Score} into individual {@link ConstraintAnalysis} instances,
* one for each constraint.
* Compared to {@link ScoreExplanation}, this is JSON-friendly and faster to generate.
*
* <p>
* In order to be fully serializable to JSON, {@link MatchAnalysis} instances must be serializable to JSON
* and that requires any implementations of {@link ConstraintJustification} to be serializable to JSON.
* This is the responsibility of the user.
*
* <p>
* For deserialization from JSON, the user needs to provide the deserializer themselves.
* This is due to the fact that, once the {@link ScoreAnalysis} is received over the wire,
* we no longer know which {@link Score} type or {@link ConstraintJustification} type was used.
* The user has all of that information in their domain model,
* and so they are the correct party to provide the deserializer.
*
* <p>
* Note: the constructors of this record are off-limits.
* We ask users to use exclusively {@link SolutionManager#analyze(Object)} to obtain instances of this record.
*
* @param score Score of the solution being analyzed.
* @param constraintMap for each constraint identified by its {@link Constraint#getConstraintRef()},
* the {@link ConstraintAnalysis} that describes the impact of that constraint on the overall score.
* <p>
* Zero-weight constraints are never included, they are excluded from score calculation in the first place.
* Otherwise constraints are always included, even if they have no matches,
* unless the score analysis represents a diff between two other analyses.
*
* <p>
* In the case of a diff:
*
* <ul>
* <li>If the constraint weight diff is non-zero,
* or if the score diff for the constraint is non-zero,
* the constraint diff will be included.</li>
* <li>
* Otherwise if constraint matching is disabled ({@link ScoreAnalysisFetchPolicy#FETCH_SHALLOW})
* or if only match counts are available ({@link ScoreAnalysisFetchPolicy#FETCH_MATCH_COUNT}),
* constraint diff will only be included if it has a non-zero match count diff.
* </li>
* <li>
* Otherwise (when constraint matching is fully enabled with {@link ScoreAnalysisFetchPolicy#FETCH_ALL})
* the constraint diff will not be included if the diff of its constraint matches is empty.
* (In other words: when diffing, the analysis for a particular constraint won't be available
* if we can guarantee that the constraint matches are identical in both analyses.)
* </li>
* </ul>
*
* <p>
* Entries in the map have a stable iteration order; items are ordered first by {@link ConstraintAnalysis#weight()},
* then by {@link ConstraintAnalysis#constraintRef()}.
* @param isSolutionInitialized Whether the solution was fully initialized at the time of analysis.
*
* @param <Score_>
*/
public record ScoreAnalysis<Score_ extends Score<Score_>>(@NonNull Score_ score,
        @NonNull Map<ConstraintRef, ConstraintAnalysis<Score_>> constraintMap,
        boolean isSolutionInitialized) {

    @SuppressWarnings({ "unchecked", "rawtypes" })
    private static final Comparator<ConstraintAnalysis<?>> REVERSED_WEIGHT_COMPARATOR =
            Comparator.<ConstraintAnalysis<?>, Score> comparing(ConstraintAnalysis::weight)
                    .reversed();
    // Iteration order of constraintMap: heaviest constraints first, ties broken by constraint reference.
    private static final Comparator<ConstraintAnalysis<?>> MAP_COMPARATOR =
            REVERSED_WEIGHT_COMPARATOR.thenComparing(ConstraintAnalysis::constraintRef);

    // Maximum number of matches printed per constraint by summarize().
    static final int DEFAULT_SUMMARY_CONSTRAINT_MATCH_LIMIT = 3;

    /**
     * As defined by {@link #ScoreAnalysis(Score, Map, boolean)},
     * with the final argument set to true.
     */
    public ScoreAnalysis(@NonNull Score_ score, @NonNull Map<ConstraintRef, ConstraintAnalysis<Score_>> constraintMap) {
        this(score, constraintMap, true);
    }

    public ScoreAnalysis {
        Objects.requireNonNull(score, "score");
        Objects.requireNonNull(constraintMap, "constraintMap");
        // Ensure consistent order and no external interference.
        constraintMap = Collections.unmodifiableMap(constraintMap.values()
                .stream()
                .sorted(MAP_COMPARATOR)
                .collect(Collectors.toMap(
                        ConstraintAnalysis::constraintRef,
                        Function.identity(),
                        (constraintAnalysis, otherConstraintAnalysis) -> constraintAnalysis,
                        LinkedHashMap::new)));
    }

    /**
     * Performs a lookup on {@link #constraintMap()}.
     * Equivalent to {@code constraintMap().get(constraintRef)}.
     *
     * @return null if no constraint matches of such constraint are present
     */
    public @Nullable ConstraintAnalysis<Score_> getConstraintAnalysis(@NonNull ConstraintRef constraintRef) {
        return constraintMap.get(constraintRef);
    }

    /**
     * As defined by {@link #getConstraintAnalysis(ConstraintRef)}
     * where the arguments are first composed into a singular constraint ID.
     *
     * @return null if no constraint matches of such constraint are present
     * @deprecated Use {@link #getConstraintAnalysis(String)} instead.
     */
    @Deprecated(forRemoval = true, since = "1.13.0")
    public @Nullable ConstraintAnalysis<Score_> getConstraintAnalysis(@NonNull String constraintPackage,
            @NonNull String constraintName) {
        return getConstraintAnalysis(ConstraintRef.of(constraintPackage, constraintName));
    }

    /**
     * As defined by {@link #getConstraintAnalysis(ConstraintRef)}.
     *
     * @return null if no constraint matches of such constraint are present
     * @throws IllegalStateException if multiple constraints with the same name are present,
     *         which is possible if they are in different constraint packages.
     *         Constraint packages are deprecated, we recommend avoiding them and instead naming constraints uniquely.
     *         If you must use constraint packages, see {@link #getConstraintAnalysis(String, String)}
     *         (also deprecated) and reach out to us to discuss your use case.
     */
    public @Nullable ConstraintAnalysis<Score_> getConstraintAnalysis(@NonNull String constraintName) {
        // Name alone is ambiguous when constraint packages are in play; detect that case explicitly.
        var constraintAnalysisList = constraintMap.entrySet()
                .stream()
                .filter(entry -> entry.getKey().constraintName().equals(constraintName))
                .map(Map.Entry::getValue)
                .toList();
        return switch (constraintAnalysisList.size()) {
            case 0 -> null;
            case 1 -> constraintAnalysisList.get(0);
            default -> throw new IllegalStateException("""
                    Multiple constraints with the same name (%s) are present in the score analysis.
                    This may be caused by the use of multiple constraint packages, a deprecated feature.
                    Please avoid using constraint packages and keep constraint names unique."""
                    .formatted(constraintName));
        };
    }

    /**
     * Compare this {@link ScoreAnalysis} to another {@link ScoreAnalysis}
     * and retrieve the difference between them.
     * The comparison is in the direction of {@code this - other}.
     * <p>
     * Example: if {@code this} has a score of 100 and {@code other} has a score of 90,
     * the returned {@link ScoreAnalysis#score} will be 10.
     * If this and other were inverted, the score would have been -10.
     * The same applies to all other properties of {@link ScoreAnalysis}.
     *
     * <p>
     * In order to properly diff {@link MatchAnalysis} against each other,
     * we rely on the user implementing {@link ConstraintJustification} equality correctly.
     * In other words, the diff will consider two justifications equal if the user says they are equal,
     * and it expects the hash code to be consistent with equals.
     *
     * <p>
     * If one {@link ScoreAnalysis} provides {@link MatchAnalysis} and the other doesn't, exception is thrown.
     * Such {@link ScoreAnalysis} instances are mutually incompatible.
     *
     * <p>
     * If {@code this} came from a fully initialized solution,
     * {@link #isSolutionInitialized} will be true.
     * False otherwise.
     */
    public @NonNull ScoreAnalysis<Score_> diff(@NonNull ScoreAnalysis<Score_> other) {
        var result = Stream.concat(constraintMap.keySet().stream(),
                other.constraintMap.keySet().stream())
                .distinct()
                .flatMap(constraintRef -> {
                    var constraintAnalysis = getConstraintAnalysis(constraintRef);
                    var otherConstraintAnalysis = other.getConstraintAnalysis(constraintRef);
                    var diff = ConstraintAnalysis.diff(constraintRef, constraintAnalysis, otherConstraintAnalysis);
                    // The following code implements logic to decide which information the user needs to see,
                    // and which is information we can safely discard.
                    // This is done so that the diff (which is likely to be serialized into JSON) is not bloated.
                    if (!diff.weight().isZero() || !diff.score().isZero()) { // Guaranteed change.
                        return Stream.of(diff);
                    }
                    // Figuring out whether constraint matches changed is tricky.
                    // Can't use constraint weight; weight diff on the same constraint is zero if weight unchanged.
                    // Can't use matchCount; matchCount diff can be zero if one match was added and another removed.
                    // To detect if the constraint matches changed, we use the actual match diff.
                    if (diff.matches() == null) {
                        // If it is null, either justifications are disabled,
                        // or constraint matching is disabled altogether.
                        // This means we don't have enough information to make smarter decisions.
                        if (diff.matchCount() == 0) {
                            // Returning this makes no practical sense.
                            // The result would be constraint name + zero weight + zero score + zero match count.
                            return Stream.empty();
                        } else {
                            return Stream.of(diff);
                        }
                    } else if (!diff.matches().isEmpty()) {
                        // We actually have constraint matches, and they are meaningfully different.
                        return Stream.of(diff);
                    } else {
                        // This will be empty only if all matches are exactly the same.
                        return Stream.empty();
                    }
                })
                .collect(Collectors.toMap(
                        ConstraintAnalysis::constraintRef,
                        Function.identity(),
                        // Keys are distinct (see .distinct() above), so this merge function never actually runs.
                        // Renamed the parameters: they are ConstraintAnalysis values, not constraint references.
                        (constraintAnalysis, otherConstraintAnalysis) -> constraintAnalysis,
                        HashMap::new));
        return new ScoreAnalysis<>(score.subtract(other.score()), result, isSolutionInitialized);
    }

    /**
     * Returns individual {@link ConstraintAnalysis} instances that make up this {@link ScoreAnalysis}.
     *
     * @return equivalent to {@code constraintMap().values()}
     */
    public @NonNull Collection<ConstraintAnalysis<Score_>> constraintAnalyses() {
        return constraintMap.values();
    }

    /**
     * Returns a diagnostic text that explains the solution through the {@link ConstraintAnalysis} API to identify which
     * constraints cause that score quality.
     * The string is built fresh every time the method is called.
     * <p>
     * In case of an {@link Score#isFeasible() infeasible} solution, this can help diagnose the cause of that.
     *
     * <p>
     * Do not parse the return value, its format may change without warning.
     * Instead, provide this information in a UI or a service,
     * use {@link ScoreAnalysis#constraintAnalyses()}
     * and convert those into a domain-specific API.
     *
     * @throws IllegalArgumentException if match analysis was not requested;
     *         use {@code ScoreAnalysisFetchPolicy.FETCH_ALL} to make matches available.
     */
    @SuppressWarnings("java:S3457")
    public @NonNull String summarize() {
        StringBuilder summary = new StringBuilder();
        summary.append("""
                Explanation of score (%s):
                    Constraint matches:
                """.formatted(score));
        Comparator<ConstraintAnalysis<Score_>> constraintsScoreComparator = comparing(ConstraintAnalysis::score);
        Comparator<MatchAnalysis<Score_>> matchScoreComparator = comparing(MatchAnalysis::score);
        constraintAnalyses().stream()
                .sorted(constraintsScoreComparator)
                .forEach(constraint -> {
                    var matches = constraint.matches();
                    if (matches == null) {
                        throw new IllegalArgumentException("""
                                The constraint matches must be non-null.
                                Maybe use ScoreAnalysisFetchPolicy.FETCH_ALL to request the score analysis
                                """);
                    }
                    if (matches.isEmpty()) {
                        summary.append(
                                "%8s%s: constraint (%s) has no matches.\n".formatted(" ", constraint.score().toShortString(),
                                        constraint.constraintRef().constraintName()));
                    } else {
                        summary.append(
                                "%8s%s: constraint (%s) has %s matches:\n".formatted(" ", constraint.score().toShortString(),
                                        constraint.constraintRef().constraintName(), matches.size()));
                    }
                    // Only the first few matches are shown, to keep the summary short.
                    matches.stream()
                            .sorted(matchScoreComparator)
                            .limit(DEFAULT_SUMMARY_CONSTRAINT_MATCH_LIMIT)
                            .forEach(match -> summary
                                    .append("%12s%s: justified with (%s)\n".formatted(" ", match.score().toShortString(),
                                            match.justification())));
                    if (matches.size() > DEFAULT_SUMMARY_CONSTRAINT_MATCH_LIMIT) {
                        summary.append("%12s%s\n".formatted(" ", "..."));
                    }
                });
        return summary.toString();
    }

    @Override
    public String toString() {
        return "Score analysis of score %s with %d constraints.".formatted(score, constraintMap.size());
    }
}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/score/buildin | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/score/buildin/bendable/BendableScore.java | package ai.timefold.solver.core.api.score.buildin.bendable;
import java.util.Arrays;
import java.util.Objects;
import ai.timefold.solver.core.api.score.IBendableScore;
import ai.timefold.solver.core.api.score.Score;
import ai.timefold.solver.core.impl.score.ScoreUtil;
import ai.timefold.solver.core.impl.score.buildin.BendableScoreDefinition;
import org.jspecify.annotations.NullMarked;
/**
* This {@link Score} is based on n levels of int constraints.
* The number of levels is bendable at configuration time.
* <p>
* This class is immutable.
* <p>
* The {@link #hardLevelsSize()} and {@link #softLevelsSize()} must be the same as in the
* {@link BendableScoreDefinition} used.
*
* @see Score
*/
@NullMarked
public final class BendableScore implements IBendableScore<BendableScore> {

    /**
     * Parses the given score string into a {@link BendableScore}.
     */
    public static BendableScore parseScore(String scoreString) {
        var scoreTokens = ScoreUtil.parseBendableScoreTokens(BendableScore.class, scoreString);
        var hardScores = new int[scoreTokens[0].length];
        for (var i = 0; i < hardScores.length; i++) {
            hardScores[i] = ScoreUtil.parseLevelAsInt(BendableScore.class, scoreString, scoreTokens[0][i]);
        }
        var softScores = new int[scoreTokens[1].length];
        for (var i = 0; i < softScores.length; i++) {
            softScores[i] = ScoreUtil.parseLevelAsInt(BendableScore.class, scoreString, scoreTokens[1][i]);
        }
        return of(hardScores, softScores);
    }

    /**
     * @deprecated Use {@link #of(int[], int[])} instead.
     * @return init score is always zero
     */
    @Deprecated(forRemoval = true, since = "1.22.0")
    public static BendableScore ofUninitialized(int initScore, int[] hardScores, int[] softScores) {
        return BendableScore.of(hardScores, softScores); // initScore is deliberately ignored.
    }

    /**
     * Creates a new {@link BendableScore}.
     *
     * @param hardScores never change that array afterwards: it must be immutable
     * @param softScores never change that array afterwards: it must be immutable
     */
    public static BendableScore of(int[] hardScores, int[] softScores) {
        // The arrays are stored without a defensive copy; the caller hands over ownership.
        return new BendableScore(hardScores, softScores);
    }

    /**
     * Creates a new {@link BendableScore} where every hard and soft level is zero.
     *
     * @param hardLevelsSize at least 0
     * @param softLevelsSize at least 0
     */
    public static BendableScore zero(int hardLevelsSize, int softLevelsSize) {
        return new BendableScore(new int[hardLevelsSize], new int[softLevelsSize]);
    }

    /**
     * Creates a new {@link BendableScore} with a single non-zero hard level.
     *
     * @param hardLevelsSize at least 0
     * @param softLevelsSize at least 0
     * @param hardLevel at least 0, less than hardLevelsSize
     * @param hardScore any
     */
    public static BendableScore ofHard(int hardLevelsSize, int softLevelsSize, int hardLevel, int hardScore) {
        var hardScores = new int[hardLevelsSize];
        hardScores[hardLevel] = hardScore;
        return new BendableScore(hardScores, new int[softLevelsSize]);
    }

    /**
     * Creates a new {@link BendableScore} with a single non-zero soft level.
     *
     * @param hardLevelsSize at least 0
     * @param softLevelsSize at least 0
     * @param softLevel at least 0, less than softLevelsSize
     * @param softScore any
     */
    public static BendableScore ofSoft(int hardLevelsSize, int softLevelsSize, int softLevel, int softScore) {
        var softScores = new int[softLevelsSize];
        softScores[softLevel] = softScore;
        return new BendableScore(new int[hardLevelsSize], softScores);
    }

    private final int[] hardScores; // Never modified after construction; accessors hand out copies.
    private final int[] softScores; // Never modified after construction; accessors hand out copies.

    /**
     * Private default constructor for default marshalling/unmarshalling of unknown frameworks that use reflection.
     * Such integration is always inferior to the specialized integration modules, such as
     * timefold-solver-jpa, timefold-solver-jackson, timefold-solver-jaxb, ...
     */
    @SuppressWarnings("unused")
    private BendableScore() {
        this(new int[] {}, new int[] {}); // Zero hard and zero soft levels.
    }

    /**
     * Arrays are stored as-is; callers must never mutate them afterwards.
     */
    private BendableScore(int[] hardScores, int[] softScores) {
        this.hardScores = hardScores;
        this.softScores = softScores;
    }

    /**
     * @return array copy because this class is immutable
     */
    public int[] hardScores() {
        return Arrays.copyOf(hardScores, hardScores.length);
    }

    /**
     * As defined by {@link #hardScores()}.
     *
     * @deprecated Use {@link #hardScores()} instead.
     */
    @Deprecated(forRemoval = true)
    public int[] getHardScores() {
        return hardScores();
    }

    /**
     * @return array copy because this class is immutable
     */
    public int[] softScores() {
        return Arrays.copyOf(softScores, softScores.length);
    }

    /**
     * As defined by {@link #softScores()}.
     *
     * @deprecated Use {@link #softScores()} instead.
     */
    @Deprecated(forRemoval = true)
    public int[] getSoftScores() {
        return softScores();
    }

    @Override
    public int hardLevelsSize() {
        return hardScores.length;
    }

    /**
     * @param hardLevel {@code 0 <= hardLevel <} {@link #hardLevelsSize()}.
     *        The {@code scoreLevel} is {@code hardLevel} for hard levels and {@code softLevel + hardLevelSize} for soft levels.
     * @return higher is better
     */
    public int hardScore(int hardLevel) {
        return hardScores[hardLevel];
    }

    /**
     * As defined by {@link #hardScore(int)}.
     *
     * @deprecated Use {@link #hardScore(int)} instead.
     */
    @Deprecated(forRemoval = true)
    public int getHardScore(int hardLevel) {
        return hardScore(hardLevel);
    }

    @Override
    public int softLevelsSize() {
        return softScores.length;
    }

    /**
     * @param softLevel {@code 0 <= softLevel <} {@link #softLevelsSize()}.
     *        The {@code scoreLevel} is {@code hardLevel} for hard levels and {@code softLevel + hardLevelSize} for soft levels.
     * @return higher is better
     */
    public int softScore(int softLevel) {
        return softScores[softLevel];
    }

    /**
     * As defined by {@link #softScore(int)}.
     *
     * @deprecated Use {@link #softScore(int)} instead.
     */
    @Deprecated(forRemoval = true)
    public int getSoftScore(int hardLevel) {
        return softScore(hardLevel);
    }

    /**
     * @param level {@code 0 <= level <} {@link #levelsSize()}
     * @return higher is better
     */
    public int hardOrSoftScore(int level) {
        // Hard levels come first; soft levels follow, offset by the hard level count.
        if (level < hardScores.length) {
            return hardScores[level];
        } else {
            return softScores[level - hardScores.length];
        }
    }

    /**
     * As defined by {@link #hardOrSoftScore(int)}.
     *
     * @deprecated Use {@link #hardOrSoftScore(int)} instead.
     */
    @Deprecated(forRemoval = true)
    public int getHardOrSoftScore(int level) {
        return hardOrSoftScore(level);
    }

    @Override
    public boolean isFeasible() {
        // Feasible when no hard level is negative; soft levels are ignored.
        return Arrays.stream(hardScores).allMatch(hardScore -> hardScore >= 0);
    }

    @Override
    public BendableScore add(BendableScore addend) {
        validateCompatible(addend);
        var newHardScores = new int[hardScores.length];
        var newSoftScores = new int[softScores.length];
        for (var i = 0; i < newHardScores.length; i++) {
            newHardScores[i] = hardScores[i] + addend.hardScore(i);
        }
        for (var i = 0; i < newSoftScores.length; i++) {
            newSoftScores[i] = softScores[i] + addend.softScore(i);
        }
        return new BendableScore(newHardScores, newSoftScores);
    }

    @Override
    public BendableScore subtract(BendableScore subtrahend) {
        validateCompatible(subtrahend);
        var newHardScores = new int[hardScores.length];
        var newSoftScores = new int[softScores.length];
        for (var i = 0; i < newHardScores.length; i++) {
            newHardScores[i] = hardScores[i] - subtrahend.hardScore(i);
        }
        for (var i = 0; i < newSoftScores.length; i++) {
            newSoftScores[i] = softScores[i] - subtrahend.softScore(i);
        }
        return new BendableScore(newHardScores, newSoftScores);
    }

    @Override
    public BendableScore multiply(double multiplicand) {
        // Each level is multiplied independently; results round towards negative infinity.
        var newHardScores = new int[hardScores.length];
        var newSoftScores = new int[softScores.length];
        for (var i = 0; i < newHardScores.length; i++) {
            newHardScores[i] = (int) Math.floor(hardScores[i] * multiplicand);
        }
        for (var i = 0; i < newSoftScores.length; i++) {
            newSoftScores[i] = (int) Math.floor(softScores[i] * multiplicand);
        }
        return new BendableScore(newHardScores, newSoftScores);
    }

    @Override
    public BendableScore divide(double divisor) {
        // Each level is divided independently; results round towards negative infinity.
        var newHardScores = new int[hardScores.length];
        var newSoftScores = new int[softScores.length];
        for (var i = 0; i < newHardScores.length; i++) {
            newHardScores[i] = (int) Math.floor(hardScores[i] / divisor);
        }
        for (var i = 0; i < newSoftScores.length; i++) {
            newSoftScores[i] = (int) Math.floor(softScores[i] / divisor);
        }
        return new BendableScore(newHardScores, newSoftScores);
    }

    @Override
    public BendableScore power(double exponent) {
        // Each level is raised independently; results round towards negative infinity.
        var newHardScores = new int[hardScores.length];
        var newSoftScores = new int[softScores.length];
        for (var i = 0; i < newHardScores.length; i++) {
            newHardScores[i] = (int) Math.floor(Math.pow(hardScores[i], exponent));
        }
        for (var i = 0; i < newSoftScores.length; i++) {
            newSoftScores[i] = (int) Math.floor(Math.pow(softScores[i], exponent));
        }
        return new BendableScore(newHardScores, newSoftScores);
    }

    @Override
    public BendableScore negate() { // Overridden as the default impl would create zero() all the time.
        var newHardScores = new int[hardScores.length];
        var newSoftScores = new int[softScores.length];
        for (var i = 0; i < newHardScores.length; i++) {
            newHardScores[i] = -hardScores[i];
        }
        for (var i = 0; i < newSoftScores.length; i++) {
            newSoftScores[i] = -softScores[i];
        }
        return new BendableScore(newHardScores, newSoftScores);
    }

    @Override
    public BendableScore abs() {
        var newHardScores = new int[hardScores.length];
        var newSoftScores = new int[softScores.length];
        for (var i = 0; i < newHardScores.length; i++) {
            newHardScores[i] = Math.abs(hardScores[i]);
        }
        for (var i = 0; i < newSoftScores.length; i++) {
            newSoftScores[i] = Math.abs(softScores[i]);
        }
        return new BendableScore(newHardScores, newSoftScores);
    }

    @Override
    public BendableScore zero() {
        return BendableScore.zero(hardLevelsSize(), softLevelsSize());
    }

    @Override
    public Number[] toLevelNumbers() {
        // Hard levels first, then soft levels, boxed into a single array.
        var levelNumbers = new Number[hardScores.length + softScores.length];
        for (var i = 0; i < hardScores.length; i++) {
            levelNumbers[i] = hardScores[i];
        }
        for (var i = 0; i < softScores.length; i++) {
            levelNumbers[hardScores.length + i] = softScores[i];
        }
        return levelNumbers;
    }

    @Override
    public boolean equals(Object o) {
        // Arrays.equals() also covers the level-size check, as it compares array lengths first.
        return o instanceof BendableScore other
                && Arrays.equals(hardScores, other.hardScores)
                && Arrays.equals(softScores, other.softScores);
    }

    @Override
    public int hashCode() {
        return Objects.hash(Arrays.hashCode(hardScores), Arrays.hashCode(softScores));
    }

    @Override
    public int compareTo(BendableScore other) {
        validateCompatible(other);
        // Lexicographic per-level comparison; equal lengths are guaranteed by validateCompatible().
        var hardComparison = Arrays.compare(hardScores, other.hardScores);
        if (hardComparison != 0) {
            return hardComparison;
        }
        return Arrays.compare(softScores, other.softScores);
    }

    @Override
    public String toShortString() {
        return ScoreUtil.buildBendableShortString(this, n -> n.intValue() != 0);
    }

    @Override
    public String toString() {
        // Format: [h0/h1/...]hard/[s0/s1/...]soft
        var s = new StringBuilder(((hardScores.length + softScores.length) * 4) + 7);
        s.append("[");
        var first = true;
        for (var hardScore : hardScores) {
            if (first) {
                first = false;
            } else {
                s.append("/");
            }
            s.append(hardScore);
        }
        s.append("]hard/[");
        first = true;
        for (var softScore : softScores) {
            if (first) {
                first = false;
            } else {
                s.append("/");
            }
            s.append(softScore);
        }
        s.append("]soft");
        return s.toString();
    }

    /**
     * Fails fast when the other score has a different number of hard or soft levels.
     *
     * @throws IllegalArgumentException if the level sizes differ
     */
    public void validateCompatible(BendableScore other) {
        if (hardLevelsSize() != other.hardLevelsSize()) {
            throw new IllegalArgumentException("The score (" + this
                    + ") with hardScoreSize (" + hardLevelsSize()
                    + ") is not compatible with the other score (" + other
                    + ") with hardScoreSize (" + other.hardLevelsSize() + ").");
        }
        if (softLevelsSize() != other.softLevelsSize()) {
            throw new IllegalArgumentException("The score (" + this
                    + ") with softScoreSize (" + softLevelsSize()
                    + ") is not compatible with the other score (" + other
                    + ") with softScoreSize (" + other.softLevelsSize() + ").");
        }
    }
}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/score/buildin | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/score/buildin/bendablebigdecimal/BendableBigDecimalScore.java | package ai.timefold.solver.core.api.score.buildin.bendablebigdecimal;
import java.math.BigDecimal;
import java.math.RoundingMode;
import java.util.Arrays;
import java.util.stream.Stream;
import ai.timefold.solver.core.api.score.IBendableScore;
import ai.timefold.solver.core.api.score.Score;
import ai.timefold.solver.core.impl.score.ScoreUtil;
import ai.timefold.solver.core.impl.score.buildin.BendableScoreDefinition;
import org.jspecify.annotations.NullMarked;
/**
* This {@link Score} is based on n levels of {@link BigDecimal} constraints.
* The number of levels is bendable at configuration time.
* <p>
* This class is immutable.
* <p>
* The {@link #hardLevelsSize()} and {@link #softLevelsSize()} must be the same as in the
* {@link BendableScoreDefinition} used.
*
* @see Score
*/
@NullMarked
public final class BendableBigDecimalScore implements IBendableScore<BendableBigDecimalScore> {
/**
 * Parses the given score string into a {@link BendableBigDecimalScore}.
 */
public static BendableBigDecimalScore parseScore(String scoreString) {
    var tokens = ScoreUtil.parseBendableScoreTokens(BendableBigDecimalScore.class, scoreString);
    var hardLevels = new BigDecimal[tokens[0].length];
    for (var level = 0; level < hardLevels.length; level++) {
        hardLevels[level] = ScoreUtil.parseLevelAsBigDecimal(BendableBigDecimalScore.class, scoreString,
                tokens[0][level]);
    }
    var softLevels = new BigDecimal[tokens[1].length];
    for (var level = 0; level < softLevels.length; level++) {
        softLevels[level] = ScoreUtil.parseLevelAsBigDecimal(BendableBigDecimalScore.class, scoreString,
                tokens[1][level]);
    }
    return of(hardLevels, softLevels);
}
/**
 * @deprecated Use {@link #of(BigDecimal[], BigDecimal[])} instead.
 * @return init score is always zero
 */
@Deprecated(forRemoval = true, since = "1.22.0")
public static BendableBigDecimalScore ofUninitialized(int initScore, BigDecimal[] hardScores, BigDecimal[] softScores) {
    return BendableBigDecimalScore.of(hardScores, softScores); // initScore is deliberately ignored.
}
/**
 * Creates a new {@link BendableBigDecimalScore}.
 *
 * @param hardScores never change that array afterwards: it must be immutable
 * @param softScores never change that array afterwards: it must be immutable
 */
public static BendableBigDecimalScore of(BigDecimal[] hardScores, BigDecimal[] softScores) {
    // The arrays are stored without a defensive copy; the caller hands over ownership.
    return new BendableBigDecimalScore(hardScores, softScores);
}
/**
 * Creates a new {@link BendableBigDecimalScore} where every hard and soft level is {@link BigDecimal#ZERO}.
 *
 * @param hardLevelsSize at least 0
 * @param softLevelsSize at least 0
 */
public static BendableBigDecimalScore zero(int hardLevelsSize, int softLevelsSize) {
    var hardScores = new BigDecimal[hardLevelsSize];
    var softScores = new BigDecimal[softLevelsSize];
    Arrays.fill(hardScores, BigDecimal.ZERO);
    Arrays.fill(softScores, BigDecimal.ZERO);
    return new BendableBigDecimalScore(hardScores, softScores);
}
/**
 * Creates a new {@link BendableBigDecimalScore} where all levels are {@link BigDecimal#ZERO},
 * except for a single hard level.
 *
 * @param hardLevelsSize at least 0
 * @param softLevelsSize at least 0
 * @param hardLevel at least 0, less than hardLevelsSize
 * @param hardScore the value for the given hard level
 */
public static BendableBigDecimalScore ofHard(int hardLevelsSize, int softLevelsSize, int hardLevel, BigDecimal hardScore) {
    var hardScores = new BigDecimal[hardLevelsSize];
    var softScores = new BigDecimal[softLevelsSize];
    Arrays.fill(hardScores, BigDecimal.ZERO);
    Arrays.fill(softScores, BigDecimal.ZERO);
    hardScores[hardLevel] = hardScore;
    return new BendableBigDecimalScore(hardScores, softScores);
}
/**
 * Creates a new {@link BendableBigDecimalScore} where all levels are {@link BigDecimal#ZERO},
 * except for a single soft level.
 *
 * @param hardLevelsSize at least 0
 * @param softLevelsSize at least 0
 * @param softLevel at least 0, less than softLevelsSize
 * @param softScore the value for the given soft level
 */
public static BendableBigDecimalScore ofSoft(int hardLevelsSize, int softLevelsSize, int softLevel, BigDecimal softScore) {
    var hardScores = new BigDecimal[hardLevelsSize];
    var softScores = new BigDecimal[softLevelsSize];
    Arrays.fill(hardScores, BigDecimal.ZERO);
    Arrays.fill(softScores, BigDecimal.ZERO);
    softScores[softLevel] = softScore;
    return new BendableBigDecimalScore(hardScores, softScores);
}
private final BigDecimal[] hardScores; // Never modified after construction; accessors hand out copies.
private final BigDecimal[] softScores; // Never modified after construction; accessors hand out copies.
/**
 * Private default constructor for default marshalling/unmarshalling of unknown frameworks that use reflection.
 * Such integration is always inferior to the specialized integration modules, such as
 * timefold-solver-jpa, timefold-solver-jackson, timefold-solver-jaxb, ...
 */
@SuppressWarnings("unused")
private BendableBigDecimalScore() {
    this(new BigDecimal[] {}, new BigDecimal[] {}); // Zero hard and zero soft levels.
}
/**
*
*/
private BendableBigDecimalScore(BigDecimal[] hardScores, BigDecimal[] softScores) {
this.hardScores = hardScores;
this.softScores = softScores;
}
/**
 * @return array copy because this class is immutable
 */
public BigDecimal[] hardScores() {
    return hardScores.clone();
}
/**
 * As defined by {@link #hardScores()}.
 *
 * @deprecated Use {@link #hardScores()} instead.
 */
@Deprecated(forRemoval = true)
public BigDecimal[] getHardScores() {
    return hardScores();
}
/**
 * @return array copy because this class is immutable
 */
public BigDecimal[] softScores() {
    return softScores.clone();
}
/**
 * As defined by {@link #softScores()}.
 *
 * @deprecated Use {@link #softScores()} instead.
 */
@Deprecated(forRemoval = true)
public BigDecimal[] getSoftScores() {
    return softScores();
}
@Override
public int hardLevelsSize() {
    // The number of hard levels is fixed by the length of the hard score array.
    return hardScores.length;
}
/**
 * @param index {@code 0 <= index <} {@link #hardLevelsSize()}
 * @return higher is better
 */
public BigDecimal hardScore(int index) {
    return hardScores[index];
}
/**
 * As defined by {@link #hardScore(int)}.
 *
 * @deprecated Use {@link #hardScore(int)} instead.
 */
@Deprecated(forRemoval = true)
public BigDecimal getHardScore(int index) {
    return hardScore(index);
}
@Override
public int softLevelsSize() {
    // The number of soft levels is fixed by the length of the soft score array.
    return softScores.length;
}
/**
 * @param index {@code 0 <= index <} {@link #softLevelsSize()}
 * @return higher is better
 */
public BigDecimal softScore(int index) {
    return softScores[index];
}
/**
 * As defined by {@link #softScore(int)}.
 *
 * @deprecated Use {@link #softScore(int)} instead.
 */
@Deprecated(forRemoval = true)
public BigDecimal getSoftScore(int index) {
    return softScore(index);
}
/**
 * Looks up a level across both arrays: indexes below the hard level count address
 * the hard scores, the rest address the soft scores.
 *
 * @param index {@code 0 <= index <} {@link #levelsSize()}
 * @return higher is better
 */
public BigDecimal hardOrSoftScore(int index) {
    var hardLevelCount = hardScores.length;
    return index < hardLevelCount
            ? hardScores[index]
            : softScores[index - hardLevelCount];
}
/**
 * As defined by {@link #hardOrSoftScore(int)}.
 *
 * @deprecated Use {@link #hardOrSoftScore(int)} instead.
 */
@Deprecated(forRemoval = true)
public BigDecimal getHardOrSoftScore(int index) {
    return hardOrSoftScore(index);
}
@Override
public boolean isFeasible() {
    // Feasible when not a single hard level is negative.
    for (var level = 0; level < hardScores.length; level++) {
        if (hardScores[level].signum() < 0) {
            return false;
        }
    }
    return true;
}
@Override
public BendableBigDecimalScore add(BendableBigDecimalScore addend) {
    // Both scores must have identical level counts.
    validateCompatible(addend);
    var summedHardScores = new BigDecimal[hardScores.length];
    var summedSoftScores = new BigDecimal[softScores.length];
    for (var level = 0; level < summedHardScores.length; level++) {
        summedHardScores[level] = hardScores[level].add(addend.hardScore(level));
    }
    for (var level = 0; level < summedSoftScores.length; level++) {
        summedSoftScores[level] = softScores[level].add(addend.softScore(level));
    }
    return new BendableBigDecimalScore(summedHardScores, summedSoftScores);
}
@Override
public BendableBigDecimalScore subtract(BendableBigDecimalScore subtrahend) {
    // Both scores must have identical level counts.
    validateCompatible(subtrahend);
    var differenceHardScores = new BigDecimal[hardScores.length];
    var differenceSoftScores = new BigDecimal[softScores.length];
    for (var level = 0; level < differenceHardScores.length; level++) {
        differenceHardScores[level] = hardScores[level].subtract(subtrahend.hardScore(level));
    }
    for (var level = 0; level < differenceSoftScores.length; level++) {
        differenceSoftScores[level] = softScores[level].subtract(subtrahend.softScore(level));
    }
    return new BendableBigDecimalScore(differenceHardScores, differenceSoftScores);
}
@Override
public BendableBigDecimalScore multiply(double multiplicand) {
    var newHardScores = new BigDecimal[hardScores.length];
    var newSoftScores = new BigDecimal[softScores.length];
    var bigDecimalMultiplicand = BigDecimal.valueOf(multiplicand);
    for (var i = 0; i < newHardScores.length; i++) {
        // The (unspecified) scale/precision of the multiplicand should have no impact on the returned scale/precision
        // setScale(..., FLOOR) rounds towards negative infinity, keeping each level's original scale.
        newHardScores[i] = hardScores[i].multiply(bigDecimalMultiplicand).setScale(hardScores[i].scale(),
                RoundingMode.FLOOR);
    }
    for (var i = 0; i < newSoftScores.length; i++) {
        // The (unspecified) scale/precision of the multiplicand should have no impact on the returned scale/precision
        newSoftScores[i] = softScores[i].multiply(bigDecimalMultiplicand).setScale(softScores[i].scale(),
                RoundingMode.FLOOR);
    }
    return new BendableBigDecimalScore(
            newHardScores, newSoftScores);
}
@Override
public BendableBigDecimalScore divide(double divisor) {
    var newHardScores = new BigDecimal[hardScores.length];
    var newSoftScores = new BigDecimal[softScores.length];
    var bigDecimalDivisor = BigDecimal.valueOf(divisor);
    for (var i = 0; i < newHardScores.length; i++) {
        var hardScore = hardScores[i];
        // Divide at the level's own scale, rounding towards negative infinity.
        newHardScores[i] = hardScore.divide(bigDecimalDivisor, hardScore.scale(), RoundingMode.FLOOR);
    }
    for (var i = 0; i < newSoftScores.length; i++) {
        var softScore = softScores[i];
        newSoftScores[i] = softScore.divide(bigDecimalDivisor, softScore.scale(), RoundingMode.FLOOR);
    }
    return new BendableBigDecimalScore(
            newHardScores, newSoftScores);
}
@Override
public BendableBigDecimalScore power(double exponent) {
    var newHardScores = new BigDecimal[hardScores.length];
    var newSoftScores = new BigDecimal[softScores.length];
    var actualExponent = BigDecimal.valueOf(exponent);
    // The (unspecified) scale/precision of the exponent should have no impact on the returned scale/precision
    // TODO FIXME remove .intValue() so non-integer exponents produce correct results
    // None of the normal Java libraries support BigDecimal.pow(BigDecimal)
    // NOTE: the exponent is truncated to an int, so fractional exponents silently lose their fraction.
    for (var i = 0; i < newHardScores.length; i++) {
        var hardScore = hardScores[i];
        newHardScores[i] = hardScore.pow(actualExponent.intValue()).setScale(hardScore.scale(), RoundingMode.FLOOR);
    }
    for (var i = 0; i < newSoftScores.length; i++) {
        var softScore = softScores[i];
        newSoftScores[i] = softScore.pow(actualExponent.intValue()).setScale(softScore.scale(), RoundingMode.FLOOR);
    }
    return new BendableBigDecimalScore(
            newHardScores, newSoftScores);
}
@Override
public BendableBigDecimalScore negate() { // Overridden as the default impl would create zero() all the time.
    var negatedHardScores = new BigDecimal[hardScores.length];
    var negatedSoftScores = new BigDecimal[softScores.length];
    for (var level = 0; level < negatedHardScores.length; level++) {
        negatedHardScores[level] = hardScores[level].negate();
    }
    for (var level = 0; level < negatedSoftScores.length; level++) {
        negatedSoftScores[level] = softScores[level].negate();
    }
    return new BendableBigDecimalScore(negatedHardScores, negatedSoftScores);
}
@Override
public BendableBigDecimalScore abs() {
    // Absolute value of each individual level.
    var absoluteHardScores = new BigDecimal[hardScores.length];
    var absoluteSoftScores = new BigDecimal[softScores.length];
    for (var level = 0; level < absoluteHardScores.length; level++) {
        absoluteHardScores[level] = hardScores[level].abs();
    }
    for (var level = 0; level < absoluteSoftScores.length; level++) {
        absoluteSoftScores[level] = softScores[level].abs();
    }
    return new BendableBigDecimalScore(absoluteHardScores, absoluteSoftScores);
}
@Override
public BendableBigDecimalScore zero() {
    // A zero score with the same hard/soft level counts as this one.
    return BendableBigDecimalScore.zero(hardLevelsSize(), softLevelsSize());
}
@Override
public Number[] toLevelNumbers() {
    // All hard levels first, then all soft levels, in order.
    return Stream.concat(Arrays.stream(hardScores), Arrays.stream(softScores))
            .toArray(Number[]::new);
}
@Override
public boolean equals(Object o) {
    if (o instanceof BendableBigDecimalScore other) {
        if (hardLevelsSize() != other.hardLevelsSize()
                || softLevelsSize() != other.softLevelsSize()) {
            return false;
        }
        // Compare via stripTrailingZeros() so that, for example, 1.0 equals 1.00,
        // since BigDecimal.equals() otherwise also compares scale.
        // Must stay consistent with hashCode(), which strips trailing zeros too.
        for (var i = 0; i < hardScores.length; i++) {
            if (!hardScores[i].stripTrailingZeros().equals(other.hardScore(i).stripTrailingZeros())) {
                return false;
            }
        }
        for (var i = 0; i < softScores.length; i++) {
            if (!softScores[i].stripTrailingZeros().equals(other.softScore(i).stripTrailingZeros())) {
                return false;
            }
        }
        return true;
    }
    return false;
}
@Override
public int hashCode() {
    // Strip trailing zeros before hashing, to stay consistent with equals(),
    // which considers 1.0 and 1.00 equal.
    var scoreHashCodes = Stream.concat(Arrays.stream(hardScores), Arrays.stream(softScores))
            .map(BigDecimal::stripTrailingZeros)
            .mapToInt(BigDecimal::hashCode)
            .toArray();
    return Arrays.hashCode(scoreHashCodes);
}
@Override
public int compareTo(BendableBigDecimalScore other) {
    validateCompatible(other);
    // Hard levels dominate soft levels; within each kind, lower indexes dominate higher ones.
    for (var level = 0; level < hardScores.length; level++) {
        var comparison = hardScores[level].compareTo(other.hardScore(level));
        if (comparison != 0) {
            return comparison;
        }
    }
    for (var level = 0; level < softScores.length; level++) {
        var comparison = softScores[level].compareTo(other.softScore(level));
        if (comparison != 0) {
            return comparison;
        }
    }
    return 0;
}
@Override
public String toShortString() {
    // Only levels with a non-zero value are rendered in the short form.
    return ScoreUtil.buildBendableShortString(this, n -> ((BigDecimal) n).compareTo(BigDecimal.ZERO) != 0);
}
@Override
public String toString() {
    // Renders e.g. "[0/0]hard/[-1]soft"; levels within a group are separated by "/".
    var result = new StringBuilder(((hardScores.length + softScores.length) * 4) + 7);
    result.append("[");
    for (var i = 0; i < hardScores.length; i++) {
        if (i > 0) {
            result.append("/");
        }
        result.append(hardScores[i]);
    }
    result.append("]hard/[");
    for (var i = 0; i < softScores.length; i++) {
        if (i > 0) {
            result.append("/");
        }
        result.append(softScores[i]);
    }
    result.append("]soft");
    return result.toString();
}
/**
 * Asserts that the other score has the same number of hard and soft levels as this one,
 * which is a precondition for all level-wise operations (add, subtract, compareTo, ...).
 *
 * @throws IllegalArgumentException when the hard or soft level counts differ
 */
public void validateCompatible(BendableBigDecimalScore other) {
    if (hardLevelsSize() != other.hardLevelsSize()) {
        throw new IllegalArgumentException("The score (" + this
                + ") with hardScoreSize (" + hardLevelsSize()
                + ") is not compatible with the other score (" + other
                + ") with hardScoreSize (" + other.hardLevelsSize() + ").");
    }
    if (softLevelsSize() != other.softLevelsSize()) {
        throw new IllegalArgumentException("The score (" + this
                + ") with softScoreSize (" + softLevelsSize()
                + ") is not compatible with the other score (" + other
                + ") with softScoreSize (" + other.softLevelsSize() + ").");
    }
}
}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/score/buildin | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/score/buildin/bendablelong/BendableLongScore.java | package ai.timefold.solver.core.api.score.buildin.bendablelong;
import java.util.Arrays;
import java.util.Objects;
import ai.timefold.solver.core.api.score.IBendableScore;
import ai.timefold.solver.core.api.score.Score;
import ai.timefold.solver.core.impl.score.ScoreUtil;
import ai.timefold.solver.core.impl.score.buildin.BendableLongScoreDefinition;
import org.jspecify.annotations.NullMarked;
/**
* This {@link Score} is based on n levels of long constraints.
* The number of levels is bendable at configuration time.
* <p>
* This class is immutable.
* <p>
* The {@link #hardLevelsSize()} and {@link #softLevelsSize()} must be the same as in the
* {@link BendableLongScoreDefinition} used.
*
* @see Score
*/
@NullMarked
public final class BendableLongScore implements IBendableScore<BendableLongScore> {
    /**
     * Parses a {@link BendableLongScore} from a {@link String};
     * tokenizing and per-level parsing are delegated to {@link ScoreUtil}.
     */
    public static BendableLongScore parseScore(String scoreString) {
        var scoreTokens = ScoreUtil.parseBendableScoreTokens(BendableLongScore.class, scoreString);
        // scoreTokens[0] holds the hard level tokens, scoreTokens[1] the soft level tokens.
        var hardScores = new long[scoreTokens[0].length];
        for (var i = 0; i < hardScores.length; i++) {
            hardScores[i] = ScoreUtil.parseLevelAsLong(BendableLongScore.class, scoreString, scoreTokens[0][i]);
        }
        var softScores = new long[scoreTokens[1].length];
        for (var i = 0; i < softScores.length; i++) {
            softScores[i] = ScoreUtil.parseLevelAsLong(BendableLongScore.class, scoreString, scoreTokens[1][i]);
        }
        return of(hardScores, softScores);
    }
    /**
     * @deprecated Use {@link #of(long[], long[])} instead.
     * @return init score is always zero
     */
    @Deprecated(forRemoval = true, since = "1.22.0")
    public static BendableLongScore ofUninitialized(int initScore, long[] hardScores, long[] softScores) {
        // The initScore parameter is ignored; init scores no longer exist.
        return BendableLongScore.of(hardScores, softScores);
    }
    /**
     * Creates a new {@link BendableLongScore}.
     *
     * @param hardScores never change that array afterwards: it must be immutable
     * @param softScores never change that array afterwards: it must be immutable
     */
    public static BendableLongScore of(long[] hardScores, long[] softScores) {
        return new BendableLongScore(hardScores, softScores);
    }
    /**
     * Creates a new {@link BendableLongScore}.
     *
     * @param hardLevelsSize at least 0
     * @param softLevelsSize at least 0
     */
    public static BendableLongScore zero(int hardLevelsSize, int softLevelsSize) {
        // New long arrays are zero-filled by the JVM, so no explicit fill is needed.
        return new BendableLongScore(new long[hardLevelsSize], new long[softLevelsSize]);
    }
    /**
     * Creates a new {@link BendableLongScore}.
     *
     * @param hardLevelsSize at least 0
     * @param softLevelsSize at least 0
     * @param hardLevel at least 0, less than hardLevelsSize
     * @param hardScore any
     */
    public static BendableLongScore ofHard(int hardLevelsSize, int softLevelsSize, int hardLevel, long hardScore) {
        var hardScores = new long[hardLevelsSize];
        hardScores[hardLevel] = hardScore;
        return new BendableLongScore(hardScores, new long[softLevelsSize]);
    }
    /**
     * Creates a new {@link BendableLongScore}.
     *
     * @param hardLevelsSize at least 0
     * @param softLevelsSize at least 0
     * @param softLevel at least 0, less than softLevelsSize
     * @param softScore any
     */
    public static BendableLongScore ofSoft(int hardLevelsSize, int softLevelsSize, int softLevel, long softScore) {
        var softScores = new long[softLevelsSize];
        softScores[softLevel] = softScore;
        return new BendableLongScore(new long[hardLevelsSize], softScores);
    }
    // Per-level score values; index 0 is the most important level of its kind.
    // Never exposed directly: accessors return defensive copies.
    private final long[] hardScores;
    private final long[] softScores;
    /**
     * Private default constructor for default marshalling/unmarshalling of unknown frameworks that use reflection.
     * Such integration is always inferior to the specialized integration modules, such as
     * timefold-solver-jpa, timefold-solver-jackson, timefold-solver-jaxb, ...
     */
    @SuppressWarnings("unused")
    private BendableLongScore() {
        this(new long[] {}, new long[] {});
    }
    /**
     * The given arrays are stored without copying;
     * callers must never mutate them afterwards, as this class is meant to be immutable.
     */
    private BendableLongScore(long[] hardScores, long[] softScores) {
        this.hardScores = hardScores;
        this.softScores = softScores;
    }
    /**
     * @return array copy because this class is immutable
     */
    public long[] hardScores() {
        return Arrays.copyOf(hardScores, hardScores.length);
    }
    /**
     * As defined by {@link #hardScores()}.
     *
     * @deprecated Use {@link #hardScores()} instead.
     */
    @Deprecated(forRemoval = true)
    public long[] getHardScores() {
        return hardScores();
    }
    /**
     * @return array copy because this class is immutable
     */
    public long[] softScores() {
        return Arrays.copyOf(softScores, softScores.length);
    }
    /**
     * As defined by {@link #softScores()}.
     *
     * @deprecated Use {@link #softScores()} instead.
     */
    @Deprecated(forRemoval = true)
    public long[] getSoftScores() {
        return softScores();
    }
    @Override
    public int hardLevelsSize() {
        return hardScores.length;
    }
    /**
     * @param index {@code 0 <= index <} {@link #hardLevelsSize()}
     * @return higher is better
     */
    public long hardScore(int index) {
        return hardScores[index];
    }
    /**
     * As defined by {@link #hardScore(int)}.
     *
     * @deprecated Use {@link #hardScore(int)} instead.
     */
    @Deprecated(forRemoval = true)
    public long getHardScore(int index) {
        return hardScore(index);
    }
    @Override
    public int softLevelsSize() {
        return softScores.length;
    }
    /**
     * @param index {@code 0 <= index <} {@link #softLevelsSize()}
     * @return higher is better
     */
    public long softScore(int index) {
        return softScores[index];
    }
    /**
     * As defined by {@link #softScore(int)}.
     *
     * @deprecated Use {@link #softScore(int)} instead.
     */
    @Deprecated(forRemoval = true)
    public long getSoftScore(int index) {
        return softScore(index);
    }
    /**
     * Looks up a level across both arrays:
     * indexes below {@link #hardLevelsSize()} address the hard scores, the rest address the soft scores.
     *
     * @param index {@code 0 <= index <} {@link #levelsSize()}
     * @return higher is better
     */
    public long hardOrSoftScore(int index) {
        if (index < hardScores.length) {
            return hardScores[index];
        } else {
            return softScores[index - hardScores.length];
        }
    }
    /**
     * As defined by {@link #hardOrSoftScore(int)}.
     *
     * @deprecated Use {@link #hardOrSoftScore(int)} instead.
     */
    @Deprecated(forRemoval = true)
    public long getHardOrSoftScore(int index) {
        return hardOrSoftScore(index);
    }
    @Override
    public boolean isFeasible() {
        // Feasible when no hard level is negative.
        for (var hardScore : hardScores) {
            if (hardScore < 0) {
                return false;
            }
        }
        return true;
    }
    @Override
    public BendableLongScore add(BendableLongScore addend) {
        validateCompatible(addend);
        var newHardScores = new long[hardScores.length];
        var newSoftScores = new long[softScores.length];
        for (var i = 0; i < newHardScores.length; i++) {
            newHardScores[i] = hardScores[i] + addend.hardScore(i);
        }
        for (var i = 0; i < newSoftScores.length; i++) {
            newSoftScores[i] = softScores[i] + addend.softScore(i);
        }
        return new BendableLongScore(
                newHardScores, newSoftScores);
    }
    @Override
    public BendableLongScore subtract(BendableLongScore subtrahend) {
        validateCompatible(subtrahend);
        var newHardScores = new long[hardScores.length];
        var newSoftScores = new long[softScores.length];
        for (var i = 0; i < newHardScores.length; i++) {
            newHardScores[i] = hardScores[i] - subtrahend.hardScore(i);
        }
        for (var i = 0; i < newSoftScores.length; i++) {
            newSoftScores[i] = softScores[i] - subtrahend.softScore(i);
        }
        return new BendableLongScore(
                newHardScores, newSoftScores);
    }
    @Override
    public BendableLongScore multiply(double multiplicand) {
        // Each level is multiplied in double arithmetic and floored back to a long.
        // NOTE(review): longs above 2^53 lose precision in the long->double conversion — presumably acceptable here.
        var newHardScores = new long[hardScores.length];
        var newSoftScores = new long[softScores.length];
        for (var i = 0; i < newHardScores.length; i++) {
            newHardScores[i] = (long) Math.floor(hardScores[i] * multiplicand);
        }
        for (var i = 0; i < newSoftScores.length; i++) {
            newSoftScores[i] = (long) Math.floor(softScores[i] * multiplicand);
        }
        return new BendableLongScore(
                newHardScores, newSoftScores);
    }
    @Override
    public BendableLongScore divide(double divisor) {
        // Each level is divided in double arithmetic and floored back to a long.
        var newHardScores = new long[hardScores.length];
        var newSoftScores = new long[softScores.length];
        for (var i = 0; i < newHardScores.length; i++) {
            newHardScores[i] = (long) Math.floor(hardScores[i] / divisor);
        }
        for (var i = 0; i < newSoftScores.length; i++) {
            newSoftScores[i] = (long) Math.floor(softScores[i] / divisor);
        }
        return new BendableLongScore(
                newHardScores, newSoftScores);
    }
    @Override
    public BendableLongScore power(double exponent) {
        var newHardScores = new long[hardScores.length];
        var newSoftScores = new long[softScores.length];
        for (var i = 0; i < newHardScores.length; i++) {
            newHardScores[i] = (long) Math.floor(Math.pow(hardScores[i], exponent));
        }
        for (var i = 0; i < newSoftScores.length; i++) {
            newSoftScores[i] = (long) Math.floor(Math.pow(softScores[i], exponent));
        }
        return new BendableLongScore(
                newHardScores, newSoftScores);
    }
    @Override
    public BendableLongScore negate() { // Overridden as the default impl would create zero() all the time.
        var newHardScores = new long[hardScores.length];
        var newSoftScores = new long[softScores.length];
        for (var i = 0; i < newHardScores.length; i++) {
            newHardScores[i] = -hardScores[i];
        }
        for (var i = 0; i < newSoftScores.length; i++) {
            newSoftScores[i] = -softScores[i];
        }
        return new BendableLongScore(newHardScores, newSoftScores);
    }
    @Override
    public BendableLongScore abs() {
        var newHardScores = new long[hardScores.length];
        var newSoftScores = new long[softScores.length];
        for (var i = 0; i < newHardScores.length; i++) {
            newHardScores[i] = Math.abs(hardScores[i]);
        }
        for (var i = 0; i < newSoftScores.length; i++) {
            newSoftScores[i] = Math.abs(softScores[i]);
        }
        return new BendableLongScore(newHardScores, newSoftScores);
    }
    @Override
    public BendableLongScore zero() {
        // A zero score with the same hard/soft level counts as this one.
        return BendableLongScore.zero(hardLevelsSize(), softLevelsSize());
    }
    @Override
    public Number[] toLevelNumbers() {
        // All hard levels first, then all soft levels; each long is boxed.
        var levelNumbers = new Number[hardScores.length + softScores.length];
        for (var i = 0; i < hardScores.length; i++) {
            levelNumbers[i] = hardScores[i];
        }
        for (var i = 0; i < softScores.length; i++) {
            levelNumbers[hardScores.length + i] = softScores[i];
        }
        return levelNumbers;
    }
    @Override
    public boolean equals(Object o) {
        if (o instanceof BendableLongScore other) {
            if (hardLevelsSize() != other.hardLevelsSize()
                    || softLevelsSize() != other.softLevelsSize()) {
                return false;
            }
            for (var i = 0; i < hardScores.length; i++) {
                if (hardScores[i] != other.hardScore(i)) {
                    return false;
                }
            }
            for (var i = 0; i < softScores.length; i++) {
                if (softScores[i] != other.softScore(i)) {
                    return false;
                }
            }
            return true;
        }
        return false;
    }
    @Override
    public int hashCode() {
        return Objects.hash(Arrays.hashCode(hardScores), Arrays.hashCode(softScores));
    }
    @Override
    public int compareTo(BendableLongScore other) {
        validateCompatible(other);
        // Hard levels dominate soft levels; within each kind, lower indexes dominate higher ones.
        for (var i = 0; i < hardScores.length; i++) {
            if (hardScores[i] != other.hardScore(i)) {
                return Long.compare(hardScores[i], other.hardScore(i));
            }
        }
        for (var i = 0; i < softScores.length; i++) {
            if (softScores[i] != other.softScore(i)) {
                return Long.compare(softScores[i], other.softScore(i));
            }
        }
        return 0;
    }
    @Override
    public String toShortString() {
        // Only levels with a non-zero value are rendered in the short form.
        return ScoreUtil.buildBendableShortString(this, n -> n.longValue() != 0L);
    }
    @Override
    public String toString() {
        // Renders e.g. "[0/0]hard/[-1]soft".
        var s = new StringBuilder(((hardScores.length + softScores.length) * 4) + 7);
        s.append("[");
        var first = true;
        for (var hardScore : hardScores) {
            if (first) {
                first = false;
            } else {
                s.append("/");
            }
            s.append(hardScore);
        }
        s.append("]hard/[");
        first = true;
        for (var softScore : softScores) {
            if (first) {
                first = false;
            } else {
                s.append("/");
            }
            s.append(softScore);
        }
        s.append("]soft");
        return s.toString();
    }
    /**
     * Asserts that the other score has the same number of hard and soft levels as this one,
     * which is a precondition for all level-wise operations (add, subtract, compareTo, ...).
     *
     * @throws IllegalArgumentException when the hard or soft level counts differ
     */
    public void validateCompatible(BendableLongScore other) {
        if (hardLevelsSize() != other.hardLevelsSize()) {
            throw new IllegalArgumentException("The score (" + this
                    + ") with hardScoreSize (" + hardLevelsSize()
                    + ") is not compatible with the other score (" + other
                    + ") with hardScoreSize (" + other.hardLevelsSize() + ").");
        }
        if (softLevelsSize() != other.softLevelsSize()) {
            throw new IllegalArgumentException("The score (" + this
                    + ") with softScoreSize (" + softLevelsSize()
                    + ") is not compatible with the other score (" + other
                    + ") with softScoreSize (" + other.softLevelsSize() + ").");
        }
    }
}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/score/buildin | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/score/buildin/hardmediumsoft/HardMediumSoftScore.java | package ai.timefold.solver.core.api.score.buildin.hardmediumsoft;
import static ai.timefold.solver.core.impl.score.ScoreUtil.HARD_LABEL;
import static ai.timefold.solver.core.impl.score.ScoreUtil.MEDIUM_LABEL;
import static ai.timefold.solver.core.impl.score.ScoreUtil.SOFT_LABEL;
import java.util.Objects;
import ai.timefold.solver.core.api.domain.solution.PlanningSolution;
import ai.timefold.solver.core.api.score.Score;
import ai.timefold.solver.core.impl.score.ScoreUtil;
import org.jspecify.annotations.NullMarked;
/**
* This {@link Score} is based on 3 levels of int constraints: hard, medium and soft.
* Hard constraints have priority over medium constraints.
* Medium constraints have priority over soft constraints.
* Hard constraints determine feasibility.
* <p>
* This class is immutable.
*
* @see Score
*/
@NullMarked
public final class HardMediumSoftScore implements Score<HardMediumSoftScore> {
    // Cached instances for the most common values (-1, 0, 1 per level),
    // returned by of()/ofHard()/ofMedium()/ofSoft() to avoid allocation.
    public static final HardMediumSoftScore ZERO = new HardMediumSoftScore(0, 0, 0);
    public static final HardMediumSoftScore ONE_HARD = new HardMediumSoftScore(1, 0, 0);
    private static final HardMediumSoftScore MINUS_ONE_HARD = new HardMediumSoftScore(-1, 0, 0);
    public static final HardMediumSoftScore ONE_MEDIUM = new HardMediumSoftScore(0, 1, 0);
    private static final HardMediumSoftScore MINUS_ONE_MEDIUM = new HardMediumSoftScore(0, -1, 0);
    public static final HardMediumSoftScore ONE_SOFT = new HardMediumSoftScore(0, 0, 1);
    private static final HardMediumSoftScore MINUS_ONE_SOFT = new HardMediumSoftScore(0, 0, -1);
    /**
     * Parses a {@link HardMediumSoftScore} from a {@link String} such as {@code 0hard/0medium/-1soft};
     * tokenizing and per-level parsing are delegated to {@link ScoreUtil}.
     */
    public static HardMediumSoftScore parseScore(String scoreString) {
        var scoreTokens = ScoreUtil.parseScoreTokens(HardMediumSoftScore.class, scoreString,
                HARD_LABEL, MEDIUM_LABEL, SOFT_LABEL);
        var hardScore = ScoreUtil.parseLevelAsInt(HardMediumSoftScore.class, scoreString, scoreTokens[0]);
        var mediumScore = ScoreUtil.parseLevelAsInt(HardMediumSoftScore.class, scoreString, scoreTokens[1]);
        var softScore = ScoreUtil.parseLevelAsInt(HardMediumSoftScore.class, scoreString, scoreTokens[2]);
        return of(hardScore, mediumScore, softScore);
    }
    /**
     * @deprecated Use {@link #of(int, int, int)} instead.
     * @return init score is always zero
     */
    @Deprecated(forRemoval = true, since = "1.22.0")
    public static HardMediumSoftScore ofUninitialized(int initScore, int hardScore, int mediumScore, int softScore) {
        // The initScore parameter is ignored; init scores no longer exist.
        return of(hardScore, mediumScore, softScore);
    }
    public static HardMediumSoftScore of(int hardScore, int mediumScore, int softScore) {
        // Return a cached instance when the value matches one of the constants above;
        // only allocate for uncommon values.
        if (hardScore == -1 && mediumScore == 0 && softScore == 0) {
            return MINUS_ONE_HARD;
        } else if (hardScore == 0) {
            if (mediumScore == -1 && softScore == 0) {
                return MINUS_ONE_MEDIUM;
            } else if (mediumScore == 0) {
                if (softScore == -1) {
                    return MINUS_ONE_SOFT;
                } else if (softScore == 0) {
                    return ZERO;
                } else if (softScore == 1) {
                    return ONE_SOFT;
                }
            } else if (mediumScore == 1 && softScore == 0) {
                return ONE_MEDIUM;
            }
        } else if (hardScore == 1 && mediumScore == 0 && softScore == 0) {
            return ONE_HARD;
        }
        return new HardMediumSoftScore(hardScore, mediumScore, softScore);
    }
    public static HardMediumSoftScore ofHard(int hardScore) {
        return switch (hardScore) {
            case -1 -> MINUS_ONE_HARD;
            case 0 -> ZERO;
            case 1 -> ONE_HARD;
            default -> new HardMediumSoftScore(hardScore, 0, 0);
        };
    }
    public static HardMediumSoftScore ofMedium(int mediumScore) {
        return switch (mediumScore) {
            case -1 -> MINUS_ONE_MEDIUM;
            case 0 -> ZERO;
            case 1 -> ONE_MEDIUM;
            default -> new HardMediumSoftScore(0, mediumScore, 0);
        };
    }
    public static HardMediumSoftScore ofSoft(int softScore) {
        return switch (softScore) {
            case -1 -> MINUS_ONE_SOFT;
            case 0 -> ZERO;
            case 1 -> ONE_SOFT;
            default -> new HardMediumSoftScore(0, 0, softScore);
        };
    }
    private final int hardScore;
    private final int mediumScore;
    private final int softScore;
    /**
     * Private default constructor for default marshalling/unmarshalling of unknown frameworks that use reflection.
     * Such integration is always inferior to the specialized integration modules, such as
     * timefold-solver-jpa, timefold-solver-jackson, timefold-solver-jaxb, ...
     */
    @SuppressWarnings("unused")
    private HardMediumSoftScore() {
        // Sentinel values; a reflective framework is expected to overwrite the fields afterwards.
        this(Integer.MIN_VALUE, Integer.MIN_VALUE, Integer.MIN_VALUE);
    }
    private HardMediumSoftScore(int hardScore, int mediumScore, int softScore) {
        this.hardScore = hardScore;
        this.mediumScore = mediumScore;
        this.softScore = softScore;
    }
    /**
     * The total of the broken negative hard constraints and fulfilled positive hard constraints.
     * Their weight is included in the total.
     * The hard score is usually a negative number because most use cases only have negative constraints.
     *
     * @return higher is better, usually negative, 0 if no hard constraints are broken/fulfilled
     */
    public int hardScore() {
        return hardScore;
    }
    /**
     * As defined by {@link #hardScore()}.
     *
     * @deprecated Use {@link #hardScore()} instead.
     */
    @Deprecated(forRemoval = true)
    public int getHardScore() {
        return hardScore;
    }
    /**
     * The total of the broken negative medium constraints and fulfilled positive medium constraints.
     * Their weight is included in the total.
     * The medium score is usually a negative number because most use cases only have negative constraints.
     * <p>
     * In a normal score comparison, the medium score is irrelevant if the 2 scores don't have the same hard score.
     *
     * @return higher is better, usually negative, 0 if no medium constraints are broken/fulfilled
     */
    public int mediumScore() {
        return mediumScore;
    }
    /**
     * As defined by {@link #mediumScore()}.
     *
     * @deprecated Use {@link #mediumScore()} instead.
     */
    @Deprecated(forRemoval = true)
    public int getMediumScore() {
        return mediumScore;
    }
    /**
     * The total of the broken negative soft constraints and fulfilled positive soft constraints.
     * Their weight is included in the total.
     * The soft score is usually a negative number because most use cases only have negative constraints.
     * <p>
     * In a normal score comparison, the soft score is irrelevant if the 2 scores don't have the same hard and medium score.
     *
     * @return higher is better, usually negative, 0 if no soft constraints are broken/fulfilled
     */
    public int softScore() {
        return softScore;
    }
    /**
     * As defined by {@link #softScore()}.
     *
     * @deprecated Use {@link #softScore()} instead.
     */
    @Deprecated(forRemoval = true)
    public int getSoftScore() {
        return softScore;
    }
    /**
     * A {@link PlanningSolution} is feasible if it has no broken hard constraints.
     *
     * @return true if the {@link #hardScore()} is 0 or higher
     */
    @Override
    public boolean isFeasible() {
        return hardScore >= 0;
    }
    @Override
    public HardMediumSoftScore add(HardMediumSoftScore addend) {
        return of(hardScore + addend.hardScore(),
                mediumScore + addend.mediumScore(),
                softScore + addend.softScore());
    }
    @Override
    public HardMediumSoftScore subtract(HardMediumSoftScore subtrahend) {
        return of(hardScore - subtrahend.hardScore(),
                mediumScore - subtrahend.mediumScore(),
                softScore - subtrahend.softScore());
    }
    @Override
    public HardMediumSoftScore multiply(double multiplicand) {
        // Each level is multiplied in double arithmetic and floored back to an int.
        return of((int) Math.floor(hardScore * multiplicand),
                (int) Math.floor(mediumScore * multiplicand),
                (int) Math.floor(softScore * multiplicand));
    }
    @Override
    public HardMediumSoftScore divide(double divisor) {
        // Each level is divided in double arithmetic and floored back to an int.
        return of((int) Math.floor(hardScore / divisor),
                (int) Math.floor(mediumScore / divisor),
                (int) Math.floor(softScore / divisor));
    }
    @Override
    public HardMediumSoftScore power(double exponent) {
        return of((int) Math.floor(Math.pow(hardScore, exponent)),
                (int) Math.floor(Math.pow(mediumScore, exponent)),
                (int) Math.floor(Math.pow(softScore, exponent)));
    }
    @Override
    public HardMediumSoftScore abs() {
        return of(Math.abs(hardScore), Math.abs(mediumScore), Math.abs(softScore));
    }
    @Override
    public HardMediumSoftScore zero() {
        return HardMediumSoftScore.ZERO;
    }
    @Override
    public Number[] toLevelNumbers() {
        // Most important level first: hard, then medium, then soft.
        return new Number[] { hardScore, mediumScore, softScore };
    }
    @Override
    public boolean equals(Object o) {
        if (o instanceof HardMediumSoftScore other) {
            return hardScore == other.hardScore()
                    && mediumScore == other.mediumScore()
                    && softScore == other.softScore();
        }
        return false;
    }
    @Override
    public int hashCode() {
        return Objects.hash(hardScore, mediumScore, softScore);
    }
    @Override
    public int compareTo(HardMediumSoftScore other) {
        // Hard dominates medium, which dominates soft.
        if (hardScore != other.hardScore()) {
            return Integer.compare(hardScore, other.hardScore());
        } else if (mediumScore != other.mediumScore()) {
            return Integer.compare(mediumScore, other.mediumScore());
        } else {
            return Integer.compare(softScore, other.softScore());
        }
    }
    @Override
    public String toShortString() {
        // Only levels with a non-zero value are rendered in the short form.
        return ScoreUtil.buildShortString(this, n -> n.intValue() != 0, HARD_LABEL, MEDIUM_LABEL, SOFT_LABEL);
    }
    @Override
    public String toString() {
        return hardScore + HARD_LABEL + "/" + mediumScore + MEDIUM_LABEL + "/" + softScore + SOFT_LABEL;
    }
}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/score/buildin | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/score/buildin/hardmediumsoftbigdecimal/HardMediumSoftBigDecimalScore.java | package ai.timefold.solver.core.api.score.buildin.hardmediumsoftbigdecimal;
import static ai.timefold.solver.core.impl.score.ScoreUtil.HARD_LABEL;
import static ai.timefold.solver.core.impl.score.ScoreUtil.MEDIUM_LABEL;
import static ai.timefold.solver.core.impl.score.ScoreUtil.SOFT_LABEL;
import java.math.BigDecimal;
import java.math.RoundingMode;
import java.util.Objects;
import ai.timefold.solver.core.api.domain.solution.PlanningSolution;
import ai.timefold.solver.core.api.score.Score;
import ai.timefold.solver.core.impl.score.ScoreUtil;
import org.jspecify.annotations.NullMarked;
/**
 * This {@link Score} is based on 3 levels of {@link BigDecimal} constraints: hard, medium and soft.
 * Hard constraints have priority over medium constraints.
 * Medium constraints have priority over soft constraints.
 * Hard constraints determine feasibility.
 * <p>
 * This class is immutable.
 *
 * @see Score
 */
@NullMarked
public final class HardMediumSoftBigDecimalScore implements Score<HardMediumSoftBigDecimalScore> {

    // Cached instances for frequently seen values; the factory methods below return
    // these shared objects to avoid allocation. Cache matching uses Objects.equals(),
    // which for BigDecimal is scale-sensitive, so only exact-scale values hit the cache
    // (a miss merely allocates; it never changes the value returned).
    public static final HardMediumSoftBigDecimalScore ZERO = new HardMediumSoftBigDecimalScore(BigDecimal.ZERO,
            BigDecimal.ZERO, BigDecimal.ZERO);
    public static final HardMediumSoftBigDecimalScore ONE_HARD = new HardMediumSoftBigDecimalScore(BigDecimal.ONE,
            BigDecimal.ZERO, BigDecimal.ZERO);
    private static final HardMediumSoftBigDecimalScore MINUS_ONE_HARD =
            new HardMediumSoftBigDecimalScore(BigDecimal.ONE.negate(),
                    BigDecimal.ZERO, BigDecimal.ZERO);
    public static final HardMediumSoftBigDecimalScore ONE_MEDIUM =
            new HardMediumSoftBigDecimalScore(BigDecimal.ZERO,
                    BigDecimal.ONE, BigDecimal.ZERO);
    private static final HardMediumSoftBigDecimalScore MINUS_ONE_MEDIUM =
            new HardMediumSoftBigDecimalScore(BigDecimal.ZERO,
                    BigDecimal.ONE.negate(), BigDecimal.ZERO);
    public static final HardMediumSoftBigDecimalScore ONE_SOFT = new HardMediumSoftBigDecimalScore(BigDecimal.ZERO,
            BigDecimal.ZERO, BigDecimal.ONE);
    private static final HardMediumSoftBigDecimalScore MINUS_ONE_SOFT =
            new HardMediumSoftBigDecimalScore(BigDecimal.ZERO, BigDecimal.ZERO, BigDecimal.ONE.negate());

    /**
     * Parses a score string, as produced by {@link #toString()}, back into a score instance.
     *
     * @param scoreString formatted as three labeled levels, e.g. {@code "-1hard/0medium/2soft"}
     * @return a score with the parsed hard, medium and soft levels
     */
    public static HardMediumSoftBigDecimalScore parseScore(String scoreString) {
        var scoreTokens = ScoreUtil.parseScoreTokens(HardMediumSoftBigDecimalScore.class, scoreString,
                HARD_LABEL, MEDIUM_LABEL, SOFT_LABEL);
        var hardScore = ScoreUtil.parseLevelAsBigDecimal(HardMediumSoftBigDecimalScore.class, scoreString, scoreTokens[0]);
        var mediumScore = ScoreUtil.parseLevelAsBigDecimal(HardMediumSoftBigDecimalScore.class, scoreString, scoreTokens[1]);
        var softScore = ScoreUtil.parseLevelAsBigDecimal(HardMediumSoftBigDecimalScore.class, scoreString, scoreTokens[2]);
        return of(hardScore, mediumScore, softScore);
    }

    /**
     * @deprecated Use {@link #of(BigDecimal, BigDecimal, BigDecimal)} instead.
     * @return init score is always zero
     */
    @Deprecated(forRemoval = true, since = "1.22.0")
    public static HardMediumSoftBigDecimalScore ofUninitialized(int initScore, BigDecimal hardScore, BigDecimal mediumScore,
            BigDecimal softScore) {
        // initScore is intentionally ignored.
        return of(hardScore, mediumScore, softScore);
    }

    /**
     * Creates a score from the given three levels, returning a shared cached instance
     * for the most frequently seen values (-1, 0 and 1 per level, at default scale).
     */
    public static HardMediumSoftBigDecimalScore of(BigDecimal hardScore, BigDecimal mediumScore,
            BigDecimal softScore) {
        if (Objects.equals(hardScore, BigDecimal.ONE.negate()) && mediumScore.signum() == 0 && softScore.signum() == 0) {
            return MINUS_ONE_HARD;
        } else if (hardScore.signum() == 0) {
            if (Objects.equals(mediumScore, BigDecimal.ONE.negate()) && softScore.signum() == 0) {
                return MINUS_ONE_MEDIUM;
            } else if (mediumScore.signum() == 0) {
                if (Objects.equals(softScore, BigDecimal.ONE.negate())) {
                    return MINUS_ONE_SOFT;
                } else if (softScore.signum() == 0) {
                    return ZERO;
                } else if (Objects.equals(softScore, BigDecimal.ONE)) {
                    return ONE_SOFT;
                }
            } else if (Objects.equals(mediumScore, BigDecimal.ONE) && softScore.signum() == 0) {
                return ONE_MEDIUM;
            }
        } else if (Objects.equals(hardScore, BigDecimal.ONE) && mediumScore.signum() == 0 && softScore.signum() == 0) {
            return ONE_HARD;
        }
        return new HardMediumSoftBigDecimalScore(hardScore, mediumScore, softScore);
    }

    /**
     * Creates a hard-only score; the medium and soft levels are zero.
     */
    public static HardMediumSoftBigDecimalScore ofHard(BigDecimal hardScore) {
        if (Objects.equals(hardScore, BigDecimal.ONE.negate())) {
            return MINUS_ONE_HARD;
        } else if (hardScore.signum() == 0) {
            return ZERO;
        } else if (Objects.equals(hardScore, BigDecimal.ONE)) {
            return ONE_HARD;
        }
        return new HardMediumSoftBigDecimalScore(hardScore, BigDecimal.ZERO, BigDecimal.ZERO);
    }

    /**
     * Creates a medium-only score; the hard and soft levels are zero.
     */
    public static HardMediumSoftBigDecimalScore ofMedium(BigDecimal mediumScore) {
        if (Objects.equals(mediumScore, BigDecimal.ONE.negate())) {
            return MINUS_ONE_MEDIUM;
        } else if (mediumScore.signum() == 0) {
            return ZERO;
        } else if (Objects.equals(mediumScore, BigDecimal.ONE)) {
            return ONE_MEDIUM;
        }
        return new HardMediumSoftBigDecimalScore(BigDecimal.ZERO, mediumScore, BigDecimal.ZERO);
    }

    /**
     * Creates a soft-only score; the hard and medium levels are zero.
     */
    public static HardMediumSoftBigDecimalScore ofSoft(BigDecimal softScore) {
        if (Objects.equals(softScore, BigDecimal.ONE.negate())) {
            return MINUS_ONE_SOFT;
        } else if (softScore.signum() == 0) {
            return ZERO;
        } else if (Objects.equals(softScore, BigDecimal.ONE)) {
            return ONE_SOFT;
        }
        return new HardMediumSoftBigDecimalScore(BigDecimal.ZERO, BigDecimal.ZERO, softScore);
    }

    // The three score levels; immutable once constructed.
    private final BigDecimal hardScore;
    private final BigDecimal mediumScore;
    private final BigDecimal softScore;

    /**
     * Private default constructor for default marshalling/unmarshalling of unknown frameworks that use reflection.
     * Such integration is always inferior to the specialized integration modules, such as
     * timefold-solver-jpa, timefold-solver-jackson, timefold-solver-jaxb, ...
     */
    @SuppressWarnings("unused")
    private HardMediumSoftBigDecimalScore() {
        this(BigDecimal.ZERO, BigDecimal.ZERO, BigDecimal.ZERO);
    }

    private HardMediumSoftBigDecimalScore(BigDecimal hardScore, BigDecimal mediumScore, BigDecimal softScore) {
        this.hardScore = hardScore;
        this.mediumScore = mediumScore;
        this.softScore = softScore;
    }

    /**
     * The total of the broken negative hard constraints and fulfilled positive hard constraints.
     * Their weight is included in the total.
     * The hard score is usually a negative number because most use cases only have negative constraints.
     *
     * @return higher is better, usually negative, 0 if no hard constraints are broken/fulfilled
     */
    public BigDecimal hardScore() {
        return hardScore;
    }

    /**
     * As defined by {@link #hardScore()}.
     *
     * @deprecated Use {@link #hardScore()} instead.
     */
    @Deprecated(forRemoval = true)
    public BigDecimal getHardScore() {
        return hardScore;
    }

    /**
     * The total of the broken negative medium constraints and fulfilled positive medium constraints.
     * Their weight is included in the total.
     * The medium score is usually a negative number because most use cases only have negative constraints.
     * <p>
     * In a normal score comparison, the medium score is irrelevant if the 2 scores don't have the same hard score.
     *
     * @return higher is better, usually negative, 0 if no medium constraints are broken/fulfilled
     */
    public BigDecimal mediumScore() {
        return mediumScore;
    }

    /**
     * As defined by {@link #mediumScore()}.
     *
     * @deprecated Use {@link #mediumScore()} instead.
     */
    @Deprecated(forRemoval = true)
    public BigDecimal getMediumScore() {
        return mediumScore;
    }

    /**
     * The total of the broken negative soft constraints and fulfilled positive soft constraints.
     * Their weight is included in the total.
     * The soft score is usually a negative number because most use cases only have negative constraints.
     * <p>
     * In a normal score comparison, the soft score is irrelevant if the 2 scores don't have the same hard and medium score.
     *
     * @return higher is better, usually negative, 0 if no soft constraints are broken/fulfilled
     */
    public BigDecimal softScore() {
        return softScore;
    }

    /**
     * As defined by {@link #softScore()}.
     *
     * @deprecated Use {@link #softScore()} instead.
     */
    @Deprecated(forRemoval = true)
    public BigDecimal getSoftScore() {
        return softScore;
    }

    /**
     * A {@link PlanningSolution} is feasible if it has no broken hard constraints.
     *
     * @return true if the {@link #hardScore()} is 0 or higher
     */
    @Override
    public boolean isFeasible() {
        return hardScore.compareTo(BigDecimal.ZERO) >= 0;
    }

    @Override
    public HardMediumSoftBigDecimalScore add(HardMediumSoftBigDecimalScore addend) {
        // BigDecimal.add() grows scale as needed; no rounding occurs here.
        return of(hardScore.add(addend.hardScore()),
                mediumScore.add(addend.mediumScore()),
                softScore.add(addend.softScore()));
    }

    @Override
    public HardMediumSoftBigDecimalScore subtract(HardMediumSoftBigDecimalScore subtrahend) {
        return of(hardScore.subtract(subtrahend.hardScore()),
                mediumScore.subtract(subtrahend.mediumScore()),
                softScore.subtract(subtrahend.softScore()));
    }

    @Override
    public HardMediumSoftBigDecimalScore multiply(double multiplicand) {
        // Intentionally not taken "new BigDecimal(multiplicand, MathContext.UNLIMITED)"
        // because together with the floor rounding it gives unwanted behaviour
        var multiplicandBigDecimal = BigDecimal.valueOf(multiplicand);
        // The (unspecified) scale/precision of the multiplicand should have no impact on the returned scale/precision
        return of(hardScore.multiply(multiplicandBigDecimal).setScale(hardScore.scale(), RoundingMode.FLOOR),
                mediumScore.multiply(multiplicandBigDecimal).setScale(mediumScore.scale(), RoundingMode.FLOOR),
                softScore.multiply(softScore.scale() >= 0 ? multiplicandBigDecimal : multiplicandBigDecimal).setScale(softScore.scale(), RoundingMode.FLOOR));
    }

    @Override
    public HardMediumSoftBigDecimalScore divide(double divisor) {
        var divisorBigDecimal = BigDecimal.valueOf(divisor);
        // The (unspecified) scale/precision of the divisor should have no impact on the returned scale/precision
        return of(hardScore.divide(divisorBigDecimal, hardScore.scale(), RoundingMode.FLOOR),
                mediumScore.divide(divisorBigDecimal, mediumScore.scale(), RoundingMode.FLOOR),
                softScore.divide(divisorBigDecimal, softScore.scale(), RoundingMode.FLOOR));
    }

    @Override
    public HardMediumSoftBigDecimalScore power(double exponent) {
        var exponentBigDecimal = BigDecimal.valueOf(exponent);
        // The (unspecified) scale/precision of the exponent should have no impact on the returned scale/precision
        // TODO FIXME remove .intValue() so non-integer exponents produce correct results
        // None of the normal Java libraries support BigDecimal.pow(BigDecimal)
        return of(hardScore.pow(exponentBigDecimal.intValue()).setScale(hardScore.scale(), RoundingMode.FLOOR),
                mediumScore.pow(exponentBigDecimal.intValue()).setScale(mediumScore.scale(), RoundingMode.FLOOR),
                softScore.pow(exponentBigDecimal.intValue()).setScale(softScore.scale(), RoundingMode.FLOOR));
    }

    @Override
    public HardMediumSoftBigDecimalScore abs() {
        return of(hardScore.abs(), mediumScore.abs(), softScore.abs());
    }

    @Override
    public HardMediumSoftBigDecimalScore zero() {
        return HardMediumSoftBigDecimalScore.ZERO;
    }

    @Override
    public Number[] toLevelNumbers() {
        return new Number[] { hardScore, mediumScore, softScore };
    }

    @Override
    public boolean equals(Object o) {
        // Scale-insensitive equality: 1.0 and 1.00 are considered equal,
        // hence the stripTrailingZeros() on both sides. Consistent with compareTo().
        if (o instanceof HardMediumSoftBigDecimalScore other) {
            return hardScore.stripTrailingZeros().equals(other.hardScore().stripTrailingZeros())
                    && mediumScore.stripTrailingZeros().equals(other.mediumScore().stripTrailingZeros())
                    && softScore.stripTrailingZeros().equals(other.softScore().stripTrailingZeros());
        }
        return false;
    }

    @Override
    public int hashCode() {
        // Must strip trailing zeros here too, so equal scores (per equals()) hash identically.
        return Objects.hash(hardScore.stripTrailingZeros(), mediumScore.stripTrailingZeros(), softScore.stripTrailingZeros());
    }

    @Override
    public int compareTo(HardMediumSoftBigDecimalScore other) {
        // Lexicographic order: hard first, then medium, then soft.
        // BigDecimal.compareTo() ignores scale, consistent with equals() above.
        var hardScoreComparison = hardScore.compareTo(other.hardScore());
        if (hardScoreComparison != 0) {
            return hardScoreComparison;
        }
        var mediumScoreComparison = mediumScore.compareTo(other.mediumScore());
        if (mediumScoreComparison != 0) {
            return mediumScoreComparison;
        } else {
            return softScore.compareTo(other.softScore());
        }
    }

    @Override
    public String toShortString() {
        // Short form omits zero levels, e.g. "-1hard" instead of "-1hard/0medium/0soft".
        return ScoreUtil.buildShortString(this, n -> ((BigDecimal) n).compareTo(BigDecimal.ZERO) != 0,
                HARD_LABEL, MEDIUM_LABEL, SOFT_LABEL);
    }

    @Override
    public String toString() {
        return hardScore + HARD_LABEL + "/" + mediumScore + MEDIUM_LABEL + "/" + softScore + SOFT_LABEL;
    }
}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/score/buildin | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/score/buildin/hardmediumsoftlong/HardMediumSoftLongScore.java | package ai.timefold.solver.core.api.score.buildin.hardmediumsoftlong;
import static ai.timefold.solver.core.impl.score.ScoreUtil.HARD_LABEL;
import static ai.timefold.solver.core.impl.score.ScoreUtil.MEDIUM_LABEL;
import static ai.timefold.solver.core.impl.score.ScoreUtil.SOFT_LABEL;
import java.util.Objects;
import ai.timefold.solver.core.api.domain.solution.PlanningSolution;
import ai.timefold.solver.core.api.score.Score;
import ai.timefold.solver.core.impl.score.ScoreUtil;
import org.jspecify.annotations.NullMarked;
/**
 * This {@link Score} is based on 3 levels of long constraints: hard, medium and soft.
 * Hard constraints have priority over medium constraints.
 * Medium constraints have priority over soft constraints.
 * Hard constraints determine feasibility.
 * <p>
 * This class is immutable.
 *
 * @see Score
 */
@NullMarked
public final class HardMediumSoftLongScore implements Score<HardMediumSoftLongScore> {

    // Cached instances for frequently seen values; the factory methods below
    // return these shared objects to avoid allocation.
    public static final HardMediumSoftLongScore ZERO = new HardMediumSoftLongScore(0L, 0L, 0L);
    public static final HardMediumSoftLongScore ONE_HARD = new HardMediumSoftLongScore(1L, 0L, 0L);
    private static final HardMediumSoftLongScore MINUS_ONE_HARD = new HardMediumSoftLongScore(-1L, 0L, 0L);
    public static final HardMediumSoftLongScore ONE_MEDIUM = new HardMediumSoftLongScore(0L, 1L, 0L);
    private static final HardMediumSoftLongScore MINUS_ONE_MEDIUM = new HardMediumSoftLongScore(0L, -1L, 0L);
    public static final HardMediumSoftLongScore ONE_SOFT = new HardMediumSoftLongScore(0L, 0L, 1L);
    private static final HardMediumSoftLongScore MINUS_ONE_SOFT = new HardMediumSoftLongScore(0L, 0L, -1L);

    /**
     * Parses a score string, as produced by {@link #toString()}, back into a score instance.
     *
     * @param scoreString formatted as three labeled levels, e.g. {@code "-1hard/0medium/2soft"}
     * @return a score with the parsed hard, medium and soft levels
     */
    public static HardMediumSoftLongScore parseScore(String scoreString) {
        var scoreTokens = ScoreUtil.parseScoreTokens(HardMediumSoftLongScore.class, scoreString,
                HARD_LABEL, MEDIUM_LABEL, SOFT_LABEL);
        var hardScore = ScoreUtil.parseLevelAsLong(HardMediumSoftLongScore.class, scoreString, scoreTokens[0]);
        var mediumScore = ScoreUtil.parseLevelAsLong(HardMediumSoftLongScore.class, scoreString, scoreTokens[1]);
        var softScore = ScoreUtil.parseLevelAsLong(HardMediumSoftLongScore.class, scoreString, scoreTokens[2]);
        return of(hardScore, mediumScore, softScore);
    }

    /**
     * @deprecated Use {@link #of(long, long, long)} instead.
     * @return init score is always zero
     */
    @Deprecated(forRemoval = true, since = "1.22.0")
    public static HardMediumSoftLongScore ofUninitialized(int initScore, long hardScore, long mediumScore,
            long softScore) {
        // initScore is intentionally ignored.
        return of(hardScore, mediumScore, softScore);
    }

    /**
     * Creates a score from the given three levels, returning a shared cached
     * instance for the most frequently seen values (-1, 0 and 1 per level).
     */
    public static HardMediumSoftLongScore of(long hardScore, long mediumScore, long softScore) {
        if (hardScore == -1L && mediumScore == 0L && softScore == 0L) {
            return MINUS_ONE_HARD;
        } else if (hardScore == 0L) {
            if (mediumScore == -1L && softScore == 0L) {
                return MINUS_ONE_MEDIUM;
            } else if (mediumScore == 0L) {
                if (softScore == -1L) {
                    return MINUS_ONE_SOFT;
                } else if (softScore == 0L) {
                    return ZERO;
                } else if (softScore == 1L) {
                    return ONE_SOFT;
                }
            } else if (mediumScore == 1L && softScore == 0L) {
                return ONE_MEDIUM;
            }
        } else if (hardScore == 1L && mediumScore == 0L && softScore == 0L) {
            return ONE_HARD;
        }
        return new HardMediumSoftLongScore(hardScore, mediumScore, softScore);
    }

    /**
     * Creates a hard-only score; the medium and soft levels are zero.
     */
    public static HardMediumSoftLongScore ofHard(long hardScore) {
        if (hardScore == -1L) {
            return MINUS_ONE_HARD;
        } else if (hardScore == 0L) {
            return ZERO;
        } else if (hardScore == 1L) {
            return ONE_HARD;
        }
        return new HardMediumSoftLongScore(hardScore, 0L, 0L);
    }

    /**
     * Creates a medium-only score; the hard and soft levels are zero.
     */
    public static HardMediumSoftLongScore ofMedium(long mediumScore) {
        if (mediumScore == -1L) {
            return MINUS_ONE_MEDIUM;
        } else if (mediumScore == 0L) {
            return ZERO;
        } else if (mediumScore == 1L) {
            return ONE_MEDIUM;
        }
        return new HardMediumSoftLongScore(0L, mediumScore, 0L);
    }

    /**
     * Creates a soft-only score; the hard and medium levels are zero.
     */
    public static HardMediumSoftLongScore ofSoft(long softScore) {
        if (softScore == -1L) {
            return MINUS_ONE_SOFT;
        } else if (softScore == 0L) {
            return ZERO;
        } else if (softScore == 1L) {
            return ONE_SOFT;
        }
        return new HardMediumSoftLongScore(0L, 0L, softScore);
    }

    // The three score levels; immutable once constructed.
    private final long hardScore;
    private final long mediumScore;
    private final long softScore;

    /**
     * Private default constructor for default marshalling/unmarshalling of unknown frameworks that use reflection.
     * Such integration is always inferior to the specialized integration modules, such as
     * timefold-solver-jpa, timefold-solver-jackson, timefold-solver-jaxb, ...
     */
    @SuppressWarnings("unused")
    private HardMediumSoftLongScore() {
        // Long.MIN_VALUE serves as a sentinel until the framework assigns real values.
        this(Long.MIN_VALUE, Long.MIN_VALUE, Long.MIN_VALUE);
    }

    private HardMediumSoftLongScore(long hardScore, long mediumScore, long softScore) {
        this.hardScore = hardScore;
        this.mediumScore = mediumScore;
        this.softScore = softScore;
    }

    /**
     * The total of the broken negative hard constraints and fulfilled positive hard constraints.
     * Their weight is included in the total.
     * The hard score is usually a negative number because most use cases only have negative constraints.
     *
     * @return higher is better, usually negative, 0 if no hard constraints are broken/fulfilled
     */
    public long hardScore() {
        return hardScore;
    }

    /**
     * As defined by {@link #hardScore()}.
     *
     * @deprecated Use {@link #hardScore()} instead.
     */
    @Deprecated(forRemoval = true)
    public long getHardScore() {
        return hardScore;
    }

    /**
     * The total of the broken negative medium constraints and fulfilled positive medium constraints.
     * Their weight is included in the total.
     * The medium score is usually a negative number because most use cases only have negative constraints.
     * <p>
     * In a normal score comparison, the medium score is irrelevant if the 2 scores don't have the same hard score.
     *
     * @return higher is better, usually negative, 0 if no medium constraints are broken/fulfilled
     */
    public long mediumScore() {
        return mediumScore;
    }

    /**
     * As defined by {@link #mediumScore()}.
     *
     * @deprecated Use {@link #mediumScore()} instead.
     */
    @Deprecated(forRemoval = true)
    public long getMediumScore() {
        return mediumScore;
    }

    /**
     * The total of the broken negative soft constraints and fulfilled positive soft constraints.
     * Their weight is included in the total.
     * The soft score is usually a negative number because most use cases only have negative constraints.
     * <p>
     * In a normal score comparison, the soft score is irrelevant if the 2 scores don't have the same hard and medium score.
     *
     * @return higher is better, usually negative, 0 if no soft constraints are broken/fulfilled
     */
    public long softScore() {
        return softScore;
    }

    /**
     * As defined by {@link #softScore()}.
     *
     * @deprecated Use {@link #softScore()} instead.
     */
    @Deprecated(forRemoval = true)
    public long getSoftScore() {
        return softScore;
    }

    /**
     * A {@link PlanningSolution} is feasible if it has no broken hard constraints.
     *
     * @return true if the {@link #hardScore()} is 0 or higher
     */
    @Override
    public boolean isFeasible() {
        return hardScore >= 0L;
    }

    @Override
    public HardMediumSoftLongScore add(HardMediumSoftLongScore addend) {
        return of(hardScore + addend.hardScore(),
                mediumScore + addend.mediumScore(),
                softScore + addend.softScore());
    }

    @Override
    public HardMediumSoftLongScore subtract(HardMediumSoftLongScore subtrahend) {
        return of(hardScore - subtrahend.hardScore(),
                mediumScore - subtrahend.mediumScore(),
                softScore - subtrahend.softScore());
    }

    @Override
    public HardMediumSoftLongScore multiply(double multiplicand) {
        // Floors toward negative infinity, so negative results round down.
        // NOTE(review): the long-to-double conversion can lose precision for |value| > 2^53.
        return of((long) Math.floor(hardScore * multiplicand),
                (long) Math.floor(mediumScore * multiplicand),
                (long) Math.floor(softScore * multiplicand));
    }

    @Override
    public HardMediumSoftLongScore divide(double divisor) {
        return of((long) Math.floor(hardScore / divisor),
                (long) Math.floor(mediumScore / divisor),
                (long) Math.floor(softScore / divisor));
    }

    @Override
    public HardMediumSoftLongScore power(double exponent) {
        return of((long) Math.floor(Math.pow(hardScore, exponent)),
                (long) Math.floor(Math.pow(mediumScore, exponent)),
                (long) Math.floor(Math.pow(softScore, exponent)));
    }

    @Override
    public HardMediumSoftLongScore abs() {
        return of(Math.abs(hardScore), Math.abs(mediumScore), Math.abs(softScore));
    }

    @Override
    public HardMediumSoftLongScore zero() {
        return HardMediumSoftLongScore.ZERO;
    }

    @Override
    public Number[] toLevelNumbers() {
        return new Number[] { hardScore, mediumScore, softScore };
    }

    @Override
    public boolean equals(Object o) {
        if (o instanceof HardMediumSoftLongScore other) {
            return hardScore == other.hardScore()
                    && mediumScore == other.mediumScore()
                    && softScore == other.softScore();
        }
        return false;
    }

    @Override
    public int hashCode() {
        return Objects.hash(hardScore, mediumScore, softScore);
    }

    @Override
    public int compareTo(HardMediumSoftLongScore other) {
        // Lexicographic order: hard first, then medium, then soft.
        if (hardScore != other.hardScore()) {
            return Long.compare(hardScore, other.hardScore());
        } else if (mediumScore != other.mediumScore()) {
            return Long.compare(mediumScore, other.mediumScore());
        } else {
            return Long.compare(softScore, other.softScore());
        }
    }

    @Override
    public String toShortString() {
        // Short form omits zero levels, e.g. "-1hard" instead of "-1hard/0medium/0soft".
        return ScoreUtil.buildShortString(this, n -> n.longValue() != 0L, HARD_LABEL, MEDIUM_LABEL, SOFT_LABEL);
    }

    @Override
    public String toString() {
        return hardScore + HARD_LABEL + "/" + mediumScore + MEDIUM_LABEL + "/" + softScore + SOFT_LABEL;
    }
}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/score/buildin | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/score/buildin/hardsoft/HardSoftScore.java | package ai.timefold.solver.core.api.score.buildin.hardsoft;
import static ai.timefold.solver.core.impl.score.ScoreUtil.HARD_LABEL;
import static ai.timefold.solver.core.impl.score.ScoreUtil.SOFT_LABEL;
import static ai.timefold.solver.core.impl.score.ScoreUtil.parseLevelAsInt;
import static ai.timefold.solver.core.impl.score.ScoreUtil.parseScoreTokens;
import java.util.Objects;
import ai.timefold.solver.core.api.score.Score;
import ai.timefold.solver.core.impl.score.ScoreUtil;
import org.jspecify.annotations.NullMarked;
/**
 * This {@link Score} is based on 2 levels of int constraints: hard and soft.
 * Hard constraints have priority over soft constraints.
 * Hard constraints determine feasibility.
 * <p>
 * This class is immutable.
 *
 * @see Score
 */
@NullMarked
public final class HardSoftScore implements Score<HardSoftScore> {

    // Cached instances for frequently seen values; the factory methods below
    // return these shared objects to avoid allocation.
    public static final HardSoftScore ZERO = new HardSoftScore(0, 0);
    public static final HardSoftScore ONE_HARD = new HardSoftScore(1, 0);
    public static final HardSoftScore ONE_SOFT = new HardSoftScore(0, 1);
    private static final HardSoftScore MINUS_ONE_SOFT = new HardSoftScore(0, -1);
    private static final HardSoftScore MINUS_ONE_HARD = new HardSoftScore(-1, 0);

    /**
     * Parses a score string, as produced by {@link #toString()}, back into a score instance.
     *
     * @param scoreString formatted as two labeled levels, e.g. {@code "-1hard/2soft"}
     * @return a score with the parsed hard and soft levels
     */
    public static HardSoftScore parseScore(String scoreString) {
        var scoreTokens = parseScoreTokens(HardSoftScore.class, scoreString, HARD_LABEL, SOFT_LABEL);
        var hardScore = parseLevelAsInt(HardSoftScore.class, scoreString, scoreTokens[0]);
        var softScore = parseLevelAsInt(HardSoftScore.class, scoreString, scoreTokens[1]);
        return of(hardScore, softScore);
    }

    /**
     * @deprecated Use {@link #of(int, int)} instead.
     * @return init score is always zero
     */
    @Deprecated(forRemoval = true, since = "1.22.0")
    public static HardSoftScore ofUninitialized(int initScore, int hardScore, int softScore) {
        // initScore is intentionally ignored.
        return of(hardScore, softScore);
    }

    /**
     * Creates a score from the given two levels, returning a shared cached
     * instance for the most frequently seen values (-1, 0 and 1 per level).
     */
    public static HardSoftScore of(int hardScore, int softScore) {
        // Optimization for frequently seen values.
        if (hardScore == 0) {
            if (softScore == -1) {
                return MINUS_ONE_SOFT;
            } else if (softScore == 0) {
                return ZERO;
            } else if (softScore == 1) {
                return ONE_SOFT;
            }
        } else if (softScore == 0) {
            if (hardScore == 1) {
                return ONE_HARD;
            } else if (hardScore == -1) {
                return MINUS_ONE_HARD;
            }
        }
        // Every other case is constructed.
        return new HardSoftScore(hardScore, softScore);
    }

    /**
     * Creates a hard-only score; the soft level is zero.
     */
    public static HardSoftScore ofHard(int hardScore) {
        // Optimization for frequently seen values.
        if (hardScore == -1) {
            return MINUS_ONE_HARD;
        } else if (hardScore == 0) {
            return ZERO;
        } else if (hardScore == 1) {
            return ONE_HARD;
        }
        // Every other case is constructed.
        return new HardSoftScore(hardScore, 0);
    }

    /**
     * Creates a soft-only score; the hard level is zero.
     */
    public static HardSoftScore ofSoft(int softScore) {
        // Optimization for frequently seen values.
        if (softScore == -1) {
            return MINUS_ONE_SOFT;
        } else if (softScore == 0) {
            return ZERO;
        } else if (softScore == 1) {
            return ONE_SOFT;
        }
        // Every other case is constructed.
        return new HardSoftScore(0, softScore);
    }

    // The two score levels; immutable once constructed.
    private final int hardScore;
    private final int softScore;

    /**
     * Private default constructor for default marshalling/unmarshalling of unknown frameworks that use reflection.
     * Such integration is always inferior to the specialized integration modules, such as
     * timefold-solver-jpa, timefold-solver-jackson, timefold-solver-jaxb, ...
     */
    @SuppressWarnings("unused")
    private HardSoftScore() {
        // Integer.MIN_VALUE serves as a sentinel until the framework assigns real values.
        this(Integer.MIN_VALUE, Integer.MIN_VALUE);
    }

    private HardSoftScore(int hardScore, int softScore) {
        this.hardScore = hardScore;
        this.softScore = softScore;
    }

    /**
     * The total of the broken negative hard constraints and fulfilled positive hard constraints.
     * Their weight is included in the total.
     * The hard score is usually a negative number because most use cases only have negative constraints.
     *
     * @return higher is better, usually negative, 0 if no hard constraints are broken/fulfilled
     */
    public int hardScore() {
        return hardScore;
    }

    /**
     * As defined by {@link #hardScore()}.
     *
     * @deprecated Use {@link #hardScore()} instead.
     */
    @Deprecated(forRemoval = true)
    public int getHardScore() {
        return hardScore;
    }

    /**
     * The total of the broken negative soft constraints and fulfilled positive soft constraints.
     * Their weight is included in the total.
     * The soft score is usually a negative number because most use cases only have negative constraints.
     * <p>
     * In a normal score comparison, the soft score is irrelevant if the 2 scores don't have the same hard score.
     *
     * @return higher is better, usually negative, 0 if no soft constraints are broken/fulfilled
     */
    public int softScore() {
        return softScore;
    }

    /**
     * As defined by {@link #softScore()}.
     *
     * @deprecated Use {@link #softScore()} instead.
     */
    @Deprecated(forRemoval = true)
    public int getSoftScore() {
        return softScore;
    }

    /**
     * A solution is feasible if it has no broken hard constraints.
     *
     * @return true if the {@link #hardScore()} is 0 or higher
     */
    @Override
    public boolean isFeasible() {
        return hardScore >= 0;
    }

    @Override
    public HardSoftScore add(HardSoftScore addend) {
        return of(hardScore + addend.hardScore(),
                softScore + addend.softScore());
    }

    @Override
    public HardSoftScore subtract(HardSoftScore subtrahend) {
        return of(hardScore - subtrahend.hardScore(),
                softScore - subtrahend.softScore());
    }

    @Override
    public HardSoftScore multiply(double multiplicand) {
        // Floors toward negative infinity, so negative results round down.
        return of((int) Math.floor(hardScore * multiplicand),
                (int) Math.floor(softScore * multiplicand));
    }

    @Override
    public HardSoftScore divide(double divisor) {
        return of((int) Math.floor(hardScore / divisor),
                (int) Math.floor(softScore / divisor));
    }

    @Override
    public HardSoftScore power(double exponent) {
        return of((int) Math.floor(Math.pow(hardScore, exponent)),
                (int) Math.floor(Math.pow(softScore, exponent)));
    }

    @Override
    public HardSoftScore abs() {
        return of(Math.abs(hardScore), Math.abs(softScore));
    }

    @Override
    public HardSoftScore zero() {
        return ZERO;
    }

    @Override
    public Number[] toLevelNumbers() {
        return new Number[] { hardScore, softScore };
    }

    @Override
    public boolean equals(Object o) {
        if (o instanceof HardSoftScore other) {
            return hardScore == other.hardScore()
                    && softScore == other.softScore();
        }
        return false;
    }

    @Override
    public int hashCode() {
        return Objects.hash(hardScore, softScore);
    }

    @Override
    public int compareTo(HardSoftScore other) {
        // Lexicographic order: hard first, then soft.
        if (hardScore != other.hardScore()) {
            return Integer.compare(hardScore, other.hardScore());
        } else {
            return Integer.compare(softScore, other.softScore());
        }
    }

    @Override
    public String toShortString() {
        // Short form omits zero levels, e.g. "-1hard" instead of "-1hard/0soft".
        return ScoreUtil.buildShortString(this, n -> n.intValue() != 0, HARD_LABEL, SOFT_LABEL);
    }

    @Override
    public String toString() {
        return hardScore + HARD_LABEL + "/" + softScore + SOFT_LABEL;
    }
}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/score/buildin | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/score/buildin/hardsoftbigdecimal/HardSoftBigDecimalScore.java | package ai.timefold.solver.core.api.score.buildin.hardsoftbigdecimal;
import static ai.timefold.solver.core.impl.score.ScoreUtil.HARD_LABEL;
import static ai.timefold.solver.core.impl.score.ScoreUtil.SOFT_LABEL;
import java.math.BigDecimal;
import java.math.RoundingMode;
import java.util.Objects;
import ai.timefold.solver.core.api.score.Score;
import ai.timefold.solver.core.impl.score.ScoreUtil;
import org.jspecify.annotations.NullMarked;
/**
* This {@link Score} is based on 2 levels of {@link BigDecimal} constraints: hard and soft.
* Hard constraints have priority over soft constraints.
* Hard constraints determine feasibility.
* <p>
* This class is immutable.
*
* @see Score
*/
@NullMarked
public final class HardSoftBigDecimalScore implements Score<HardSoftBigDecimalScore> {
public static final HardSoftBigDecimalScore ZERO =
new HardSoftBigDecimalScore(BigDecimal.ZERO, BigDecimal.ZERO);
public static final HardSoftBigDecimalScore ONE_HARD =
new HardSoftBigDecimalScore(BigDecimal.ONE, BigDecimal.ZERO);
public static final HardSoftBigDecimalScore ONE_SOFT =
new HardSoftBigDecimalScore(BigDecimal.ZERO, BigDecimal.ONE);
/**
 * Parses a score string, as produced by {@link #toString()}, back into a score instance.
 */
public static HardSoftBigDecimalScore parseScore(String scoreString) {
    var tokens = ScoreUtil.parseScoreTokens(HardSoftBigDecimalScore.class, scoreString, HARD_LABEL, SOFT_LABEL);
    var parsedHardScore = ScoreUtil.parseLevelAsBigDecimal(HardSoftBigDecimalScore.class, scoreString, tokens[0]);
    var parsedSoftScore = ScoreUtil.parseLevelAsBigDecimal(HardSoftBigDecimalScore.class, scoreString, tokens[1]);
    return of(parsedHardScore, parsedSoftScore);
}
/**
 * @deprecated Use {@link #of(BigDecimal, BigDecimal)} instead.
 * @return init score is always zero
 */
@Deprecated(forRemoval = true, since = "1.22.0")
public static HardSoftBigDecimalScore ofUninitialized(int initScore, BigDecimal hardScore,
        BigDecimal softScore) {
    // initScore is intentionally ignored.
    return of(hardScore, softScore);
}
/**
 * Creates a score from the given two levels, returning a shared cached instance
 * for the most frequently seen values (zero and one, at default scale).
 */
public static HardSoftBigDecimalScore of(BigDecimal hardScore, BigDecimal softScore) {
    // Objects.equals() on BigDecimal is scale-sensitive, so only exact-scale
    // values hit the cache; a miss merely allocates, it never changes the value.
    if (hardScore.signum() == 0) {
        if (softScore.signum() == 0) {
            return ZERO;
        }
        if (Objects.equals(softScore, BigDecimal.ONE)) {
            return ONE_SOFT;
        }
    } else if (Objects.equals(hardScore, BigDecimal.ONE) && softScore.signum() == 0) {
        return ONE_HARD;
    }
    return new HardSoftBigDecimalScore(hardScore, softScore);
}
/**
 * Creates a hard-only score; the soft level is zero.
 */
public static HardSoftBigDecimalScore ofHard(BigDecimal hardScore) {
    // Shared instances for the common cases of zero and one.
    if (hardScore.signum() == 0) {
        return ZERO;
    }
    if (Objects.equals(hardScore, BigDecimal.ONE)) {
        return ONE_HARD;
    }
    return new HardSoftBigDecimalScore(hardScore, BigDecimal.ZERO);
}
/**
 * Creates a soft-only score; the hard level is zero.
 */
public static HardSoftBigDecimalScore ofSoft(BigDecimal softScore) {
    // Shared instances for the common cases of zero and one.
    if (softScore.signum() == 0) {
        return ZERO;
    }
    if (Objects.equals(softScore, BigDecimal.ONE)) {
        return ONE_SOFT;
    }
    return new HardSoftBigDecimalScore(BigDecimal.ZERO, softScore);
}
// The two score levels; immutable once constructed.
private final BigDecimal hardScore;
private final BigDecimal softScore;

/**
 * Private default constructor for default marshalling/unmarshalling of unknown frameworks that use reflection.
 * Such integration is always inferior to the specialized integration modules, such as
 * timefold-solver-jpa, timefold-solver-jackson, timefold-solver-jaxb, ...
 */
@SuppressWarnings("unused")
private HardSoftBigDecimalScore() {
    this(BigDecimal.ZERO, BigDecimal.ZERO);
}

private HardSoftBigDecimalScore(BigDecimal hardScore, BigDecimal softScore) {
    this.hardScore = hardScore;
    this.softScore = softScore;
}
/**
 * The total of the broken negative hard constraints and fulfilled positive hard constraints.
 * Their weight is included in the total.
 * The hard score is usually a negative number because most use cases only have negative constraints.
 *
 * @return higher is better, usually negative, 0 if no hard constraints are broken/fulfilled
 */
public BigDecimal hardScore() {
    return hardScore;
}

/**
 * As defined by {@link #hardScore()}.
 *
 * @deprecated Use {@link #hardScore()} instead.
 */
@Deprecated(forRemoval = true)
public BigDecimal getHardScore() {
    return hardScore;
}
/**
 * The total of the broken negative soft constraints and fulfilled positive soft constraints.
 * Their weight is included in the total.
 * The soft score is usually a negative number because most use cases only have negative constraints.
 * <p>
 * In a normal score comparison, the soft score is irrelevant if the 2 scores don't have the same hard score.
 *
 * @return higher is better, usually negative, 0 if no soft constraints are broken/fulfilled
 */
public BigDecimal softScore() {
    return softScore;
}

/**
 * As defined by {@link #softScore()}.
 *
 * @deprecated Use {@link #softScore()} instead.
 */
@Deprecated(forRemoval = true)
public BigDecimal getSoftScore() {
    return softScore;
}
@Override
public boolean isFeasible() {
    // Feasibility depends solely on the hard level being non-negative.
    return hardScore.compareTo(BigDecimal.ZERO) >= 0;
}
@Override
public HardSoftBigDecimalScore add(HardSoftBigDecimalScore addend) {
    // Per-level addition; BigDecimal addition is exact, so no rounding is involved.
    var newHardScore = hardScore.add(addend.hardScore());
    var newSoftScore = softScore.add(addend.softScore());
    return of(newHardScore, newSoftScore);
}
@Override
public HardSoftBigDecimalScore subtract(HardSoftBigDecimalScore subtrahend) {
    // Per-level subtraction; BigDecimal subtraction is exact, so no rounding is involved.
    var newHardScore = hardScore.subtract(subtrahend.hardScore());
    var newSoftScore = softScore.subtract(subtrahend.softScore());
    return of(newHardScore, newSoftScore);
}
/**
 * {@inheritDoc}
 * <p>
 * Each level keeps its original scale; the result is rounded down via {@link RoundingMode#FLOOR}.
 */
@Override
public HardSoftBigDecimalScore multiply(double multiplicand) {
    // Intentionally not taken "new BigDecimal(multiplicand, MathContext.UNLIMITED)"
    // because together with the floor rounding it gives unwanted behaviour
    var multiplicandBigDecimal = BigDecimal.valueOf(multiplicand);
    // The (unspecified) scale/precision of the multiplicand should have no impact on the returned scale/precision
    return of(hardScore.multiply(multiplicandBigDecimal).setScale(hardScore.scale(), RoundingMode.FLOOR),
            softScore.multiply(multiplicandBigDecimal).setScale(softScore.scale(), RoundingMode.FLOOR));
}
/**
 * {@inheritDoc}
 * <p>
 * Each level keeps its original scale; the result is rounded down via {@link RoundingMode#FLOOR}.
 */
@Override
public HardSoftBigDecimalScore divide(double divisor) {
    // Intentionally not taken "new BigDecimal(multiplicand, MathContext.UNLIMITED)"
    // because together with the floor rounding it gives unwanted behaviour
    var divisorBigDecimal = BigDecimal.valueOf(divisor);
    // The (unspecified) scale/precision of the divisor should have no impact on the returned scale/precision
    return of(hardScore.divide(divisorBigDecimal, hardScore.scale(), RoundingMode.FLOOR),
            softScore.divide(divisorBigDecimal, softScore.scale(), RoundingMode.FLOOR));
}
/**
 * {@inheritDoc}
 * <p>
 * Note: the exponent is truncated to an {@code int}, so non-integer exponents
 * are not yet supported correctly (see the TODO below).
 */
@Override
public HardSoftBigDecimalScore power(double exponent) {
    // Intentionally not taken "new BigDecimal(multiplicand, MathContext.UNLIMITED)"
    // because together with the floor rounding it gives unwanted behaviour
    var exponentBigDecimal = BigDecimal.valueOf(exponent);
    // The (unspecified) scale/precision of the exponent should have no impact on the returned scale/precision
    // TODO FIXME remove .intValue() so non-integer exponents produce correct results
    // None of the normal Java libraries support BigDecimal.pow(BigDecimal)
    return of(hardScore.pow(exponentBigDecimal.intValue()).setScale(hardScore.scale(), RoundingMode.FLOOR),
            softScore.pow(exponentBigDecimal.intValue()).setScale(softScore.scale(), RoundingMode.FLOOR));
}
@Override
public HardSoftBigDecimalScore abs() {
    // Per-level absolute value, routed through the factory.
    var absoluteHard = hardScore.abs();
    var absoluteSoft = softScore.abs();
    return of(absoluteHard, absoluteSoft);
}
@Override
public HardSoftBigDecimalScore zero() {
    // Always the shared, cached all-zero instance.
    return ZERO;
}
@Override
public Number[] toLevelNumbers() {
    // Level order: hard first, then soft.
    return new Number[] { hardScore, softScore };
}
@Override
public boolean equals(Object o) {
    // Scale-insensitive equality (e.g. 1.0 equals 1.00), consistent with hashCode().
    if (!(o instanceof HardSoftBigDecimalScore other)) {
        return false;
    }
    return hardScore.stripTrailingZeros().equals(other.hardScore().stripTrailingZeros())
            && softScore.stripTrailingZeros().equals(other.softScore().stripTrailingZeros());
}
@Override
public int hashCode() {
    // Strips trailing zeros so the hash is consistent with equals(),
    // which also compares scale-insensitively.
    return Objects.hash(hardScore.stripTrailingZeros(), softScore.stripTrailingZeros());
}
@Override
public int compareTo(HardSoftBigDecimalScore other) {
    // The hard level dominates; the soft level only breaks ties.
    var hardComparison = hardScore.compareTo(other.hardScore());
    return hardComparison != 0 ? hardComparison : softScore.compareTo(other.softScore());
}
@Override
public String toShortString() {
    // The predicate marks non-zero levels; presumably zero levels are omitted
    // from the short form by buildShortString — confirm against ScoreUtil.
    return ScoreUtil.buildShortString(this, n -> ((BigDecimal) n).compareTo(BigDecimal.ZERO) != 0, HARD_LABEL, SOFT_LABEL);
}
@Override
public String toString() {
    // e.g. "-1hard/-20soft"; keep this format stable — it is presumably
    // what parseScore() accepts (verify against the parser).
    return hardScore + HARD_LABEL + "/" + softScore + SOFT_LABEL;
}
}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/score/buildin | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/score/buildin/hardsoftlong/HardSoftLongScore.java | package ai.timefold.solver.core.api.score.buildin.hardsoftlong;
import static ai.timefold.solver.core.impl.score.ScoreUtil.HARD_LABEL;
import static ai.timefold.solver.core.impl.score.ScoreUtil.SOFT_LABEL;
import java.util.Objects;
import ai.timefold.solver.core.api.score.Score;
import ai.timefold.solver.core.impl.score.ScoreUtil;
import org.jspecify.annotations.NullMarked;
/**
* This {@link Score} is based on 2 levels of long constraints: hard and soft.
* Hard constraints have priority over soft constraints.
* Hard constraints determine feasibility.
* <p>
* This class is immutable.
*
* @see Score
*/
@NullMarked
public final class HardSoftLongScore implements Score<HardSoftLongScore> {

    public static final HardSoftLongScore ZERO = new HardSoftLongScore(0L, 0L);
    public static final HardSoftLongScore ONE_HARD = new HardSoftLongScore(1L, 0L);
    public static final HardSoftLongScore ONE_SOFT = new HardSoftLongScore(0L, 1L);
    private static final HardSoftLongScore MINUS_ONE_SOFT = new HardSoftLongScore(0L, -1L);
    private static final HardSoftLongScore MINUS_ONE_HARD = new HardSoftLongScore(-1L, 0L);

    /**
     * Parses a score text with a hard and a soft level into a score instance.
     *
     * @param scoreString the textual representation to parse
     * @return the parsed score
     */
    public static HardSoftLongScore parseScore(String scoreString) {
        var tokens = ScoreUtil.parseScoreTokens(HardSoftLongScore.class, scoreString, HARD_LABEL, SOFT_LABEL);
        var parsedHardScore = ScoreUtil.parseLevelAsLong(HardSoftLongScore.class, scoreString, tokens[0]);
        var parsedSoftScore = ScoreUtil.parseLevelAsLong(HardSoftLongScore.class, scoreString, tokens[1]);
        return of(parsedHardScore, parsedSoftScore);
    }

    /**
     * @deprecated Use {@link #of(long, long)} instead.
     * @return init score is always zero
     */
    @Deprecated(forRemoval = true, since = "1.22.0")
    public static HardSoftLongScore ofUninitialized(int initScore, long hardScore, long softScore) {
        return of(hardScore, softScore);
    }

    /**
     * Builds a score, returning a cached instance for the most frequently seen values.
     *
     * @param hardScore the hard level
     * @param softScore the soft level
     * @return a score carrying the given levels
     */
    public static HardSoftLongScore of(long hardScore, long softScore) {
        // Cache hits for common values avoid allocation.
        if (hardScore == 0L) {
            if (softScore == 0L) {
                return ZERO;
            }
            if (softScore == 1L) {
                return ONE_SOFT;
            }
            if (softScore == -1L) {
                return MINUS_ONE_SOFT;
            }
        } else if (softScore == 0L) {
            if (hardScore == 1L) {
                return ONE_HARD;
            }
            if (hardScore == -1L) {
                return MINUS_ONE_HARD;
            }
        }
        return new HardSoftLongScore(hardScore, softScore);
    }

    /**
     * Builds a score with only a hard level; the soft level is zero.
     *
     * @param hardScore the hard level
     * @return a score with the given hard level and a soft level of 0
     */
    public static HardSoftLongScore ofHard(long hardScore) {
        // Cache hits for common values avoid allocation.
        if (hardScore == 0L) {
            return ZERO;
        }
        if (hardScore == 1L) {
            return ONE_HARD;
        }
        if (hardScore == -1L) {
            return MINUS_ONE_HARD;
        }
        return new HardSoftLongScore(hardScore, 0L);
    }

    /**
     * Builds a score with only a soft level; the hard level is zero.
     *
     * @param softScore the soft level
     * @return a score with a hard level of 0 and the given soft level
     */
    public static HardSoftLongScore ofSoft(long softScore) {
        // Cache hits for common values avoid allocation.
        if (softScore == 0L) {
            return ZERO;
        }
        if (softScore == 1L) {
            return ONE_SOFT;
        }
        if (softScore == -1L) {
            return MINUS_ONE_SOFT;
        }
        return new HardSoftLongScore(0L, softScore);
    }

    private final long hardScore;
    private final long softScore;

    /**
     * Private default constructor for default marshalling/unmarshalling of unknown frameworks that use reflection.
     * Such integration is always inferior to the specialized integration modules, such as
     * timefold-solver-jpa, timefold-solver-jackson, timefold-solver-jaxb, ...
     */
    @SuppressWarnings("unused")
    private HardSoftLongScore() {
        this(Long.MIN_VALUE, Long.MIN_VALUE);
    }

    private HardSoftLongScore(long hardScore, long softScore) {
        this.hardScore = hardScore;
        this.softScore = softScore;
    }

    /**
     * The total of the broken negative hard constraints and fulfilled positive hard constraints.
     * Their weight is included in the total.
     * The hard score is usually a negative number because most use cases only have negative constraints.
     *
     * @return higher is better, usually negative, 0 if no hard constraints are broken/fulfilled
     */
    public long hardScore() {
        return hardScore;
    }

    /**
     * As defined by {@link #hardScore()}.
     *
     * @deprecated Use {@link #hardScore()} instead.
     */
    @Deprecated(forRemoval = true)
    public long getHardScore() {
        return hardScore;
    }

    /**
     * The total of the broken negative soft constraints and fulfilled positive soft constraints.
     * Their weight is included in the total.
     * The soft score is usually a negative number because most use cases only have negative constraints.
     * <p>
     * In a normal score comparison, the soft score is irrelevant if the 2 scores don't have the same hard score.
     *
     * @return higher is better, usually negative, 0 if no soft constraints are broken/fulfilled
     */
    public long softScore() {
        return softScore;
    }

    /**
     * As defined by {@link #softScore()}.
     *
     * @deprecated Use {@link #softScore()} instead.
     */
    @Deprecated(forRemoval = true)
    public long getSoftScore() {
        return softScore;
    }

    @Override
    public boolean isFeasible() {
        // Feasibility depends solely on the hard level being non-negative.
        return hardScore >= 0L;
    }

    @Override
    public HardSoftLongScore add(HardSoftLongScore addend) {
        var newHardScore = hardScore + addend.hardScore();
        var newSoftScore = softScore + addend.softScore();
        return of(newHardScore, newSoftScore);
    }

    @Override
    public HardSoftLongScore subtract(HardSoftLongScore subtrahend) {
        var newHardScore = hardScore - subtrahend.hardScore();
        var newSoftScore = softScore - subtrahend.softScore();
        return of(newHardScore, newSoftScore);
    }

    @Override
    public HardSoftLongScore multiply(double multiplicand) {
        // Floor (not truncate) so negative results round towards negative infinity.
        var newHardScore = (long) Math.floor(hardScore * multiplicand);
        var newSoftScore = (long) Math.floor(softScore * multiplicand);
        return of(newHardScore, newSoftScore);
    }

    @Override
    public HardSoftLongScore divide(double divisor) {
        // Floor (not truncate) so negative results round towards negative infinity.
        var newHardScore = (long) Math.floor(hardScore / divisor);
        var newSoftScore = (long) Math.floor(softScore / divisor);
        return of(newHardScore, newSoftScore);
    }

    @Override
    public HardSoftLongScore power(double exponent) {
        // Floor (not truncate) so negative results round towards negative infinity.
        var newHardScore = (long) Math.floor(Math.pow(hardScore, exponent));
        var newSoftScore = (long) Math.floor(Math.pow(softScore, exponent));
        return of(newHardScore, newSoftScore);
    }

    @Override
    public HardSoftLongScore abs() {
        return of(Math.abs(hardScore), Math.abs(softScore));
    }

    @Override
    public HardSoftLongScore zero() {
        // Always the shared, cached all-zero instance.
        return ZERO;
    }

    @Override
    public Number[] toLevelNumbers() {
        // Level order: hard first, then soft.
        return new Number[] { hardScore, softScore };
    }

    @Override
    public boolean equals(Object o) {
        if (!(o instanceof HardSoftLongScore other)) {
            return false;
        }
        return hardScore == other.hardScore()
                && softScore == other.softScore();
    }

    @Override
    public int hashCode() {
        return Objects.hash(hardScore, softScore);
    }

    @Override
    public int compareTo(HardSoftLongScore other) {
        // The hard level dominates; the soft level only breaks ties.
        var hardComparison = Long.compare(hardScore, other.hardScore());
        return hardComparison != 0 ? hardComparison : Long.compare(softScore, other.softScore());
    }

    @Override
    public String toShortString() {
        return ScoreUtil.buildShortString(this, n -> n.longValue() != 0L, HARD_LABEL, SOFT_LABEL);
    }

    @Override
    public String toString() {
        return hardScore + HARD_LABEL + "/" + softScore + SOFT_LABEL;
    }
}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/score/buildin | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/score/buildin/simple/SimpleScore.java | package ai.timefold.solver.core.api.score.buildin.simple;
import ai.timefold.solver.core.api.score.Score;
import ai.timefold.solver.core.impl.score.ScoreUtil;
import org.jspecify.annotations.NullMarked;
/**
* This {@link Score} is based on 1 level of int constraints.
* <p>
* This class is immutable.
*
* @see Score
*/
@NullMarked
public final class SimpleScore implements Score<SimpleScore> {

    public static final SimpleScore ZERO = new SimpleScore(0);
    public static final SimpleScore ONE = new SimpleScore(1);
    private static final SimpleScore MINUS_ONE = new SimpleScore(-1);

    /**
     * Parses a score text with a single level into a score instance.
     *
     * @param scoreString the textual representation to parse
     * @return the parsed score
     */
    public static SimpleScore parseScore(String scoreString) {
        var tokens = ScoreUtil.parseScoreTokens(SimpleScore.class, scoreString, "");
        var parsedScore = ScoreUtil.parseLevelAsInt(SimpleScore.class, scoreString, tokens[0]);
        return of(parsedScore);
    }

    /**
     * @deprecated Use {@link #of(int)} instead.
     * @return init score is always zero
     */
    @Deprecated(forRemoval = true, since = "1.22.0")
    public static SimpleScore ofUninitialized(int initScore, int score) {
        return of(score);
    }

    /**
     * Builds a score, returning a cached instance for the most frequently seen values.
     *
     * @param score the single score level
     * @return a score carrying the given level
     */
    public static SimpleScore of(int score) {
        return switch (score) {
            case 0 -> ZERO;
            case 1 -> ONE;
            case -1 -> MINUS_ONE;
            default -> new SimpleScore(score);
        };
    }

    private final int score;

    /**
     * Private default constructor for default marshalling/unmarshalling of unknown frameworks that use reflection.
     * Such integration is always inferior to the specialized integration modules, such as
     * timefold-solver-jpa, timefold-solver-jackson, timefold-solver-jaxb, ...
     */
    @SuppressWarnings("unused")
    private SimpleScore() {
        this(Integer.MIN_VALUE);
    }

    private SimpleScore(int score) {
        this.score = score;
    }

    /**
     * The total of the broken negative constraints and fulfilled positive constraints.
     * Their weight is included in the total.
     * The score is usually a negative number because most use cases only have negative constraints.
     *
     * @return higher is better, usually negative, 0 if no constraints are broken/fulfilled
     */
    public int score() {
        return score;
    }

    /**
     * As defined by {@link #score()}.
     *
     * @deprecated Use {@link #score()} instead.
     */
    @Deprecated(forRemoval = true)
    public int getScore() {
        return score;
    }

    @Override
    public SimpleScore add(SimpleScore addend) {
        var sum = score + addend.score();
        return of(sum);
    }

    @Override
    public SimpleScore subtract(SimpleScore subtrahend) {
        var difference = score - subtrahend.score();
        return of(difference);
    }

    @Override
    public SimpleScore multiply(double multiplicand) {
        // Floor (not truncate) so negative results round towards negative infinity.
        return of((int) Math.floor(score * multiplicand));
    }

    @Override
    public SimpleScore divide(double divisor) {
        // Floor (not truncate) so negative results round towards negative infinity.
        return of((int) Math.floor(score / divisor));
    }

    @Override
    public SimpleScore power(double exponent) {
        // Floor (not truncate) so negative results round towards negative infinity.
        return of((int) Math.floor(Math.pow(score, exponent)));
    }

    @Override
    public SimpleScore abs() {
        return of(Math.abs(score));
    }

    @Override
    public SimpleScore zero() {
        // Always the shared, cached zero instance.
        return ZERO;
    }

    @Override
    public boolean isFeasible() {
        // There is no hard level, so every SimpleScore is feasible.
        return true;
    }

    @Override
    public Number[] toLevelNumbers() {
        return new Number[] { score };
    }

    @Override
    public boolean equals(Object o) {
        if (!(o instanceof SimpleScore other)) {
            return false;
        }
        return score == other.score();
    }

    @Override
    public int hashCode() {
        return Integer.hashCode(score);
    }

    @Override
    public int compareTo(SimpleScore other) {
        return Integer.compare(score, other.score());
    }

    @Override
    public String toShortString() {
        return ScoreUtil.buildShortString(this, n -> n.intValue() != 0, "");
    }

    @Override
    public String toString() {
        return Integer.toString(score);
    }
}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/score/buildin | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/score/buildin/simplebigdecimal/SimpleBigDecimalScore.java | package ai.timefold.solver.core.api.score.buildin.simplebigdecimal;
import java.math.BigDecimal;
import java.math.RoundingMode;
import ai.timefold.solver.core.api.score.Score;
import ai.timefold.solver.core.impl.score.ScoreUtil;
import org.jspecify.annotations.NullMarked;
/**
* This {@link Score} is based on 1 level of {@link BigDecimal} constraints.
* <p>
* This class is immutable.
*
* @see Score
*/
@NullMarked
public final class SimpleBigDecimalScore implements Score<SimpleBigDecimalScore> {

    public static final SimpleBigDecimalScore ZERO = new SimpleBigDecimalScore(BigDecimal.ZERO);
    public static final SimpleBigDecimalScore ONE = new SimpleBigDecimalScore(BigDecimal.ONE);

    /**
     * Parses a score text with a single level into a score instance.
     *
     * @param scoreString the textual representation to parse
     * @return the parsed score
     */
    public static SimpleBigDecimalScore parseScore(String scoreString) {
        var tokens = ScoreUtil.parseScoreTokens(SimpleBigDecimalScore.class, scoreString, "");
        var parsedScore = ScoreUtil.parseLevelAsBigDecimal(SimpleBigDecimalScore.class, scoreString, tokens[0]);
        return of(parsedScore);
    }

    /**
     * @deprecated Use {@link #of(BigDecimal)} instead.
     * @return init score is always zero
     */
    @Deprecated(forRemoval = true, since = "1.22.0")
    public static SimpleBigDecimalScore ofUninitialized(int initScore, BigDecimal score) {
        return of(score);
    }

    /**
     * Builds a score, returning a cached instance for the most frequently seen values.
     * Note that only {@link BigDecimal#ONE} itself hits the {@link #ONE} cache,
     * because {@link BigDecimal#equals(Object)} is scale-sensitive.
     *
     * @param score the single score level
     * @return a score carrying the given level
     */
    public static SimpleBigDecimalScore of(BigDecimal score) {
        if (score.signum() == 0) {
            return ZERO;
        }
        return score.equals(BigDecimal.ONE) ? ONE : new SimpleBigDecimalScore(score);
    }

    private final BigDecimal score;

    /**
     * Private default constructor for default marshalling/unmarshalling of unknown frameworks that use reflection.
     * Such integration is always inferior to the specialized integration modules, such as
     * timefold-solver-jpa, timefold-solver-jackson, timefold-solver-jaxb, ...
     */
    @SuppressWarnings("unused")
    private SimpleBigDecimalScore() {
        this(BigDecimal.ZERO);
    }

    private SimpleBigDecimalScore(BigDecimal score) {
        this.score = score;
    }

    /**
     * The total of the broken negative constraints and fulfilled positive constraints.
     * Their weight is included in the total.
     * The score is usually a negative number because most use cases only have negative constraints.
     *
     * @return higher is better, usually negative, 0 if no constraints are broken/fulfilled
     */
    public BigDecimal score() {
        return score;
    }

    /**
     * As defined by {@link #score()}.
     *
     * @deprecated Use {@link #score()} instead.
     */
    @Deprecated(forRemoval = true)
    public BigDecimal getScore() {
        return score;
    }

    @Override
    public SimpleBigDecimalScore add(SimpleBigDecimalScore addend) {
        var sum = score.add(addend.score());
        return of(sum);
    }

    @Override
    public SimpleBigDecimalScore subtract(SimpleBigDecimalScore subtrahend) {
        var difference = score.subtract(subtrahend.score());
        return of(difference);
    }

    @Override
    public SimpleBigDecimalScore multiply(double multiplicand) {
        // BigDecimal.valueOf() is used deliberately; "new BigDecimal(multiplicand, MathContext.UNLIMITED)"
        // combined with the floor rounding below gives unwanted behaviour.
        var factor = BigDecimal.valueOf(multiplicand);
        // The (unspecified) scale/precision of the factor must not leak into the result's scale/precision.
        var product = score.multiply(factor).setScale(score.scale(), RoundingMode.FLOOR);
        return of(product);
    }

    @Override
    public SimpleBigDecimalScore divide(double divisor) {
        // BigDecimal.valueOf() is used deliberately; see multiply(double).
        var divisorValue = BigDecimal.valueOf(divisor);
        // The (unspecified) scale/precision of the divisor must not leak into the result's scale/precision.
        var quotient = score.divide(divisorValue, score.scale(), RoundingMode.FLOOR);
        return of(quotient);
    }

    @Override
    public SimpleBigDecimalScore power(double exponent) {
        // BigDecimal.valueOf() is used deliberately; see multiply(double).
        var exponentValue = BigDecimal.valueOf(exponent);
        // TODO FIXME remove .intValue() so non-integer exponents produce correct results
        // None of the normal Java libraries support BigDecimal.pow(BigDecimal)
        var result = score.pow(exponentValue.intValue()).setScale(score.scale(), RoundingMode.FLOOR);
        return of(result);
    }

    @Override
    public SimpleBigDecimalScore abs() {
        return of(score.abs());
    }

    @Override
    public SimpleBigDecimalScore zero() {
        // Always the shared, cached zero instance.
        return ZERO;
    }

    @Override
    public boolean isFeasible() {
        // There is no hard level, so every SimpleBigDecimalScore is feasible.
        return true;
    }

    @Override
    public Number[] toLevelNumbers() {
        return new Number[] { score };
    }

    @Override
    public boolean equals(Object o) {
        // Scale-insensitive equality (e.g. 1.0 equals 1.00), consistent with hashCode().
        if (!(o instanceof SimpleBigDecimalScore other)) {
            return false;
        }
        return score.stripTrailingZeros().equals(other.score().stripTrailingZeros());
    }

    @Override
    public int hashCode() {
        // Strips trailing zeros so the hash is consistent with the scale-insensitive equals().
        return score.stripTrailingZeros().hashCode();
    }

    @Override
    public int compareTo(SimpleBigDecimalScore other) {
        return score.compareTo(other.score());
    }

    @Override
    public String toShortString() {
        return ScoreUtil.buildShortString(this, n -> ((BigDecimal) n).compareTo(BigDecimal.ZERO) != 0, "");
    }

    @Override
    public String toString() {
        return score.toString();
    }
}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/score/buildin | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/score/buildin/simplelong/SimpleLongScore.java | package ai.timefold.solver.core.api.score.buildin.simplelong;
import ai.timefold.solver.core.api.score.Score;
import ai.timefold.solver.core.impl.score.ScoreUtil;
import org.jspecify.annotations.NullMarked;
/**
* This {@link Score} is based on 1 level of long constraints.
* <p>
* This class is immutable.
*
* @see Score
*/
@NullMarked
public final class SimpleLongScore implements Score<SimpleLongScore> {

    public static final SimpleLongScore ZERO = new SimpleLongScore(0L);
    public static final SimpleLongScore ONE = new SimpleLongScore(1L);
    public static final SimpleLongScore MINUS_ONE = new SimpleLongScore(-1L);

    /**
     * Parses a score text with a single level into a score instance.
     *
     * @param scoreString the textual representation to parse
     * @return the parsed score
     */
    public static SimpleLongScore parseScore(String scoreString) {
        var tokens = ScoreUtil.parseScoreTokens(SimpleLongScore.class, scoreString, "");
        var parsedScore = ScoreUtil.parseLevelAsLong(SimpleLongScore.class, scoreString, tokens[0]);
        return of(parsedScore);
    }

    /**
     * @deprecated Use {@link #of(long)} instead.
     * @return init score is always zero
     */
    @Deprecated(forRemoval = true, since = "1.22.0")
    public static SimpleLongScore ofUninitialized(int initScore, long score) {
        return of(score);
    }

    /**
     * Builds a score, returning a cached instance for the most frequently seen values.
     *
     * @param score the single score level
     * @return a score carrying the given level
     */
    public static SimpleLongScore of(long score) {
        // switch does not support long, hence the if-chain.
        if (score == 0L) {
            return ZERO;
        }
        if (score == 1L) {
            return ONE;
        }
        if (score == -1L) {
            return MINUS_ONE;
        }
        return new SimpleLongScore(score);
    }

    private final long score;

    /**
     * Private default constructor for default marshalling/unmarshalling of unknown frameworks that use reflection.
     * Such integration is always inferior to the specialized integration modules, such as
     * timefold-solver-jpa, timefold-solver-jackson, timefold-solver-jaxb, ...
     */
    @SuppressWarnings("unused")
    private SimpleLongScore() {
        this(Long.MIN_VALUE);
    }

    private SimpleLongScore(long score) {
        this.score = score;
    }

    /**
     * The total of the broken negative constraints and fulfilled positive constraints.
     * Their weight is included in the total.
     * The score is usually a negative number because most use cases only have negative constraints.
     *
     * @return higher is better, usually negative, 0 if no constraints are broken/fulfilled
     */
    public long score() {
        return score;
    }

    /**
     * As defined by {@link #score()}.
     *
     * @deprecated Use {@link #score()} instead.
     */
    @Deprecated(forRemoval = true)
    public long getScore() {
        return score;
    }

    @Override
    public SimpleLongScore add(SimpleLongScore addend) {
        var sum = score + addend.score();
        return of(sum);
    }

    @Override
    public SimpleLongScore subtract(SimpleLongScore subtrahend) {
        var difference = score - subtrahend.score();
        return of(difference);
    }

    @Override
    public SimpleLongScore multiply(double multiplicand) {
        // Floor (not truncate) so negative results round towards negative infinity.
        return of((long) Math.floor(score * multiplicand));
    }

    @Override
    public SimpleLongScore divide(double divisor) {
        // Floor (not truncate) so negative results round towards negative infinity.
        return of((long) Math.floor(score / divisor));
    }

    @Override
    public SimpleLongScore power(double exponent) {
        // Floor (not truncate) so negative results round towards negative infinity.
        return of((long) Math.floor(Math.pow(score, exponent)));
    }

    @Override
    public SimpleLongScore abs() {
        return of(Math.abs(score));
    }

    @Override
    public SimpleLongScore zero() {
        // Always the shared, cached zero instance.
        return ZERO;
    }

    @Override
    public boolean isFeasible() {
        // There is no hard level, so every SimpleLongScore is feasible.
        return true;
    }

    @Override
    public Number[] toLevelNumbers() {
        return new Number[] { score };
    }

    @Override
    public boolean equals(Object o) {
        if (!(o instanceof SimpleLongScore other)) {
            return false;
        }
        return score == other.score();
    }

    @Override
    public int hashCode() {
        return Long.hashCode(score);
    }

    @Override
    public int compareTo(SimpleLongScore other) {
        return Long.compare(score, other.score());
    }

    @Override
    public String toShortString() {
        return ScoreUtil.buildShortString(this, n -> n.longValue() != 0L, "");
    }

    @Override
    public String toString() {
        return Long.toString(score);
    }
}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/score | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/score/calculator/ConstraintMatchAwareIncrementalScoreCalculator.java | package ai.timefold.solver.core.api.score.calculator;
import java.util.Collection;
import java.util.Map;
import ai.timefold.solver.core.api.domain.solution.PlanningSolution;
import ai.timefold.solver.core.api.score.Score;
import ai.timefold.solver.core.api.score.ScoreExplanation;
import ai.timefold.solver.core.api.score.constraint.ConstraintMatchTotal;
import ai.timefold.solver.core.api.score.constraint.Indictment;
import org.jspecify.annotations.NonNull;
import org.jspecify.annotations.Nullable;
/**
* Allows a {@link IncrementalScoreCalculator} to report {@link ConstraintMatchTotal}s
* for explaining a score (= which score constraints match for how much)
* and also for score corruption analysis.
*
* @param <Solution_> the solution type, the class with the {@link PlanningSolution} annotation
* @param <Score_> the {@link Score} type
*/
public interface ConstraintMatchAwareIncrementalScoreCalculator<Solution_, Score_ extends Score<Score_>>
        extends IncrementalScoreCalculator<Solution_, Score_> {

    /**
     * Allows for increased performance because the implementation only needs to track
     * constraint matches if constraintMatchEnabled is true.
     * <p>
     * Every implementation should call {@link #resetWorkingSolution}
     * and only handle the constraintMatchEnabled parameter specifically (or ignore it).
     *
     * @param workingSolution to pass to {@link #resetWorkingSolution}.
     * @param constraintMatchEnabled true if {@link #getConstraintMatchTotals()} or {@link #getIndictmentMap()} might be called.
     */
    void resetWorkingSolution(@NonNull Solution_ workingSolution, boolean constraintMatchEnabled);

    /**
     * @return never null;
     *         if a constraint is present in the problem but resulted in no matches,
     *         it should still be present with a {@link ConstraintMatchTotal#getConstraintMatchSet()} size of 0.
     * @throws IllegalStateException if {@link #resetWorkingSolution}'s constraintMatchEnabled parameter was false
     */
    @NonNull
    Collection<ConstraintMatchTotal<Score_>> getConstraintMatchTotals();

    /**
     * @return null if it should be calculated non-incrementally from {@link #getConstraintMatchTotals()}
     * @throws IllegalStateException if {@link #resetWorkingSolution}'s constraintMatchEnabled parameter was false
     * @see ScoreExplanation#getIndictmentMap()
     */
    @Nullable
    Map<Object, Indictment<Score_>> getIndictmentMap();
}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/score | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/score/calculator/EasyScoreCalculator.java | package ai.timefold.solver.core.api.score.calculator;
import ai.timefold.solver.core.api.domain.solution.PlanningSolution;
import ai.timefold.solver.core.api.score.Score;
import org.jspecify.annotations.NonNull;
/**
* Used for easy java {@link Score} calculation. This is non-incremental calculation, which is slow.
* <p>
* An implementation must be stateless.
*
* @param <Solution_> the solution type, the class with the {@link PlanningSolution} annotation
* @param <Score_> the score type to go with the solution
*/
public interface EasyScoreCalculator<Solution_, Score_ extends Score<Score_>> {

    /**
     * Calculates the {@link Score} of the given solution from scratch.
     * <p>
     * This method is only called if the {@link Score} cannot be predicted.
     * The {@link Score} can be predicted for example after an undo move.
     *
     * @param solution the working solution to evaluate
     * @return the score of the given solution
     */
    @NonNull
    Score_ calculateScore(@NonNull Solution_ solution);
}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/score | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/score/calculator/IncrementalScoreCalculator.java | package ai.timefold.solver.core.api.score.calculator;
import ai.timefold.solver.core.api.domain.entity.PlanningEntity;
import ai.timefold.solver.core.api.domain.solution.PlanningSolution;
import ai.timefold.solver.core.api.domain.variable.PlanningVariable;
import ai.timefold.solver.core.api.score.Score;
import org.jspecify.annotations.NonNull;
/**
* Used for incremental java {@link Score} calculation.
* This is much faster than {@link EasyScoreCalculator} but requires much more code to implement too.
* <p>
* Any implementation is naturally stateful.
*
* @param <Solution_> the solution type, the class with the {@link PlanningSolution} annotation
* @param <Score_> the score type to go with the solution
*/
public interface IncrementalScoreCalculator<Solution_, Score_ extends Score<Score_>> {

    /**
     * There are no {@link #beforeEntityAdded(Object)} and {@link #afterEntityAdded(Object)} calls
     * for entities that are already present in the workingSolution.
     */
    void resetWorkingSolution(@NonNull Solution_ workingSolution);

    /**
     * @param entity an instance of a {@link PlanningEntity} class
     */
    void beforeEntityAdded(@NonNull Object entity);

    /**
     * @param entity an instance of a {@link PlanningEntity} class
     */
    void afterEntityAdded(@NonNull Object entity);

    /**
     * @param entity an instance of a {@link PlanningEntity} class
     * @param variableName either a genuine or shadow {@link PlanningVariable}
     */
    void beforeVariableChanged(@NonNull Object entity, @NonNull String variableName);

    /**
     * @param entity an instance of a {@link PlanningEntity} class
     * @param variableName either a genuine or shadow {@link PlanningVariable}
     */
    void afterVariableChanged(@NonNull Object entity, @NonNull String variableName);

    /**
     * Called before an element is assigned to a list variable.
     * The default implementation does nothing.
     *
     * @param variableName the name of the list variable
     * @param element the element about to be assigned
     */
    default void beforeListVariableElementAssigned(@NonNull String variableName, @NonNull Object element) {
    }

    /**
     * Called after an element has been assigned to a list variable.
     * The default implementation does nothing.
     *
     * @param variableName the name of the list variable
     * @param element the element that was assigned
     */
    default void afterListVariableElementAssigned(@NonNull String variableName, @NonNull Object element) {
    }

    /**
     * Called before an element is unassigned from a list variable.
     * The default implementation does nothing.
     *
     * @param variableName the name of the list variable
     * @param element the element about to be unassigned
     */
    default void beforeListVariableElementUnassigned(@NonNull String variableName, @NonNull Object element) {
    }

    /**
     * Called after an element has been unassigned from a list variable.
     * The default implementation does nothing.
     *
     * @param variableName the name of the list variable
     * @param element the element that was unassigned
     */
    default void afterListVariableElementUnassigned(@NonNull String variableName, @NonNull Object element) {
    }

    /**
     * Called before a range of a list variable changes.
     * The default implementation does nothing.
     *
     * @param entity an instance of a {@link PlanningEntity} class
     * @param variableName the name of the list variable
     * @param fromIndex the start of the changed range
     * @param toIndex the end of the changed range
     */
    default void beforeListVariableChanged(@NonNull Object entity, @NonNull String variableName, int fromIndex, int toIndex) {
    }

    /**
     * Called after a range of a list variable has changed.
     * The default implementation does nothing.
     *
     * @param entity an instance of a {@link PlanningEntity} class
     * @param variableName the name of the list variable
     * @param fromIndex the start of the changed range
     * @param toIndex the end of the changed range
     */
    default void afterListVariableChanged(@NonNull Object entity, @NonNull String variableName, int fromIndex, int toIndex) {
    }

    /**
     * @param entity an instance of a {@link PlanningEntity} class
     */
    void beforeEntityRemoved(@NonNull Object entity);

    /**
     * @param entity an instance of a {@link PlanningEntity} class
     */
    void afterEntityRemoved(@NonNull Object entity);

    /**
     * This method is only called if the {@link Score} cannot be predicted.
     * The {@link Score} can be predicted for example after an undo move.
     */
    @NonNull
    Score_ calculateScore();
}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/score | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/score/constraint/ConstraintMatch.java | package ai.timefold.solver.core.api.score.constraint;
import static java.util.Objects.requireNonNull;
import java.util.Collection;
import java.util.List;
import ai.timefold.solver.core.api.score.Score;
import ai.timefold.solver.core.api.score.stream.Constraint;
import ai.timefold.solver.core.api.score.stream.ConstraintJustification;
import ai.timefold.solver.core.api.score.stream.DefaultConstraintJustification;
import ai.timefold.solver.core.api.solver.SolutionManager;
import org.jspecify.annotations.NonNull;
import org.jspecify.annotations.Nullable;
/**
* Retrievable from {@link ConstraintMatchTotal#getConstraintMatchSet()}
* and {@link Indictment#getConstraintMatchSet()}.
*
* <p>
* This class implements {@link Comparable} for consistent ordering of constraint matches in visualizations.
* The details of this ordering are unspecified and are subject to change.
*
* <p>
* If possible, prefer using {@link SolutionManager#analyze(Object)} instead.
*
* @param <Score_> the actual score type
*/
public final class ConstraintMatch<Score_ extends Score<Score_>> implements Comparable<ConstraintMatch<Score_>> {
private final ConstraintRef constraintRef;
private final ConstraintJustification justification;
private final List<Object> indictedObjectList;
private final Score_ score;
/**
* @deprecated Prefer {@link ConstraintMatch#ConstraintMatch(ConstraintRef, ConstraintJustification, Collection, Score)}.
* @param constraintPackage never null
* @param constraintName never null
* @param justificationList never null, sometimes empty
* @param score never null
*/
@Deprecated(forRemoval = true)
public ConstraintMatch(String constraintPackage, String constraintName, List<Object> justificationList, Score_ score) {
    // Wraps the legacy justification list in a DefaultConstraintJustification
    // and delegates to the justification-based constructor.
    this(constraintPackage, constraintName, DefaultConstraintJustification.of(score, justificationList),
            justificationList, score);
}
/**
* @deprecated Prefer {@link ConstraintMatch#ConstraintMatch(ConstraintRef, ConstraintJustification, Collection, Score)}.
* @param constraintPackage never null
* @param constraintName never null
* @param justification never null
* @param score never null
*/
@Deprecated(forRemoval = true, since = "1.4.0")
public ConstraintMatch(String constraintPackage, String constraintName, ConstraintJustification justification,
Collection<Object> indictedObjectList, Score_ score) {
this(ConstraintRef.of(constraintPackage, constraintName), justification, indictedObjectList, score);
}
/**
* @deprecated Prefer {@link ConstraintMatch#ConstraintMatch(ConstraintRef, ConstraintJustification, Collection, Score)}.
* @param constraint never null
* @param justification never null
* @param score never null
*/
@Deprecated(forRemoval = true, since = "1.4.0")
public ConstraintMatch(Constraint constraint, ConstraintJustification justification, Collection<Object> indictedObjectList,
Score_ score) {
this(constraint.getConstraintRef(), justification, indictedObjectList, score);
}
/**
* @deprecated Prefer {@link ConstraintMatch#ConstraintMatch(ConstraintRef, ConstraintJustification, Collection, Score)}.
* @param constraintId never null
* @param constraintPackage never null
* @param constraintName never null
* @param justification never null
* @param score never null
*/
@Deprecated(forRemoval = true, since = "1.4.0")
public ConstraintMatch(String constraintId, String constraintPackage, String constraintName,
ConstraintJustification justification, Collection<Object> indictedObjectList, Score_ score) {
this(new ConstraintRef(constraintPackage, constraintName, constraintId), justification, indictedObjectList, score);
}
/**
* @param constraintRef unique identifier of the constraint
* @param justification only null if justifications are disabled
* @param indictedObjectList never null, empty if justifications are disabled
* @param score penalty or reward associated with the constraint match
*/
public ConstraintMatch(@NonNull ConstraintRef constraintRef, @Nullable ConstraintJustification justification,
@NonNull Collection<Object> indictedObjectList, @NonNull Score_ score) {
this.constraintRef = requireNonNull(constraintRef);
this.justification = justification;
this.indictedObjectList =
requireNonNull(indictedObjectList) instanceof List<Object> list ? list : List.copyOf(indictedObjectList);
this.score = requireNonNull(score);
}
public @NonNull ConstraintRef getConstraintRef() {
return constraintRef;
}
/**
* @deprecated Prefer {@link #getConstraintRef()} instead.
* @return maybe null
*/
@Deprecated(forRemoval = true, since = "1.4.0")
public String getConstraintPackage() {
return constraintRef.packageName();
}
/**
* @deprecated Prefer {@link #getConstraintRef()} instead.
* @return never null
*/
@Deprecated(forRemoval = true, since = "1.4.0")
public String getConstraintName() {
return constraintRef.constraintName();
}
/**
* @deprecated Prefer {@link #getConstraintRef()} instead.
* @return never null
*/
@Deprecated(forRemoval = true, since = "1.4.0")
public String getConstraintId() {
return constraintRef.constraintId();
}
/**
* Return a list of justifications for the constraint.
* <p>
* This method has a different meaning based on which score director the constraint comes from.
* <ul>
* <li>For constraint streams, it returns a list of facts from the matching tuple for backwards compatibility
* (eg. [A, B] for a bi stream),
* unless a custom justification mapping was provided, in which case it throws an exception,
* pointing users towards {@link #getJustification()}.</li>
* <li>For incremental score calculation, it returns what the calculator is implemented to return.</li>
* </ul>
*
* @deprecated Prefer {@link #getJustification()} or {@link #getIndictedObjectList()}.
* @return never null
*/
@Deprecated(forRemoval = true)
public List<Object> getJustificationList() {
if (justification instanceof DefaultConstraintJustification constraintJustification) { // No custom function provided.
return constraintJustification.getFacts();
} else {
throw new IllegalStateException("Cannot retrieve list of facts from a custom constraint justification ("
+ justification + ").\n" +
"Use ConstraintMatch#getJustification() method instead.");
}
}
/**
* Return a singular justification for the constraint.
* <p>
* This method has a different meaning based on which score director the constraint comes from.
* <ul>
* <li>For constraint streams, it returns {@link DefaultConstraintJustification} from the matching tuple
* (eg. [A, B] for a bi stream), unless a custom justification mapping was provided,
* in which case it returns the return value of that function.</li>
* <li>For incremental score calculation, it returns what the calculator is implemented to return.</li>
* <li>It may return null, if justification support was disabled altogether.</li>
* </ul>
*/
public <Justification_ extends ConstraintJustification> @Nullable Justification_ getJustification() {
return (Justification_) justification;
}
/**
* Returns a set of objects indicted for causing this constraint match.
* <p>
* This method has a different meaning based on which score director the constraint comes from.
* <ul>
* <li>For constraint streams, it returns the facts from the matching tuple
* (eg. [A, B] for a bi stream), unless a custom indictment mapping was provided,
* in which case it returns the return value of that function.</li>
* <li>For incremental score calculation, it returns what the calculator is implemented to return.</li>
* <li>It may return an empty list, if justification support was disabled altogether.</li>
* </ul>
*
* @return may be empty or contain null
*/
public @NonNull List<Object> getIndictedObjectList() {
return indictedObjectList;
}
public @NonNull Score_ getScore() {
return score;
}
// ************************************************************************
// Worker methods
// ************************************************************************
public String getIdentificationString() {
return getConstraintRef().constraintId() + "/" + justification;
}
@Override
public int compareTo(ConstraintMatch<Score_> other) {
if (!constraintRef.equals(other.constraintRef)) {
return constraintRef.compareTo(other.constraintRef);
} else if (!score.equals(other.score)) {
return score.compareTo(other.score);
} else if (justification == null) {
return other.justification == null ? 0 : -1;
} else if (other.justification == null) {
return 1;
} else if (justification instanceof Comparable comparable) {
return comparable.compareTo(other.justification);
}
return Integer.compare(System.identityHashCode(justification),
System.identityHashCode(other.justification));
}
@Override
public String toString() {
return getIdentificationString() + "=" + score;
}
}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/score | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/score/constraint/ConstraintMatchTotal.java | package ai.timefold.solver.core.api.score.constraint;
import java.util.Set;
import ai.timefold.solver.core.api.domain.solution.PlanningSolution;
import ai.timefold.solver.core.api.domain.variable.PlanningVariable;
import ai.timefold.solver.core.api.score.Score;
import ai.timefold.solver.core.api.score.ScoreExplanation;
import ai.timefold.solver.core.api.solver.SolutionManager;
import org.jspecify.annotations.NonNull;
/**
* Explains the {@link Score} of a {@link PlanningSolution}, from the opposite side than {@link Indictment}.
* Retrievable from {@link ScoreExplanation#getConstraintMatchTotalMap()}.
*
* <p>
* If possible, prefer using {@link SolutionManager#analyze(Object)} instead.
*
* @param <Score_> the actual score type
*/
public interface ConstraintMatchTotal<Score_ extends Score<Score_>> {

    /**
     * @param constraintPackage never null
     * @param constraintName never null
     * @return never null
     * @deprecated Prefer {@link ConstraintRef#of(String, String)}.
     */
    @Deprecated(forRemoval = true, since = "1.4.0")
    static String composeConstraintId(String constraintPackage, String constraintName) {
        // Delegate to ConstraintRef so the constraint id format has a single source of truth.
        return ConstraintRef.composeConstraintId(constraintPackage, constraintName);
    }

    /**
     * @return the unique identifier of the constraint that all matches in {@link #getConstraintMatchSet()} belong to
     */
    @NonNull
    ConstraintRef getConstraintRef();

    /**
     * @return never null
     * @deprecated Prefer {@link #getConstraintRef()}.
     */
    @Deprecated(forRemoval = true, since = "1.4.0")
    default String getConstraintPackage() {
        return getConstraintRef().packageName();
    }

    /**
     * @return never null
     * @deprecated Prefer {@link #getConstraintRef()}.
     */
    @Deprecated(forRemoval = true, since = "1.4.0")
    default String getConstraintName() {
        return getConstraintRef().constraintName();
    }

    /**
     * The effective value of constraint weight after applying optional overrides.
     * It is independent to the state of the {@link PlanningVariable planning variables}.
     * Do not confuse with {@link #getScore()}.
     */
    @NonNull
    Score_ getConstraintWeight();

    @NonNull
    Set<ConstraintMatch<Score_>> getConstraintMatchSet();

    /**
     * @return {@code >= 0}
     */
    default int getConstraintMatchCount() {
        return getConstraintMatchSet().size();
    }

    /**
     * Sum of the {@link #getConstraintMatchSet()}'s {@link ConstraintMatch#getScore()}.
     */
    @NonNull
    Score_ getScore();

    /**
     * @return never null
     * @deprecated Prefer {@link #getConstraintRef()}.
     */
    @Deprecated(forRemoval = true, since = "1.4.0")
    default String getConstraintId() {
        return getConstraintRef().constraintId();
    }
}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/score | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/score/constraint/ConstraintRef.java | package ai.timefold.solver.core.api.score.constraint;
import java.util.Objects;
import ai.timefold.solver.core.api.domain.constraintweight.ConstraintConfiguration;
import ai.timefold.solver.core.api.domain.constraintweight.ConstraintWeight;
import org.jspecify.annotations.NonNull;
/**
* Represents a unique identifier of a constraint.
* <p>
* Users should have no need to create instances of this record.
* If necessary, use {@link ConstraintRef#of(String, String)} and not the record's constructors.
*
* @param packageName The constraint package is the namespace of the constraint.
* When using a {@link ConstraintConfiguration},
* it is equal to the {@link ConstraintWeight#constraintPackage()}.
* It is not recommended for the user to set this, or to read its value;
* instead, the user should use whatever the solver provided as default and not rely on this information at all.
* The entire concept of constraint package is likely to be removed in a future version of the solver.
* @param constraintName The constraint name.
* It might not be unique, but {@link #constraintId()} is unique.
* When using a {@link ConstraintConfiguration},
* it is equal to the {@link ConstraintWeight#value()}.
* @param constraintId Always derived from {@code packageName} and {@code constraintName}.
*/
public record ConstraintRef(@NonNull String packageName, @NonNull String constraintName, String constraintId)
        implements
            Comparable<ConstraintRef> {

    private static final char PACKAGE_SEPARATOR = '/';

    /**
     * Creates a reference from a package and a constraint name;
     * the constraint id is derived automatically by the canonical constructor.
     */
    public static ConstraintRef of(String packageName, String constraintName) {
        return new ConstraintRef(packageName, constraintName, null);
    }

    /**
     * Parses a constraint id of the form {@code packageName/constraintName} back into a reference.
     *
     * @throws IllegalArgumentException if the id does not contain the package separator
     */
    public static ConstraintRef parseId(String constraintId) {
        var slashIndex = constraintId.indexOf(PACKAGE_SEPARATOR);
        if (slashIndex < 0) {
            throw new IllegalArgumentException(
                    "The constraintId (%s) is invalid as it does not contain a package separator (%s)."
                            .formatted(constraintId, PACKAGE_SEPARATOR));
        }
        var packageName = constraintId.substring(0, slashIndex);
        var constraintName = constraintId.substring(slashIndex + 1);
        return new ConstraintRef(packageName, constraintName, constraintId);
    }

    /**
     * Composes the canonical constraint id: {@code packageName/constraintName}.
     */
    public static String composeConstraintId(String packageName, String constraintName) {
        return packageName + PACKAGE_SEPARATOR + constraintName;
    }

    public ConstraintRef {
        packageName = validate(packageName, "constraint package");
        constraintName = validate(constraintName, "constraint name");
        var expectedConstraintId = composeConstraintId(packageName, constraintName);
        // A caller-supplied id must match the derived one exactly; null means "derive it for me".
        if (constraintId != null && !constraintId.equals(expectedConstraintId)) {
            throw new IllegalArgumentException(
                    "Specifying custom constraintId (%s) is not allowed."
                            .formatted(constraintId));
        }
        constraintId = expectedConstraintId;
    }

    /**
     * Trims the identifier and rejects empty values or values containing the package separator.
     *
     * @param identifier never null
     * @param type human-readable name of the identifier, used in error messages
     * @return the trimmed identifier
     */
    private static String validate(String identifier, String type) {
        var sanitized = Objects.requireNonNull(identifier).trim();
        if (sanitized.isEmpty()) {
            throw new IllegalArgumentException("The %s cannot be empty."
                    .formatted(type));
        } else if (sanitized.indexOf(PACKAGE_SEPARATOR) >= 0) { // indexOf(char) avoids allocating a String per check.
            throw new IllegalArgumentException("The %s (%s) cannot contain a package separator (%s)."
                    .formatted(type, sanitized, PACKAGE_SEPARATOR));
        }
        return sanitized;
    }

    @Override
    public String toString() {
        return constraintId;
    }

    @Override
    public int compareTo(ConstraintRef other) {
        return constraintId.compareTo(other.constraintId);
    }
}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/score | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/score/constraint/Indictment.java | package ai.timefold.solver.core.api.score.constraint;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
import ai.timefold.solver.core.api.domain.solution.PlanningSolution;
import ai.timefold.solver.core.api.score.Score;
import ai.timefold.solver.core.api.score.ScoreExplanation;
import ai.timefold.solver.core.api.score.stream.ConstraintJustification;
import org.jspecify.annotations.NonNull;
/**
* Explains the {@link Score} of a {@link PlanningSolution}, from the opposite side than {@link ConstraintMatchTotal}.
* Retrievable from {@link ScoreExplanation#getIndictmentMap()}.
*
* @param <Score_> the actual score type
*/
public interface Indictment<Score_ extends Score<Score_>> {

    /**
     * As defined by {@link #getIndictedObject()}.
     * <p>
     * This is a poorly named legacy method, which does not in fact return a justification, but an indicted object.
     * Each indictment may have multiple justifications, and they are accessed by {@link #getJustificationList()}.
     *
     * @deprecated Prefer {@link #getIndictedObject()}.
     * @return never null
     */
    @Deprecated(forRemoval = true)
    default Object getJustification() {
        return getIndictedObject();
    }

    /**
     * The object that was involved in causing the constraints to match.
     * It is part of {@link ConstraintMatch#getIndictedObjectList()} of every {@link ConstraintMatch}
     * returned by {@link #getConstraintMatchSet()}.
     *
     * @param <IndictedObject_> Shorthand so that the user does not need to cast in user code.
     */
    <IndictedObject_> @NonNull IndictedObject_ getIndictedObject();

    @NonNull
    Set<ConstraintMatch<Score_>> getConstraintMatchSet();

    /**
     * @return {@code >= 0}
     */
    default int getConstraintMatchCount() {
        return getConstraintMatchSet().size();
    }

    /**
     * Retrieve {@link ConstraintJustification} instances associated with {@link ConstraintMatch}es in
     * {@link #getConstraintMatchSet()}.
     * This is equivalent to retrieving {@link #getConstraintMatchSet()}
     * and collecting all {@link ConstraintMatch#getJustification()} objects into a list.
     *
     * @return guaranteed to contain unique instances
     */
    @NonNull
    List<ConstraintJustification> getJustificationList();

    /**
     * Retrieve {@link ConstraintJustification} instances associated with {@link ConstraintMatch}es in
     * {@link #getConstraintMatchSet()}, which are of (or extend) a given constraint justification implementation.
     * This is equivalent to retrieving {@link #getConstraintMatchSet()}
     * and collecting all matching {@link ConstraintMatch#getJustification()} objects into a list.
     *
     * @return guaranteed to contain unique instances
     */
    @NonNull
    default <ConstraintJustification_ extends ConstraintJustification> List<ConstraintJustification_>
            getJustificationList(Class<ConstraintJustification_> justificationClass) {
        // Class::isInstance is null-safe and Class::cast avoids an unchecked cast.
        return getJustificationList()
                .stream()
                .filter(justificationClass::isInstance)
                .map(justificationClass::cast)
                .collect(Collectors.toList());
    }

    /**
     * Sum of the {@link #getConstraintMatchSet()}'s {@link ConstraintMatch#getScore()}.
     */
    @NonNull
    Score_ getScore();
}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/score | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/score/constraint/package-info.java | /**
* Explain a {@link ai.timefold.solver.core.api.score.Score} with
* {@link ai.timefold.solver.core.api.score.constraint.ConstraintMatchTotal} and
* {@link ai.timefold.solver.core.api.score.constraint.ConstraintMatch}.
*/
package ai.timefold.solver.core.api.score.constraint;
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/score | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/score/director/ScoreDirector.java | package ai.timefold.solver.core.api.score.director;
import ai.timefold.solver.core.api.domain.lookup.LookUpStrategyType;
import ai.timefold.solver.core.api.domain.lookup.PlanningId;
import ai.timefold.solver.core.api.domain.solution.PlanningSolution;
import ai.timefold.solver.core.api.score.Score;
import ai.timefold.solver.core.api.solver.change.ProblemChange;
import org.jspecify.annotations.NonNull;
import org.jspecify.annotations.Nullable;
/**
* The ScoreDirector holds the {@link PlanningSolution working solution}
* and calculates the {@link Score} for it.
*
* @param <Solution_> the solution type, the class with the {@link PlanningSolution} annotation
*/
public interface ScoreDirector<Solution_> {

    /**
     * The {@link PlanningSolution} that is used to calculate the {@link Score}.
     * <p>
     * Because a {@link Score} is best calculated incrementally (by deltas),
     * the {@link ScoreDirector} needs to be notified when its {@link PlanningSolution working solution} changes.
     */
    @NonNull
    Solution_ getWorkingSolution();

    /**
     * Notifies the score director that a basic planning variable of an entity is about to change,
     * so that the score can be updated by deltas.
     * Pair with the corresponding {@link #afterVariableChanged(Object, String)} call once the change is done.
     *
     * @param entity the planning entity whose variable is about to change
     * @param variableName the name of the changing variable
     */
    void beforeVariableChanged(Object entity, String variableName);

    /**
     * Notifies the score director that a basic planning variable of an entity has changed.
     * Must follow a matching {@link #beforeVariableChanged(Object, String)} call.
     */
    void afterVariableChanged(Object entity, String variableName);

    /**
     * Notifies the score director that an element is about to be assigned to an entity's list variable.
     * Pair with {@link #afterListVariableElementAssigned(Object, String, Object)}.
     */
    void beforeListVariableElementAssigned(Object entity, String variableName, Object element);

    /**
     * Notifies the score director that an element has been assigned to an entity's list variable.
     */
    void afterListVariableElementAssigned(Object entity, String variableName, Object element);

    /**
     * Notifies the score director that an element is about to be unassigned from an entity's list variable.
     * Pair with {@link #afterListVariableElementUnassigned(Object, String, Object)}.
     */
    void beforeListVariableElementUnassigned(Object entity, String variableName, Object element);

    /**
     * Notifies the score director that an element has been unassigned from an entity's list variable.
     */
    void afterListVariableElementUnassigned(Object entity, String variableName, Object element);

    /**
     * Notifies the score director that a range of an entity's list variable is about to change.
     * NOTE(review): presumably the range is {@code [fromIndex, toIndex)}, as is conventional — confirm
     * against the implementation before relying on it.
     */
    void beforeListVariableChanged(Object entity, String variableName, int fromIndex, int toIndex);

    /**
     * Notifies the score director that a range of an entity's list variable has changed.
     * Must follow a matching {@link #beforeListVariableChanged(Object, String, int, int)} call.
     */
    void afterListVariableChanged(Object entity, String variableName, int fromIndex, int toIndex);

    /**
     * Forces the variable listeners to process the variable changes notified so far.
     * NOTE(review): typically needed before reading values derived from those changes (e.g. shadow variables)
     * — confirm the exact guarantees against the implementation.
     */
    void triggerVariableListeners();

    /**
     * @deprecated Calling this method by user code is not recommended and will lead to unforeseen consequences.
     *             Use {@link ProblemChange} instead.
     */
    @Deprecated(forRemoval = true, since = "1.8.0")
    default void beforeEntityAdded(Object entity) {
        throw new UnsupportedOperationException();
    }

    /**
     * @deprecated Calling this method by user code is not recommended and will lead to unforeseen consequences.
     *             Use {@link ProblemChange} instead.
     */
    @Deprecated(forRemoval = true, since = "1.8.0")
    default void afterEntityAdded(Object entity) {
        throw new UnsupportedOperationException();
    }

    /**
     * @deprecated Calling this method by user code is not recommended and will lead to unforeseen consequences.
     *             Use {@link ProblemChange} instead.
     */
    @Deprecated(forRemoval = true, since = "1.8.0")
    default void beforeEntityRemoved(Object entity) {
        throw new UnsupportedOperationException();
    }

    /**
     * @deprecated Calling this method by user code is not recommended and will lead to unforeseen consequences.
     *             Use {@link ProblemChange} instead.
     */
    @Deprecated(forRemoval = true, since = "1.8.0")
    default void afterEntityRemoved(Object entity) {
        throw new UnsupportedOperationException();
    }

    /**
     * @deprecated Calling this method by user code is not recommended and will lead to unforeseen consequences.
     *             Use {@link ProblemChange} instead.
     */
    @Deprecated(forRemoval = true, since = "1.8.0")
    default void beforeProblemFactAdded(Object problemFact) {
        throw new UnsupportedOperationException();
    }

    /**
     * @deprecated Calling this method by user code is not recommended and will lead to unforeseen consequences.
     *             Use {@link ProblemChange} instead.
     */
    @Deprecated(forRemoval = true, since = "1.8.0")
    default void afterProblemFactAdded(Object problemFact) {
        throw new UnsupportedOperationException();
    }

    /**
     * @deprecated Calling this method by user code is not recommended and will lead to unforeseen consequences.
     *             Use {@link ProblemChange} instead.
     */
    @Deprecated(forRemoval = true, since = "1.8.0")
    default void beforeProblemPropertyChanged(Object problemFactOrEntity) {
        throw new UnsupportedOperationException();
    }

    /**
     * @deprecated Calling this method by user code is not recommended and will lead to unforeseen consequences.
     *             Use {@link ProblemChange} instead.
     */
    @Deprecated(forRemoval = true, since = "1.8.0")
    default void afterProblemPropertyChanged(Object problemFactOrEntity) {
        throw new UnsupportedOperationException();
    }

    /**
     * @deprecated Calling this method by user code is not recommended and will lead to unforeseen consequences.
     *             Use {@link ProblemChange} instead.
     */
    @Deprecated(forRemoval = true, since = "1.8.0")
    default void beforeProblemFactRemoved(Object problemFact) {
        throw new UnsupportedOperationException();
    }

    /**
     * @deprecated Calling this method by user code is not recommended and will lead to unforeseen consequences.
     *             Use {@link ProblemChange} instead.
     */
    @Deprecated(forRemoval = true, since = "1.8.0")
    default void afterProblemFactRemoved(Object problemFact) {
        throw new UnsupportedOperationException();
    }

    /**
     * Translates an entity or fact instance (often from another {@link Thread} or JVM)
     * to this {@link ScoreDirector}'s internal working instance.
     * Useful for move rebasing and in a {@link ProblemChange}.
     * <p>
     * Matching is determined by the {@link LookUpStrategyType} on {@link PlanningSolution}.
     * Matching uses a {@link PlanningId} by default.
     *
     * @return null if externalObject is null
     * @throws IllegalArgumentException if there is no workingObject for externalObject, if it cannot be looked up
     *         or if the externalObject's class is not supported
     * @throws IllegalStateException if it cannot be looked up
     * @param <E> the object type
     */
    <E> @Nullable E lookUpWorkingObject(@Nullable E externalObject);

    /**
     * As defined by {@link #lookUpWorkingObject(Object)},
     * but doesn't fail fast if no workingObject was ever added for the externalObject.
     * It's recommended to use {@link #lookUpWorkingObject(Object)} instead,
     * especially in move rebasing code.
     *
     * @return null if externalObject is null or if there is no workingObject for externalObject
     * @throws IllegalArgumentException if it cannot be looked up or if the externalObject's class is not supported
     * @throws IllegalStateException if it cannot be looked up
     * @param <E> the object type
     */
    <E> @Nullable E lookUpWorkingObjectOrReturnNull(@Nullable E externalObject);
}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/score | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/score/stream/Constraint.java | package ai.timefold.solver.core.api.score.stream;
import ai.timefold.solver.core.api.score.Score;
import ai.timefold.solver.core.api.score.constraint.ConstraintRef;
import org.jspecify.annotations.NonNull;
import org.jspecify.annotations.Nullable;
/**
* This represents a single constraint in the {@link ConstraintStream} API
* that impacts the {@link Score}.
* It is defined in {@link ConstraintProvider#defineConstraints(ConstraintFactory)}
* by calling {@link ConstraintFactory#forEach(Class)}.
*/
public interface Constraint {

    /**
     * The constraint group that {@link #getConstraintGroup()} returns
     * when no other group was specified for the constraint.
     */
    String DEFAULT_CONSTRAINT_GROUP = "default";

    /**
     * The {@link ConstraintFactory} that built this.
     *
     * @deprecated for removal as it is not necessary on the public API.
     * @return never null
     */
    @Deprecated(forRemoval = true)
    ConstraintFactory getConstraintFactory();

    /**
     * @return the unique identifier of this constraint
     */
    ConstraintRef getConstraintRef();

    /**
     * Returns a human-friendly description of the constraint.
     * The format of the description is left unspecified and will not be parsed in any way.
     *
     * @return may be left empty
     */
    default @NonNull String getDescription() {
        return "";
    }

    /**
     * Returns the group this constraint belongs to;
     * {@link #DEFAULT_CONSTRAINT_GROUP} unless the implementation overrides this method.
     */
    default @NonNull String getConstraintGroup() {
        return DEFAULT_CONSTRAINT_GROUP;
    }

    /**
     * Returns the weight of the constraint as defined in the {@link ConstraintProvider},
     * without any overrides.
     *
     * @return null if the constraint does not have a weight defined
     */
    default <Score_ extends Score<Score_>> @Nullable Score_ getConstraintWeight() {
        return null;
    }

    /**
     * @deprecated Prefer {@link #getConstraintRef()}.
     * @return never null
     */
    @Deprecated(forRemoval = true, since = "1.4.0")
    default String getConstraintPackage() {
        return getConstraintRef().packageName();
    }

    /**
     * @deprecated Prefer {@link #getConstraintRef()}.
     * @return never null
     */
    @Deprecated(forRemoval = true, since = "1.4.0")
    default String getConstraintName() {
        return getConstraintRef().constraintName();
    }

    /**
     * @deprecated Prefer {@link #getConstraintRef()}.
     * @return never null
     */
    @Deprecated(forRemoval = true, since = "1.4.0")
    default String getConstraintId() {
        return getConstraintRef().constraintId();
    }
}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/score | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/score/stream/ConstraintBuilder.java | package ai.timefold.solver.core.api.score.stream;
import ai.timefold.solver.core.api.domain.solution.PlanningSolution;
import ai.timefold.solver.core.api.score.constraint.ConstraintMatchTotal;
import ai.timefold.solver.core.api.score.constraint.ConstraintRef;
import org.jspecify.annotations.NonNull;
public interface ConstraintBuilder {

    /**
     * Builds a {@link Constraint} from the constraint stream.
     * The {@link ConstraintRef#packageName() constraint package} defaults to the package of the {@link PlanningSolution} class.
     * The constraint will be placed in the {@link Constraint#DEFAULT_CONSTRAINT_GROUP default constraint group}.
     *
     * @param constraintName shows up in {@link ConstraintMatchTotal} during score justification
     */
    default @NonNull Constraint asConstraint(@NonNull String constraintName) {
        return asConstraintDescribed(constraintName, "");
    }

    /**
     * Builds a {@link Constraint} from the constraint stream.
     * The {@link ConstraintRef#packageName() constraint package} defaults to the package of the {@link PlanningSolution} class.
     * The constraint will be placed in the {@link Constraint#DEFAULT_CONSTRAINT_GROUP default constraint group}.
     *
     * @param constraintName shows up in {@link ConstraintMatchTotal} during score justification
     * @param constraintDescription a human-friendly description of the constraint; may be left empty
     */
    @NonNull
    default Constraint asConstraintDescribed(@NonNull String constraintName, @NonNull String constraintDescription) {
        return asConstraintDescribed(constraintName, constraintDescription, Constraint.DEFAULT_CONSTRAINT_GROUP);
    }

    /**
     * Builds a {@link Constraint} from the constraint stream.
     * The {@link ConstraintRef#packageName() constraint package} defaults to the package of the {@link PlanningSolution} class.
     *
     * @param constraintName shows up in {@link ConstraintMatchTotal} during score justification
     * @param constraintDescription a human-friendly description of the constraint; may be left empty
     * @param constraintGroup only allows alphanumeric characters, "-" and "_"
     */
    @NonNull
    Constraint asConstraintDescribed(@NonNull String constraintName, @NonNull String constraintDescription,
            @NonNull String constraintGroup);

    /**
     * Builds a {@link Constraint} from the constraint stream.
     *
     * @param constraintName never null, shows up in {@link ConstraintMatchTotal} during score justification
     * @param constraintPackage never null
     * @return never null
     * @deprecated Constraint package should no longer be used, use {@link #asConstraint(String)} instead.
     */
    @Deprecated(forRemoval = true, since = "1.13.0")
    Constraint asConstraint(String constraintPackage, String constraintName);
}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/score | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/score/stream/ConstraintCollectors.java | package ai.timefold.solver.core.api.score.stream;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.math.RoundingMode;
import java.time.Duration;
import java.time.LocalDate;
import java.time.Period;
import java.time.temporal.Temporal;
import java.util.Collection;
import java.util.Comparator;
import java.util.HashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.SortedMap;
import java.util.SortedSet;
import java.util.TreeMap;
import java.util.function.BiFunction;
import java.util.function.BiPredicate;
import java.util.function.BinaryOperator;
import java.util.function.Function;
import java.util.function.IntFunction;
import java.util.function.Predicate;
import java.util.function.ToIntBiFunction;
import java.util.function.ToIntFunction;
import java.util.function.ToLongBiFunction;
import java.util.function.ToLongFunction;
import ai.timefold.solver.core.api.domain.solution.PlanningSolution;
import ai.timefold.solver.core.api.function.QuadFunction;
import ai.timefold.solver.core.api.function.QuadPredicate;
import ai.timefold.solver.core.api.function.ToIntQuadFunction;
import ai.timefold.solver.core.api.function.ToIntTriFunction;
import ai.timefold.solver.core.api.function.ToLongQuadFunction;
import ai.timefold.solver.core.api.function.ToLongTriFunction;
import ai.timefold.solver.core.api.function.TriFunction;
import ai.timefold.solver.core.api.function.TriPredicate;
import ai.timefold.solver.core.api.score.buildin.hardsoftbigdecimal.HardSoftBigDecimalScore;
import ai.timefold.solver.core.api.score.stream.bi.BiConstraintCollector;
import ai.timefold.solver.core.api.score.stream.common.ConnectedRangeChain;
import ai.timefold.solver.core.api.score.stream.common.LoadBalance;
import ai.timefold.solver.core.api.score.stream.common.SequenceChain;
import ai.timefold.solver.core.api.score.stream.quad.QuadConstraintCollector;
import ai.timefold.solver.core.api.score.stream.tri.TriConstraintCollector;
import ai.timefold.solver.core.api.score.stream.uni.UniConstraintCollector;
import ai.timefold.solver.core.api.score.stream.uni.UniConstraintStream;
import ai.timefold.solver.core.impl.score.stream.collector.bi.InnerBiConstraintCollectors;
import ai.timefold.solver.core.impl.score.stream.collector.quad.InnerQuadConstraintCollectors;
import ai.timefold.solver.core.impl.score.stream.collector.tri.InnerTriConstraintCollectors;
import ai.timefold.solver.core.impl.score.stream.collector.uni.InnerUniConstraintCollectors;
import ai.timefold.solver.core.impl.util.ConstantLambdaUtils;
import org.jspecify.annotations.NonNull;
/**
* Creates an {@link UniConstraintCollector}, {@link BiConstraintCollector}, ... instance
* for use in {@link UniConstraintStream#groupBy(Function, UniConstraintCollector)}, ...
*/
public final class ConstraintCollectors {
// ************************************************************************
// count
// ************************************************************************
/**
 * Returns a collector that counts the number of elements that are being grouped.
 * <p>
 * For example, {@code [Ann(age = 20), Beth(age = 25), Cathy(age = 30), David(age = 30), Eric(age = 20)]} with
 * {@code .groupBy(count())} returns {@code 5}.
 * <p>
 * The default result of the collector (e.g. when never called) is {@code 0}.
 *
 * @param <A> type of the matched fact
 * @return a collector producing the element count as an {@code Integer}
 */
public static <A> @NonNull UniConstraintCollector<A, ?, Integer> count() {
    return InnerUniConstraintCollectors.count();
}
/**
 * As defined by {@link #count()}, but with a {@code Long} result type.
 *
 * @param <A> type of the matched fact
 */
public static <A> @NonNull UniConstraintCollector<A, ?, Long> countLong() {
    return InnerUniConstraintCollectors.countLong();
}
/**
 * As defined by {@link #count()}, for a bi-stream.
 *
 * @param <A> type of the first matched fact
 * @param <B> type of the second matched fact
 */
public static <A, B> @NonNull BiConstraintCollector<A, B, ?, Integer> countBi() {
    return InnerBiConstraintCollectors.count();
}
/**
 * As defined by {@link #count()}, for a bi-stream and with a {@code Long} result type.
 *
 * @param <A> type of the first matched fact
 * @param <B> type of the second matched fact
 */
public static <A, B> @NonNull BiConstraintCollector<A, B, ?, Long> countLongBi() {
    return InnerBiConstraintCollectors.countLong();
}
/**
 * As defined by {@link #count()}, for a tri-stream.
 *
 * @param <A> type of the first matched fact
 * @param <B> type of the second matched fact
 * @param <C> type of the third matched fact
 */
public static <A, B, C> @NonNull TriConstraintCollector<A, B, C, ?, Integer> countTri() {
    return InnerTriConstraintCollectors.count();
}
/**
* As defined by {@link #count()}.
*/
public static <A, B, C> @NonNull TriConstraintCollector<A, B, C, ?, Long> countLongTri() {
return InnerTriConstraintCollectors.countLong();
}
/**
* As defined by {@link #count()}.
*/
public static <A, B, C, D> @NonNull QuadConstraintCollector<A, B, C, D, ?, Integer> countQuad() {
return InnerQuadConstraintCollectors.count();
}
/**
* As defined by {@link #count()}.
*/
public static <A, B, C, D> @NonNull QuadConstraintCollector<A, B, C, D, ?, Long> countLongQuad() {
return InnerQuadConstraintCollectors.countLong();
}
// ************************************************************************
// countDistinct
// ************************************************************************
/**
 * As defined by {@link #countDistinct(Function)}, with {@link Function#identity()} as the argument,
 * counting the distinct elements themselves.
 */
public static <A> @NonNull UniConstraintCollector<A, ?, Integer> countDistinct() {
    return countDistinct(ConstantLambdaUtils.identity());
}

/**
 * Returns a collector that counts the number of unique elements that are being grouped.
 * Uniqueness is determined by {@link #equals(Object) equality}.
 * <p>
 * For example, {@code [Ann(age = 20), Beth(age = 25), Cathy(age = 30), David(age = 30), Eric(age = 20)]} with
 * {@code .groupBy(countDistinct(Person::getAge))} returns {@code 3}, one for age 20, 25 and 30 each.
 * <p>
 * The default result of the collector (e.g. when never called) is {@code 0}.
 *
 * @param <A> type of the matched fact
 * @param groupValueMapping maps facts from the matched type to the value whose distinct occurrences are counted
 */
public static <A> @NonNull UniConstraintCollector<A, ?, Integer> countDistinct(@NonNull Function<A, ?> groupValueMapping) {
    return InnerUniConstraintCollectors.countDistinct(groupValueMapping);
}

/**
 * As defined by {@link #countDistinct(Function)}, but returning a {@code Long}.
 */
public static <A> @NonNull UniConstraintCollector<A, ?, Long> countDistinctLong(@NonNull Function<A, ?> groupValueMapping) {
    return InnerUniConstraintCollectors.countDistinctLong(groupValueMapping);
}

/**
 * As defined by {@link #countDistinct(Function)}.
 */
public static <A, B> @NonNull BiConstraintCollector<A, B, ?, Integer> countDistinct(
        @NonNull BiFunction<A, B, ?> groupValueMapping) {
    return InnerBiConstraintCollectors.countDistinct(groupValueMapping);
}

/**
 * As defined by {@link #countDistinct(Function)}, but returning a {@code Long}.
 */
public static <A, B> @NonNull BiConstraintCollector<A, B, ?, Long> countDistinctLong(
        @NonNull BiFunction<A, B, ?> groupValueMapping) {
    return InnerBiConstraintCollectors.countDistinctLong(groupValueMapping);
}

/**
 * As defined by {@link #countDistinct(Function)}.
 */
public static <A, B, C> @NonNull TriConstraintCollector<A, B, C, ?, Integer> countDistinct(
        @NonNull TriFunction<A, B, C, ?> groupValueMapping) {
    return InnerTriConstraintCollectors.countDistinct(groupValueMapping);
}

/**
 * As defined by {@link #countDistinct(Function)}, but returning a {@code Long}.
 */
public static <A, B, C> @NonNull TriConstraintCollector<A, B, C, ?, Long> countDistinctLong(
        @NonNull TriFunction<A, B, C, ?> groupValueMapping) {
    return InnerTriConstraintCollectors.countDistinctLong(groupValueMapping);
}

/**
 * As defined by {@link #countDistinct(Function)}.
 */
public static <A, B, C, D> @NonNull QuadConstraintCollector<A, B, C, D, ?, Integer> countDistinct(
        @NonNull QuadFunction<A, B, C, D, ?> groupValueMapping) {
    return InnerQuadConstraintCollectors.countDistinct(groupValueMapping);
}

/**
 * As defined by {@link #countDistinct(Function)}, but returning a {@code Long}.
 */
public static <A, B, C, D> @NonNull QuadConstraintCollector<A, B, C, D, ?, Long> countDistinctLong(
        @NonNull QuadFunction<A, B, C, D, ?> groupValueMapping) {
    return InnerQuadConstraintCollectors.countDistinctLong(groupValueMapping);
}
// ************************************************************************
// sum
// ************************************************************************
/**
 * Returns a collector that sums an {@code int} property of the elements that are being grouped.
 * <p>
 * For example, {@code [Ann(age = 20), Beth(age = 25), Cathy(age = 30), David(age = 30), Eric(age = 20)]} with
 * {@code .groupBy(sum(Person::getAge))} returns {@code 125}.
 * <p>
 * The default result of the collector (e.g. when never called) is {@code 0}.
 *
 * @param <A> type of the matched fact
 */
public static <A> @NonNull UniConstraintCollector<A, ?, Integer> sum(@NonNull ToIntFunction<? super A> groupValueMapping) {
    return InnerUniConstraintCollectors.sum(groupValueMapping);
}

/**
 * As defined by {@link #sum(ToIntFunction)}, summing a {@code long} property instead.
 */
public static <A> @NonNull UniConstraintCollector<A, ?, Long>
        sumLong(@NonNull ToLongFunction<? super A> groupValueMapping) {
    return InnerUniConstraintCollectors.sum(groupValueMapping);
}

/**
 * As defined by {@link #sum(ToIntFunction)}, generalized to any result type.
 *
 * @param <Result> type of the sum
 * @param zero the identity element of the sum; also the result when no elements are grouped
 * @param adder folds an extracted value into the running sum
 * @param subtractor removes an extracted value from the running sum;
 *        expected to be the exact inverse of {@code adder}, as elements may be retracted incrementally
 */
public static <A, Result> @NonNull UniConstraintCollector<A, ?, Result> sum(
        @NonNull Function<? super A, Result> groupValueMapping,
        @NonNull Result zero, @NonNull BinaryOperator<Result> adder, @NonNull BinaryOperator<Result> subtractor) {
    return InnerUniConstraintCollectors.sum(groupValueMapping, zero, adder, subtractor);
}

/**
 * As defined by {@link #sum(ToIntFunction)}, summing {@link BigDecimal} values.
 */
public static <A> @NonNull UniConstraintCollector<A, ?, BigDecimal> sumBigDecimal(
        @NonNull Function<? super A, BigDecimal> groupValueMapping) {
    return sum(groupValueMapping, BigDecimal.ZERO, BigDecimal::add, BigDecimal::subtract);
}

/**
 * As defined by {@link #sum(ToIntFunction)}, summing {@link BigInteger} values.
 */
public static <A> @NonNull UniConstraintCollector<A, ?, BigInteger> sumBigInteger(
        @NonNull Function<? super A, BigInteger> groupValueMapping) {
    return sum(groupValueMapping, BigInteger.ZERO, BigInteger::add, BigInteger::subtract);
}

/**
 * As defined by {@link #sum(ToIntFunction)}, summing {@link Duration} values.
 */
public static <A> @NonNull UniConstraintCollector<A, ?, Duration> sumDuration(
        @NonNull Function<? super A, Duration> groupValueMapping) {
    return sum(groupValueMapping, Duration.ZERO, Duration::plus, Duration::minus);
}

/**
 * As defined by {@link #sum(ToIntFunction)}, summing {@link Period} values.
 */
public static <A> @NonNull UniConstraintCollector<A, ?, Period>
        sumPeriod(@NonNull Function<? super A, Period> groupValueMapping) {
    return sum(groupValueMapping, Period.ZERO, Period::plus, Period::minus);
}
/**
 * As defined by {@link #sum(ToIntFunction)}, applied to a stream of pairs.
 */
public static <A, B> @NonNull BiConstraintCollector<A, B, ?, Integer> sum(
        @NonNull ToIntBiFunction<? super A, ? super B> groupValueMapping) {
    return InnerBiConstraintCollectors.sum(groupValueMapping);
}

/**
 * As defined by {@link #sum(ToIntFunction)}, summing a {@code long} property.
 */
public static <A, B> @NonNull BiConstraintCollector<A, B, ?, Long> sumLong(
        @NonNull ToLongBiFunction<? super A, ? super B> groupValueMapping) {
    return InnerBiConstraintCollectors.sum(groupValueMapping);
}

/**
 * As defined by {@link #sum(ToIntFunction)}, generalized to any result type;
 * see {@link #sum(Function, Object, BinaryOperator, BinaryOperator)} for the parameter contract.
 */
public static <A, B, Result> @NonNull BiConstraintCollector<A, B, ?, Result> sum(
        @NonNull BiFunction<? super A, ? super B, Result> groupValueMapping, @NonNull Result zero,
        @NonNull BinaryOperator<Result> adder,
        @NonNull BinaryOperator<Result> subtractor) {
    return InnerBiConstraintCollectors.sum(groupValueMapping, zero, adder, subtractor);
}

/**
 * As defined by {@link #sum(ToIntFunction)}, summing {@link BigDecimal} values.
 */
public static <A, B> @NonNull BiConstraintCollector<A, B, ?, BigDecimal> sumBigDecimal(
        @NonNull BiFunction<? super A, ? super B, BigDecimal> groupValueMapping) {
    return sum(groupValueMapping, BigDecimal.ZERO, BigDecimal::add, BigDecimal::subtract);
}

/**
 * As defined by {@link #sum(ToIntFunction)}, summing {@link BigInteger} values.
 */
public static <A, B> @NonNull BiConstraintCollector<A, B, ?, BigInteger> sumBigInteger(
        @NonNull BiFunction<? super A, ? super B, BigInteger> groupValueMapping) {
    return sum(groupValueMapping, BigInteger.ZERO, BigInteger::add, BigInteger::subtract);
}

/**
 * As defined by {@link #sum(ToIntFunction)}, summing {@link Duration} values.
 */
public static <A, B> @NonNull BiConstraintCollector<A, B, ?, Duration> sumDuration(
        @NonNull BiFunction<? super A, ? super B, Duration> groupValueMapping) {
    return sum(groupValueMapping, Duration.ZERO, Duration::plus, Duration::minus);
}

/**
 * As defined by {@link #sum(ToIntFunction)}, summing {@link Period} values.
 */
public static <A, B> @NonNull BiConstraintCollector<A, B, ?, Period> sumPeriod(
        @NonNull BiFunction<? super A, ? super B, Period> groupValueMapping) {
    return sum(groupValueMapping, Period.ZERO, Period::plus, Period::minus);
}
/**
 * As defined by {@link #sum(ToIntFunction)}, applied to a stream of triples.
 */
public static <A, B, C> @NonNull TriConstraintCollector<A, B, C, ?, Integer> sum(
        @NonNull ToIntTriFunction<? super A, ? super B, ? super C> groupValueMapping) {
    return InnerTriConstraintCollectors.sum(groupValueMapping);
}

/**
 * As defined by {@link #sum(ToIntFunction)}, summing a {@code long} property.
 */
public static <A, B, C> @NonNull TriConstraintCollector<A, B, C, ?, Long> sumLong(
        @NonNull ToLongTriFunction<? super A, ? super B, ? super C> groupValueMapping) {
    return InnerTriConstraintCollectors.sum(groupValueMapping);
}

/**
 * As defined by {@link #sum(ToIntFunction)}, generalized to any result type;
 * see {@link #sum(Function, Object, BinaryOperator, BinaryOperator)} for the parameter contract.
 */
public static <A, B, C, Result> @NonNull TriConstraintCollector<A, B, C, ?, Result> sum(
        @NonNull TriFunction<? super A, ? super B, ? super C, Result> groupValueMapping, @NonNull Result zero,
        @NonNull BinaryOperator<Result> adder, @NonNull BinaryOperator<Result> subtractor) {
    return InnerTriConstraintCollectors.sum(groupValueMapping, zero, adder, subtractor);
}

/**
 * As defined by {@link #sum(ToIntFunction)}, summing {@link BigDecimal} values.
 */
public static <A, B, C> @NonNull TriConstraintCollector<A, B, C, ?, BigDecimal> sumBigDecimal(
        @NonNull TriFunction<? super A, ? super B, ? super C, BigDecimal> groupValueMapping) {
    return sum(groupValueMapping, BigDecimal.ZERO, BigDecimal::add, BigDecimal::subtract);
}

/**
 * As defined by {@link #sum(ToIntFunction)}, summing {@link BigInteger} values.
 */
public static <A, B, C> @NonNull TriConstraintCollector<A, B, C, ?, BigInteger> sumBigInteger(
        @NonNull TriFunction<? super A, ? super B, ? super C, BigInteger> groupValueMapping) {
    return sum(groupValueMapping, BigInteger.ZERO, BigInteger::add, BigInteger::subtract);
}

/**
 * As defined by {@link #sum(ToIntFunction)}, summing {@link Duration} values.
 */
public static <A, B, C> @NonNull TriConstraintCollector<A, B, C, ?, Duration> sumDuration(
        @NonNull TriFunction<? super A, ? super B, ? super C, Duration> groupValueMapping) {
    return sum(groupValueMapping, Duration.ZERO, Duration::plus, Duration::minus);
}

/**
 * As defined by {@link #sum(ToIntFunction)}, summing {@link Period} values.
 */
public static <A, B, C> @NonNull TriConstraintCollector<A, B, C, ?, Period> sumPeriod(
        @NonNull TriFunction<? super A, ? super B, ? super C, Period> groupValueMapping) {
    return sum(groupValueMapping, Period.ZERO, Period::plus, Period::minus);
}
/**
 * As defined by {@link #sum(ToIntFunction)}, applied to a stream of quadruples.
 */
public static <A, B, C, D> @NonNull QuadConstraintCollector<A, B, C, D, ?, Integer> sum(
        @NonNull ToIntQuadFunction<? super A, ? super B, ? super C, ? super D> groupValueMapping) {
    return InnerQuadConstraintCollectors.sum(groupValueMapping);
}

/**
 * As defined by {@link #sum(ToIntFunction)}, summing a {@code long} property.
 */
public static <A, B, C, D> @NonNull QuadConstraintCollector<A, B, C, D, ?, Long> sumLong(
        @NonNull ToLongQuadFunction<? super A, ? super B, ? super C, ? super D> groupValueMapping) {
    return InnerQuadConstraintCollectors.sum(groupValueMapping);
}

/**
 * As defined by {@link #sum(ToIntFunction)}, generalized to any result type;
 * see {@link #sum(Function, Object, BinaryOperator, BinaryOperator)} for the parameter contract.
 */
public static <A, B, C, D, Result> @NonNull QuadConstraintCollector<A, B, C, D, ?, Result> sum(
        @NonNull QuadFunction<? super A, ? super B, ? super C, ? super D, Result> groupValueMapping, @NonNull Result zero,
        @NonNull BinaryOperator<Result> adder, @NonNull BinaryOperator<Result> subtractor) {
    return InnerQuadConstraintCollectors.sum(groupValueMapping, zero, adder, subtractor);
}

/**
 * As defined by {@link #sum(ToIntFunction)}, summing {@link BigDecimal} values.
 */
public static <A, B, C, D> @NonNull QuadConstraintCollector<A, B, C, D, ?, BigDecimal> sumBigDecimal(
        @NonNull QuadFunction<? super A, ? super B, ? super C, ? super D, BigDecimal> groupValueMapping) {
    return sum(groupValueMapping, BigDecimal.ZERO, BigDecimal::add, BigDecimal::subtract);
}

/**
 * As defined by {@link #sum(ToIntFunction)}, summing {@link BigInteger} values.
 */
public static <A, B, C, D> @NonNull QuadConstraintCollector<A, B, C, D, ?, BigInteger> sumBigInteger(
        @NonNull QuadFunction<? super A, ? super B, ? super C, ? super D, BigInteger> groupValueMapping) {
    return sum(groupValueMapping, BigInteger.ZERO, BigInteger::add, BigInteger::subtract);
}

/**
 * As defined by {@link #sum(ToIntFunction)}, summing {@link Duration} values.
 */
public static <A, B, C, D> @NonNull QuadConstraintCollector<A, B, C, D, ?, Duration> sumDuration(
        @NonNull QuadFunction<? super A, ? super B, ? super C, ? super D, Duration> groupValueMapping) {
    return sum(groupValueMapping, Duration.ZERO, Duration::plus, Duration::minus);
}

/**
 * As defined by {@link #sum(ToIntFunction)}, summing {@link Period} values.
 */
public static <A, B, C, D> @NonNull QuadConstraintCollector<A, B, C, D, ?, Period> sumPeriod(
        @NonNull QuadFunction<? super A, ? super B, ? super C, ? super D, Period> groupValueMapping) {
    return sum(groupValueMapping, Period.ZERO, Period::plus, Period::minus);
}
// ************************************************************************
// min
// ************************************************************************
/**
 * Returns a collector that finds a minimum value in a group of {@link Comparable} elements.
 * <p>
 * Important: The {@link Comparable}'s {@link Comparable#compareTo(Object)} must be <i>consistent with equals</i>,
 * such that {@code e1.compareTo(e2) == 0} has the same boolean value as {@code e1.equals(e2)}.
 * In other words, if two elements compare to zero, any of them can be returned by the collector.
 * It can even differ between 2 score calculations on the exact same {@link PlanningSolution} state, due to
 * incremental score calculation.
 * <p>
 * For example, {@code [Ann(age = 20), Beth(age = 25), Cathy(age = 30), David(age = 30), Eric(age = 20)]} with
 * {@code .groupBy(min())} returns either {@code Ann} or {@code Eric} arbitrarily, assuming the objects are
 * {@link Comparable} by the {@code age} field.
 * To avoid this, always end your {@link Comparator} by an identity comparison, such as
 * {@code Comparator.comparing(Person::getAge).thenComparing(Person::getId)}.
 * <p>
 * The default result of the collector (e.g. when never called) is {@code null}.
 *
 * @param <A> type of the matched fact
 */
public static <A extends Comparable<A>> @NonNull UniConstraintCollector<A, ?, A> min() {
    return InnerUniConstraintCollectors.min(ConstantLambdaUtils.identity());
}

/**
 * Returns a collector that finds a minimum value in a group of {@link Comparable} elements.
 * <p>
 * Important: The {@link Comparable}'s {@link Comparable#compareTo(Object)} must be <i>consistent with equals</i>,
 * such that {@code e1.compareTo(e2) == 0} has the same boolean value as {@code e1.equals(e2)}.
 * In other words, if two elements compare to zero, any of them can be returned by the collector.
 * It can even differ between 2 score calculations on the exact same {@link PlanningSolution} state, due to
 * incremental score calculation.
 * <p>
 * For example, {@code [Ann(age = 20), Beth(age = 25), Cathy(age = 30), David(age = 30), Eric(age = 20)]} with
 * {@code .groupBy(min(Person::getAge))} returns {@code 20}.
 * <p>
 * The default result of the collector (e.g. when never called) is {@code null}.
 *
 * @param <A> type of the matched fact
 * @param <Mapped> type of the result
 * @param groupValueMapping maps facts from the matched type to the result type
 */
public static <A, Mapped extends Comparable<? super Mapped>> @NonNull UniConstraintCollector<A, ?, Mapped> min(
        @NonNull Function<A, Mapped> groupValueMapping) {
    return InnerUniConstraintCollectors.min(groupValueMapping);
}

/**
 * Returns a collector that finds a minimum value in a group of {@link Comparable} elements.
 * The elements will be compared according to the value returned by the comparable function.
 * <p>
 * Important: The {@link Comparable}'s {@link Comparable#compareTo(Object)} must be <i>consistent with equals</i>,
 * such that {@code e1.compareTo(e2) == 0} has the same boolean value as {@code e1.equals(e2)}.
 * In other words, if two elements compare to zero, any of them can be returned by the collector.
 * It can even differ between 2 score calculations on the exact same {@link PlanningSolution} state, due to
 * incremental score calculation.
 * <p>
 * For example, {@code [Ann(age = 20), Beth(age = 25), Cathy(age = 30), David(age = 30), Eric(age = 20)]} with
 * {@code .groupBy(min(Person::name, Person::age))} returns {@code Ann} or {@code Eric},
 * as both have the same age.
 * <p>
 * The default result of the collector (e.g. when never called) is {@code null}.
 *
 * @param <A> type of the matched fact
 * @param <Mapped> type of the result
 * @param <Comparable_> type of the comparable property
 * @param groupValueMapping maps facts from the matched type to the result type
 * @param comparableFunction maps facts from the matched type to the comparable property
 */
public static <A, Mapped, Comparable_ extends Comparable<? super Comparable_>> @NonNull UniConstraintCollector<A, ?, Mapped>
        min(@NonNull Function<A, Mapped> groupValueMapping,
                @NonNull Function<Mapped, Comparable_> comparableFunction) {
    return InnerUniConstraintCollectors.min(groupValueMapping, comparableFunction);
}

/**
 * As defined by {@link #min()}, only with a custom {@link Comparator}.
 *
 * @deprecated Deprecated in favor of {@link #min(Function, Function)},
 *             as this method can lead to unavoidable score corruptions.
 */
@Deprecated(forRemoval = true, since = "1.0.0")
public static <A> UniConstraintCollector<A, ?, A> min(Comparator<? super A> comparator) {
    return min(ConstantLambdaUtils.identity(), comparator);
}

/**
 * As defined by {@link #min(Function)}, only with a custom {@link Comparator}.
 *
 * @deprecated Deprecated in favor of {@link #min(Function, Function)},
 *             as this method can lead to unavoidable score corruptions.
 */
@Deprecated(forRemoval = true, since = "1.0.0")
public static <A, Mapped> UniConstraintCollector<A, ?, Mapped> min(Function<A, Mapped> groupValueMapping,
        Comparator<? super Mapped> comparator) {
    return InnerUniConstraintCollectors.min(groupValueMapping, comparator);
}
/**
 * As defined by {@link #min(Function)}.
 */
public static <A, B, Mapped extends Comparable<? super Mapped>> @NonNull BiConstraintCollector<A, B, ?, Mapped> min(
        @NonNull BiFunction<A, B, Mapped> groupValueMapping) {
    return InnerBiConstraintCollectors.min(groupValueMapping);
}

/**
 * As defined by {@link #min(Function, Function)}.
 */
public static <A, B, Mapped, Comparable_ extends Comparable<? super Comparable_>>
        @NonNull BiConstraintCollector<A, B, ?, Mapped>
        min(@NonNull BiFunction<A, B, Mapped> groupValueMapping,
                @NonNull Function<Mapped, Comparable_> comparableFunction) {
    return InnerBiConstraintCollectors.min(groupValueMapping, comparableFunction);
}

/**
 * As defined by {@link #min(Function)}, only with a custom {@link Comparator}.
 *
 * @deprecated Deprecated in favor of {@link #min(BiFunction, Function)},
 *             as this method can lead to unavoidable score corruptions.
 */
@Deprecated(forRemoval = true, since = "1.0.0")
public static <A, B, Mapped> BiConstraintCollector<A, B, ?, Mapped> min(BiFunction<A, B, Mapped> groupValueMapping,
        Comparator<? super Mapped> comparator) {
    return InnerBiConstraintCollectors.min(groupValueMapping, comparator);
}

/**
 * As defined by {@link #min(Function)}.
 */
public static <A, B, C, Mapped extends Comparable<? super Mapped>> @NonNull TriConstraintCollector<A, B, C, ?, Mapped> min(
        @NonNull TriFunction<A, B, C, Mapped> groupValueMapping) {
    return InnerTriConstraintCollectors.min(groupValueMapping);
}

/**
 * As defined by {@link #min(Function, Function)}.
 */
public static <A, B, C, Mapped, Comparable_ extends Comparable<? super Comparable_>>
        @NonNull TriConstraintCollector<A, B, C, ?, Mapped>
        min(@NonNull TriFunction<A, B, C, Mapped> groupValueMapping,
                @NonNull Function<Mapped, Comparable_> comparableFunction) {
    return InnerTriConstraintCollectors.min(groupValueMapping, comparableFunction);
}

/**
 * As defined by {@link #min(Function)}, only with a custom {@link Comparator}.
 *
 * @deprecated Deprecated in favor of {@link #min(TriFunction, Function)},
 *             as this method can lead to unavoidable score corruptions.
 */
@Deprecated(forRemoval = true, since = "1.0.0")
public static <A, B, C, Mapped> TriConstraintCollector<A, B, C, ?, Mapped> min(
        TriFunction<A, B, C, Mapped> groupValueMapping, Comparator<? super Mapped> comparator) {
    return InnerTriConstraintCollectors.min(groupValueMapping, comparator);
}

/**
 * As defined by {@link #min(Function)}.
 */
public static <A, B, C, D, Mapped extends Comparable<? super Mapped>>
        @NonNull QuadConstraintCollector<A, B, C, D, ?, Mapped> min(
                @NonNull QuadFunction<A, B, C, D, Mapped> groupValueMapping) {
    return InnerQuadConstraintCollectors.min(groupValueMapping);
}

/**
 * As defined by {@link #min(Function, Function)}.
 */
public static <A, B, C, D, Mapped, Comparable_ extends Comparable<? super Comparable_>>
        @NonNull QuadConstraintCollector<A, B, C, D, ?, Mapped>
        min(@NonNull QuadFunction<A, B, C, D, Mapped> groupValueMapping,
                @NonNull Function<Mapped, Comparable_> comparableFunction) {
    return InnerQuadConstraintCollectors.min(groupValueMapping, comparableFunction);
}

/**
 * As defined by {@link #min(Function)}, only with a custom {@link Comparator}.
 *
 * @deprecated Deprecated in favor of {@link #min(QuadFunction, Function)},
 *             as this method can lead to unavoidable score corruptions.
 */
@Deprecated(forRemoval = true, since = "1.0.0")
public static <A, B, C, D, Mapped> QuadConstraintCollector<A, B, C, D, ?, Mapped> min(
        QuadFunction<A, B, C, D, Mapped> groupValueMapping, Comparator<? super Mapped> comparator) {
    return InnerQuadConstraintCollectors.min(groupValueMapping, comparator);
}
// ************************************************************************
// max
// ************************************************************************
/**
 * Returns a collector that finds a maximum value in a group of {@link Comparable} elements.
 * <p>
 * Important: The {@link Comparable}'s {@link Comparable#compareTo(Object)} must be <i>consistent with equals</i>,
 * such that {@code e1.compareTo(e2) == 0} has the same boolean value as {@code e1.equals(e2)}.
 * In other words, if two elements compare to zero, any of them can be returned by the collector.
 * It can even differ between 2 score calculations on the exact same {@link PlanningSolution} state, due to
 * incremental score calculation.
 * <p>
 * For example, {@code [Ann(age = 20), Beth(age = 25), Cathy(age = 30), David(age = 30), Eric(age = 20)]} with
 * {@code .groupBy(max())} returns either {@code Cathy} or {@code David} arbitrarily, assuming the objects are
 * {@link Comparable} by the {@code age} field.
 * To avoid this, always end your {@link Comparator} by an identity comparison, such as
 * {@code Comparator.comparing(Person::getAge).thenComparing(Person::getId)}.
 * <p>
 * The default result of the collector (e.g. when never called) is {@code null}.
 *
 * @param <A> type of the matched fact
 */
public static <A extends Comparable<A>> @NonNull UniConstraintCollector<A, ?, A> max() {
    return InnerUniConstraintCollectors.max(ConstantLambdaUtils.identity());
}

/**
 * Returns a collector that finds a maximum value in a group of {@link Comparable} elements.
 * <p>
 * Important: The {@link Comparable}'s {@link Comparable#compareTo(Object)} must be <i>consistent with equals</i>,
 * such that {@code e1.compareTo(e2) == 0} has the same boolean value as {@code e1.equals(e2)}.
 * In other words, if two elements compare to zero, any of them can be returned by the collector.
 * It can even differ between 2 score calculations on the exact same {@link PlanningSolution} state, due to
 * incremental score calculation.
 * <p>
 * For example, {@code [Ann(age = 20), Beth(age = 25), Cathy(age = 30), David(age = 30), Eric(age = 20)]} with
 * {@code .groupBy(max(Person::getAge))} returns {@code 30}.
 * <p>
 * The default result of the collector (e.g. when never called) is {@code null}.
 *
 * @param <A> type of the matched fact
 * @param <Mapped> type of the result
 * @param groupValueMapping maps facts from the matched type to the result type
 */
public static <A, Mapped extends Comparable<? super Mapped>> @NonNull UniConstraintCollector<A, ?, Mapped> max(
        @NonNull Function<A, Mapped> groupValueMapping) {
    return InnerUniConstraintCollectors.max(groupValueMapping);
}

/**
 * As defined by {@link #max()}, only with a custom {@link Comparator}.
 *
 * @deprecated Deprecated in favor of {@link #max(Function, Function)},
 *             as this method can lead to unavoidable score corruptions.
 */
@Deprecated(forRemoval = true, since = "1.0.0")
public static <A> UniConstraintCollector<A, ?, A> max(Comparator<? super A> comparator) {
    return InnerUniConstraintCollectors.max(ConstantLambdaUtils.identity(), comparator);
}

/**
 * Returns a collector that finds a maximum value in a group of elements.
 * The elements will be compared according to the value returned by the comparable function.
 * <p>
 * Important: The {@link Comparable}'s {@link Comparable#compareTo(Object)} must be <i>consistent with equals</i>,
 * such that {@code e1.compareTo(e2) == 0} has the same boolean value as {@code e1.equals(e2)}.
 * In other words, if two elements compare to zero, any of them can be returned by the collector.
 * It can even differ between 2 score calculations on the exact same {@link PlanningSolution} state, due to
 * incremental score calculation.
 * <p>
 * For example, {@code [Ann(age = 20), Beth(age = 25), Cathy(age = 30), David(age = 30), Eric(age = 20)]} with
 * {@code .groupBy(max(Person::name, Person::age))} returns {@code Cathy} or {@code David},
 * as both have the same age.
 * <p>
 * The default result of the collector (e.g. when never called) is {@code null}.
 *
 * @param <A> type of the matched fact
 * @param <Mapped> type of the result
 * @param <Comparable_> type of the comparable property
 * @param groupValueMapping maps facts from the matched type to the result type
 * @param comparableFunction maps facts from the matched type to the comparable property
 */
public static <A, Mapped, Comparable_ extends Comparable<? super Comparable_>> @NonNull UniConstraintCollector<A, ?, Mapped>
        max(@NonNull Function<A, Mapped> groupValueMapping, @NonNull Function<Mapped, Comparable_> comparableFunction) {
    return InnerUniConstraintCollectors.max(groupValueMapping, comparableFunction);
}

/**
 * As defined by {@link #max(Function)}, only with a custom {@link Comparator}.
 *
 * @deprecated Deprecated in favor of {@link #max(Function, Function)},
 *             as this method can lead to unavoidable score corruptions.
 */
@Deprecated(forRemoval = true, since = "1.0.0")
public static <A, Mapped> UniConstraintCollector<A, ?, Mapped> max(Function<A, Mapped> groupValueMapping,
        Comparator<? super Mapped> comparator) {
    return InnerUniConstraintCollectors.max(groupValueMapping, comparator);
}
/**
 * As defined by {@link #max(Function)}.
 */
public static <A, B, Mapped extends Comparable<? super Mapped>> @NonNull BiConstraintCollector<A, B, ?, Mapped> max(
        @NonNull BiFunction<A, B, Mapped> groupValueMapping) {
    return InnerBiConstraintCollectors.max(groupValueMapping);
}

/**
 * As defined by {@link #max(Function, Function)}.
 */
public static <A, B, Mapped, Comparable_ extends Comparable<? super Comparable_>>
        @NonNull BiConstraintCollector<A, B, ?, Mapped>
        max(@NonNull BiFunction<A, B, Mapped> groupValueMapping,
                @NonNull Function<Mapped, Comparable_> comparableFunction) {
    return InnerBiConstraintCollectors.max(groupValueMapping, comparableFunction);
}

/**
 * As defined by {@link #max(Function)}, only with a custom {@link Comparator}.
 *
 * @deprecated Deprecated in favor of {@link #max(BiFunction, Function)},
 *             as this method can lead to unavoidable score corruptions.
 */
@Deprecated(forRemoval = true, since = "1.0.0")
public static <A, B, Mapped> BiConstraintCollector<A, B, ?, Mapped> max(BiFunction<A, B, Mapped> groupValueMapping,
        Comparator<? super Mapped> comparator) {
    return InnerBiConstraintCollectors.max(groupValueMapping, comparator);
}

/**
 * As defined by {@link #max(Function)}.
 */
public static <A, B, C, Mapped extends Comparable<? super Mapped>> @NonNull TriConstraintCollector<A, B, C, ?, Mapped> max(
        @NonNull TriFunction<A, B, C, Mapped> groupValueMapping) {
    return InnerTriConstraintCollectors.max(groupValueMapping);
}

/**
 * As defined by {@link #max(Function, Function)}.
 */
public static <A, B, C, Mapped, Comparable_ extends Comparable<? super Comparable_>>
        @NonNull TriConstraintCollector<A, B, C, ?, Mapped>
        max(@NonNull TriFunction<A, B, C, Mapped> groupValueMapping,
                @NonNull Function<Mapped, Comparable_> comparableFunction) {
    return InnerTriConstraintCollectors.max(groupValueMapping, comparableFunction);
}

/**
 * As defined by {@link #max(Function)}, only with a custom {@link Comparator}.
 *
 * @deprecated Deprecated in favor of {@link #max(TriFunction, Function)},
 *             as this method can lead to unavoidable score corruptions.
 */
@Deprecated(forRemoval = true, since = "1.0.0")
public static <A, B, C, Mapped> TriConstraintCollector<A, B, C, ?, Mapped> max(
        TriFunction<A, B, C, Mapped> groupValueMapping, Comparator<? super Mapped> comparator) {
    return InnerTriConstraintCollectors.max(groupValueMapping, comparator);
}

/**
 * As defined by {@link #max(Function)}.
 */
public static <A, B, C, D, Mapped extends Comparable<? super Mapped>>
        @NonNull QuadConstraintCollector<A, B, C, D, ?, Mapped> max(
                @NonNull QuadFunction<A, B, C, D, Mapped> groupValueMapping) {
    return InnerQuadConstraintCollectors.max(groupValueMapping);
}

/**
 * As defined by {@link #max(Function, Function)}.
 */
public static <A, B, C, D, Mapped, Comparable_ extends Comparable<? super Comparable_>>
        @NonNull QuadConstraintCollector<A, B, C, D, ?, Mapped>
        max(@NonNull QuadFunction<A, B, C, D, Mapped> groupValueMapping,
                @NonNull Function<Mapped, Comparable_> comparableFunction) {
    return InnerQuadConstraintCollectors.max(groupValueMapping, comparableFunction);
}

/**
 * As defined by {@link #max(Function)}, only with a custom {@link Comparator}.
 *
 * @deprecated Deprecated in favor of {@link #max(QuadFunction, Function)},
 *             as this method can lead to unavoidable score corruptions.
 */
@Deprecated(forRemoval = true, since = "1.0.0")
public static <A, B, C, D, Mapped> QuadConstraintCollector<A, B, C, D, ?, Mapped> max(
        QuadFunction<A, B, C, D, Mapped> groupValueMapping, Comparator<? super Mapped> comparator) {
    return InnerQuadConstraintCollectors.max(groupValueMapping, comparator);
}
/**
 * Returns a collector that collects the grouped elements into a custom {@link Collection}
 * created by {@code collectionFunction}.
 * NOTE(review): the {@link IntFunction} argument presumably receives the element count,
 * to pre-size the collection — confirm against {@code toCollection(Function, IntFunction)}.
 *
 * @deprecated Prefer {@link #toList()}, {@link #toSet()} or {@link #toSortedSet()}
 */
@Deprecated(/* forRemoval = true */)
public static <A, Result extends Collection<A>> UniConstraintCollector<A, ?, Result> toCollection(
        IntFunction<Result> collectionFunction) {
    return toCollection(ConstantLambdaUtils.identity(), collectionFunction);
}
// ************************************************************************
// average
// ************************************************************************
/**
* Returns a collector that calculates an average of an {@code int} property of the elements that are being grouped.
* <p>
* For example, {@code [Ann(age = 20), Beth(age = 25), Cathy(age = 30), David(age = 30), Eric(age = 20)]} with
* {@code .groupBy(average(Person::getAge))} returns {@code 25}.
* <p>
* The default result of the collector (e.g. when never called) is {@code null}.
*
* @param <A> type of the matched fact
*/
public static <A> @NonNull UniConstraintCollector<A, ?, Double> average(@NonNull ToIntFunction<A> groupValueMapping) {
return InnerUniConstraintCollectors.average(groupValueMapping);
}
/**
* As defined by {@link #average(ToIntFunction)}.
*/
public static <A> @NonNull UniConstraintCollector<A, ?, Double> averageLong(@NonNull ToLongFunction<A> groupValueMapping) {
return InnerUniConstraintCollectors.average(groupValueMapping);
}
/**
 * Variant of {@link #average(ToIntFunction)} for {@link BigDecimal} properties.
 * The resulting {@link BigDecimal} carries the scale of the sum of all the input tuples,
 * with rounding mode {@link RoundingMode#HALF_EVEN}.
 */
public static <A> @NonNull UniConstraintCollector<A, ?, BigDecimal>
        averageBigDecimal(@NonNull Function<A, BigDecimal> groupValueMapping) {
    return InnerUniConstraintCollectors.averageBigDecimal(groupValueMapping);
}
/**
 * Variant of {@link #average(ToIntFunction)} for {@link BigInteger} properties,
 * producing a {@link BigDecimal} average with rounding mode {@link RoundingMode#HALF_EVEN}.
 * <p>
 * NOTE(review): the previous text referred to "the scale of the sum of all the input tuples",
 * apparently copy-pasted from the {@link BigDecimal} variant; {@link BigInteger} inputs carry
 * no scale — confirm the result's scale against the underlying collector implementation.
 */
public static <A> @NonNull UniConstraintCollector<A, ?, BigDecimal>
        averageBigInteger(@NonNull Function<A, BigInteger> groupValueMapping) {
    return InnerUniConstraintCollectors.averageBigInteger(groupValueMapping);
}
/**
 * Variant of {@link #average(ToIntFunction)} for {@link Duration} properties.
 */
public static <A> @NonNull UniConstraintCollector<A, ?, Duration> averageDuration(
        @NonNull Function<A, Duration> groupValueMapping) {
    return InnerUniConstraintCollectors.averageDuration(groupValueMapping);
}
/**
 * Bi-stream variant of {@link #average(ToIntFunction)}.
 */
public static <A, B> @NonNull BiConstraintCollector<A, B, ?, Double> average(
        @NonNull ToIntBiFunction<A, B> groupValueMapping) {
    return InnerBiConstraintCollectors.average(groupValueMapping);
}
/**
 * Bi-stream variant of {@link #average(ToIntFunction)}, reading a {@code long} property.
 */
public static <A, B> @NonNull BiConstraintCollector<A, B, ?, Double> averageLong(
        @NonNull ToLongBiFunction<A, B> groupValueMapping) {
    return InnerBiConstraintCollectors.average(groupValueMapping);
}
/**
 * Bi-stream variant of {@link #averageBigDecimal(Function)}.
 */
public static <A, B> @NonNull BiConstraintCollector<A, B, ?, BigDecimal> averageBigDecimal(
        @NonNull BiFunction<A, B, BigDecimal> groupValueMapping) {
    return InnerBiConstraintCollectors.averageBigDecimal(groupValueMapping);
}
/**
 * Bi-stream variant of {@link #averageBigInteger(Function)}.
 */
public static <A, B> @NonNull BiConstraintCollector<A, B, ?, BigDecimal> averageBigInteger(
        @NonNull BiFunction<A, B, BigInteger> groupValueMapping) {
    return InnerBiConstraintCollectors.averageBigInteger(groupValueMapping);
}
/**
 * Bi-stream variant of {@link #average(ToIntFunction)}, for {@link Duration} properties.
 */
public static <A, B> @NonNull BiConstraintCollector<A, B, ?, Duration> averageDuration(
        @NonNull BiFunction<A, B, Duration> groupValueMapping) {
    return InnerBiConstraintCollectors.averageDuration(groupValueMapping);
}
/**
 * Tri-stream variant of {@link #average(ToIntFunction)}.
 */
public static <A, B, C> @NonNull TriConstraintCollector<A, B, C, ?, Double> average(
        @NonNull ToIntTriFunction<A, B, C> groupValueMapping) {
    return InnerTriConstraintCollectors.average(groupValueMapping);
}
/**
 * Tri-stream variant of {@link #average(ToIntFunction)}, reading a {@code long} property.
 */
public static <A, B, C> @NonNull TriConstraintCollector<A, B, C, ?, Double> averageLong(
        @NonNull ToLongTriFunction<A, B, C> groupValueMapping) {
    return InnerTriConstraintCollectors.average(groupValueMapping);
}
/**
 * Tri-stream variant of {@link #averageBigDecimal(Function)}.
 */
public static <A, B, C> @NonNull TriConstraintCollector<A, B, C, ?, BigDecimal> averageBigDecimal(
        @NonNull TriFunction<A, B, C, BigDecimal> groupValueMapping) {
    return InnerTriConstraintCollectors.averageBigDecimal(groupValueMapping);
}
/**
 * Tri-stream variant of {@link #averageBigInteger(Function)}.
 */
public static <A, B, C> @NonNull TriConstraintCollector<A, B, C, ?, BigDecimal> averageBigInteger(
        @NonNull TriFunction<A, B, C, BigInteger> groupValueMapping) {
    return InnerTriConstraintCollectors.averageBigInteger(groupValueMapping);
}
/**
 * Tri-stream variant of {@link #average(ToIntFunction)}, for {@link Duration} properties.
 */
public static <A, B, C> @NonNull TriConstraintCollector<A, B, C, ?, Duration> averageDuration(
        @NonNull TriFunction<A, B, C, Duration> groupValueMapping) {
    return InnerTriConstraintCollectors.averageDuration(groupValueMapping);
}
/**
 * Quad-stream variant of {@link #average(ToIntFunction)}.
 */
public static <A, B, C, D> @NonNull QuadConstraintCollector<A, B, C, D, ?, Double> average(
        @NonNull ToIntQuadFunction<A, B, C, D> groupValueMapping) {
    return InnerQuadConstraintCollectors.average(groupValueMapping);
}
/**
 * Quad-stream variant of {@link #average(ToIntFunction)}, reading a {@code long} property.
 */
public static <A, B, C, D> @NonNull QuadConstraintCollector<A, B, C, D, ?, Double> averageLong(
        @NonNull ToLongQuadFunction<A, B, C, D> groupValueMapping) {
    return InnerQuadConstraintCollectors.average(groupValueMapping);
}
/**
 * Quad-stream variant of {@link #averageBigDecimal(Function)}.
 */
public static <A, B, C, D> @NonNull QuadConstraintCollector<A, B, C, D, ?, BigDecimal> averageBigDecimal(
        @NonNull QuadFunction<A, B, C, D, BigDecimal> groupValueMapping) {
    return InnerQuadConstraintCollectors.averageBigDecimal(groupValueMapping);
}
/**
 * Quad-stream variant of {@link #averageBigInteger(Function)}.
 */
public static <A, B, C, D> @NonNull QuadConstraintCollector<A, B, C, D, ?, BigDecimal> averageBigInteger(
        @NonNull QuadFunction<A, B, C, D, BigInteger> groupValueMapping) {
    return InnerQuadConstraintCollectors.averageBigInteger(groupValueMapping);
}
/**
 * Quad-stream variant of {@link #average(ToIntFunction)}, for {@link Duration} properties.
 */
public static <A, B, C, D> @NonNull QuadConstraintCollector<A, B, C, D, ?, Duration> averageDuration(
        @NonNull QuadFunction<A, B, C, D, Duration> groupValueMapping) {
    return InnerQuadConstraintCollectors.averageDuration(groupValueMapping);
}
// ************************************************************************
// toCollection
// ************************************************************************
/**
 * Creates a collector producing a {@link Set} of the stream's element type.
 * Iteration order is unspecified; use {@link #toSortedSet()} when a stable order is needed.
 * <p>
 * When the collector is never called, its result defaults to an empty {@link Set}.
 *
 * @param <A> type of the matched fact
 */
public static <A> @NonNull UniConstraintCollector<A, ?, Set<A>> toSet() {
    return toSet(ConstantLambdaUtils.<A> identity());
}
/**
 * Creates a collector producing a naturally-ordered {@link SortedSet}
 * of the stream's element type.
 * <p>
 * When the collector is never called, its result defaults to an empty {@link SortedSet}.
 *
 * @param <A> type of the matched fact
 */
public static <A extends Comparable<A>> @NonNull UniConstraintCollector<A, ?, SortedSet<A>> toSortedSet() {
    return toSortedSet(ConstantLambdaUtils.<A> identity());
}
/**
 * As {@link #toSortedSet()}, but ordered by the given {@link Comparator}.
 */
public static <A> @NonNull UniConstraintCollector<A, ?, SortedSet<A>> toSortedSet(
        @NonNull Comparator<? super A> comparator) {
    return toSortedSet(ConstantLambdaUtils.<A> identity(), comparator);
}
/**
 * Creates a collector producing a {@link List} of the stream's element type.
 * Iteration order is unspecified; for a stable order, use {@link #toSortedSet()} instead.
 * <p>
 * When the collector is never called, its result defaults to an empty {@link List}.
 *
 * @param <A> type of the matched fact
 */
public static <A> @NonNull UniConstraintCollector<A, ?, List<A>> toList() {
    return toList(ConstantLambdaUtils.<A> identity());
}
/**
 * Collects mapped values into a {@link Collection} supplied by {@code collectionFunction}.
 *
 * @deprecated Prefer {@link #toList(Function)}, {@link #toSet(Function)} or {@link #toSortedSet(Function)}
 */
@Deprecated(/* forRemoval = true */)
public static <A, Mapped, Result extends Collection<Mapped>> UniConstraintCollector<A, ?, Result>
        toCollection(Function<A, Mapped> groupValueMapping, IntFunction<Result> collectionFunction) {
    return InnerUniConstraintCollectors.toCollection(groupValueMapping, collectionFunction);
}
/**
 * Creates a collector producing a {@link Set} of the values extracted by {@code groupValueMapping}.
 * Iteration order is unspecified; use {@link #toSortedSet()} when a stable order is needed.
 * <p>
 * When the collector is never called, its result defaults to an empty {@link Set}.
 *
 * @param groupValueMapping converts matched facts to elements of the resulting set
 * @param <A> type of the matched fact
 * @param <Mapped> type of elements in the resulting set
 */
public static <A, Mapped> @NonNull UniConstraintCollector<A, ?, Set<Mapped>> toSet(
        @NonNull Function<A, Mapped> groupValueMapping) {
    return InnerUniConstraintCollectors.toSet(groupValueMapping);
}
/**
 * Creates a collector producing a naturally-ordered {@link SortedSet} of the values
 * extracted by {@code groupValueMapping}.
 * <p>
 * When the collector is never called, its result defaults to an empty {@link SortedSet}.
 *
 * @param groupValueMapping converts matched facts to elements of the resulting set
 * @param <A> type of the matched fact
 * @param <Mapped> type of elements in the resulting set
 */
public static <A, Mapped extends Comparable<? super Mapped>> @NonNull UniConstraintCollector<A, ?, SortedSet<Mapped>>
        toSortedSet(@NonNull Function<A, Mapped> groupValueMapping) {
    Comparator<Mapped> naturalOrder = Comparator.naturalOrder();
    return toSortedSet(groupValueMapping, naturalOrder);
}
/**
 * As {@link #toSortedSet(Function)}, but ordered by the given {@link Comparator}.
 */
public static <A, Mapped> @NonNull UniConstraintCollector<A, ?, SortedSet<Mapped>>
        toSortedSet(@NonNull Function<A, Mapped> groupValueMapping,
                @NonNull Comparator<? super Mapped> comparator) {
    return InnerUniConstraintCollectors.toSortedSet(groupValueMapping, comparator);
}
/**
 * Creates a collector producing a {@link List} of the values extracted by {@code groupValueMapping}.
 * Iteration order is unspecified; for a stable order, use {@link #toSortedSet(Function)} instead.
 * <p>
 * When the collector is never called, its result defaults to an empty {@link List}.
 *
 * @param groupValueMapping converts matched facts to elements of the resulting collection
 * @param <A> type of the matched fact
 * @param <Mapped> type of elements in the resulting collection
 */
public static <A, Mapped> @NonNull UniConstraintCollector<A, ?, List<Mapped>> toList(
        @NonNull Function<A, Mapped> groupValueMapping) {
    return InnerUniConstraintCollectors.toList(groupValueMapping);
}
/**
 * Collects mapped values into a {@link Collection} supplied by {@code collectionFunction}.
 *
 * @deprecated Prefer {@link #toList(BiFunction)}, {@link #toSet(BiFunction)}
 *             or {@link #toSortedSet(BiFunction)}
 */
@Deprecated(/* forRemoval = true */)
public static <A, B, Mapped, Result extends Collection<Mapped>> BiConstraintCollector<A, B, ?, Result>
        toCollection(BiFunction<A, B, Mapped> groupValueMapping, IntFunction<Result> collectionFunction) {
    return InnerBiConstraintCollectors.toCollection(groupValueMapping, collectionFunction);
}
/**
 * Bi-stream variant of {@link #toSet(Function)}.
 */
public static <A, B, Mapped> @NonNull BiConstraintCollector<A, B, ?, Set<Mapped>>
        toSet(@NonNull BiFunction<A, B, Mapped> groupValueMapping) {
    return InnerBiConstraintCollectors.toSet(groupValueMapping);
}
/**
 * Bi-stream variant of {@link #toSortedSet(Function)}.
 */
public static <A, B, Mapped extends Comparable<? super Mapped>> @NonNull BiConstraintCollector<A, B, ?, SortedSet<Mapped>>
        toSortedSet(@NonNull BiFunction<A, B, Mapped> groupValueMapping) {
    Comparator<Mapped> naturalOrder = Comparator.naturalOrder();
    return toSortedSet(groupValueMapping, naturalOrder);
}
/**
 * Bi-stream variant of {@link #toSortedSet(Function, Comparator)}.
 */
public static <A, B, Mapped> @NonNull BiConstraintCollector<A, B, ?, SortedSet<Mapped>>
        toSortedSet(@NonNull BiFunction<A, B, Mapped> groupValueMapping,
                @NonNull Comparator<? super Mapped> comparator) {
    return InnerBiConstraintCollectors.toSortedSet(groupValueMapping, comparator);
}
/**
 * Bi-stream variant of {@link #toList(Function)}.
 */
public static <A, B, Mapped> @NonNull BiConstraintCollector<A, B, ?, List<Mapped>>
        toList(@NonNull BiFunction<A, B, Mapped> groupValueMapping) {
    return InnerBiConstraintCollectors.toList(groupValueMapping);
}
/**
 * Collects mapped values into a {@link Collection} supplied by {@code collectionFunction}.
 *
 * @deprecated Prefer {@link #toList(TriFunction)}, {@link #toSet(TriFunction)}
 *             or {@link #toSortedSet(TriFunction)}
 */
@Deprecated(/* forRemoval = true */)
public static <A, B, C, Mapped, Result extends Collection<Mapped>> TriConstraintCollector<A, B, C, ?, Result>
        toCollection(TriFunction<A, B, C, Mapped> groupValueMapping, IntFunction<Result> collectionFunction) {
    return InnerTriConstraintCollectors.toCollection(groupValueMapping, collectionFunction);
}
/**
 * Tri-stream variant of {@link #toSet(Function)}.
 */
public static <A, B, C, Mapped> @NonNull TriConstraintCollector<A, B, C, ?, Set<Mapped>>
        toSet(@NonNull TriFunction<A, B, C, Mapped> groupValueMapping) {
    return InnerTriConstraintCollectors.toSet(groupValueMapping);
}
/**
 * Tri-stream variant of {@link #toSortedSet(Function)}.
 */
public static <A, B, C, Mapped extends Comparable<? super Mapped>>
        @NonNull TriConstraintCollector<A, B, C, ?, SortedSet<Mapped>>
        toSortedSet(@NonNull TriFunction<A, B, C, Mapped> groupValueMapping) {
    Comparator<Mapped> naturalOrder = Comparator.naturalOrder();
    return toSortedSet(groupValueMapping, naturalOrder);
}
/**
 * Tri-stream variant of {@link #toSortedSet(Function, Comparator)}.
 */
public static <A, B, C, Mapped> @NonNull TriConstraintCollector<A, B, C, ?, SortedSet<Mapped>>
        toSortedSet(@NonNull TriFunction<A, B, C, Mapped> groupValueMapping,
                @NonNull Comparator<? super Mapped> comparator) {
    return InnerTriConstraintCollectors.toSortedSet(groupValueMapping, comparator);
}
/**
 * Tri-stream variant of {@link #toList(Function)}.
 */
public static <A, B, C, Mapped> @NonNull TriConstraintCollector<A, B, C, ?, List<Mapped>>
        toList(@NonNull TriFunction<A, B, C, Mapped> groupValueMapping) {
    return InnerTriConstraintCollectors.toList(groupValueMapping);
}
/**
 * Collects mapped values into a {@link Collection} supplied by {@code collectionFunction}.
 *
 * @deprecated Prefer {@link #toList(QuadFunction)}, {@link #toSet(QuadFunction)}
 *             or {@link #toSortedSet(QuadFunction)}
 */
@Deprecated(/* forRemoval = true */)
public static <A, B, C, D, Mapped, Result extends Collection<Mapped>> QuadConstraintCollector<A, B, C, D, ?, Result>
        toCollection(QuadFunction<A, B, C, D, Mapped> groupValueMapping, IntFunction<Result> collectionFunction) {
    return InnerQuadConstraintCollectors.toCollection(groupValueMapping, collectionFunction);
}
/**
 * Quad-stream variant of {@link #toSet(Function)}.
 */
public static <A, B, C, D, Mapped> @NonNull QuadConstraintCollector<A, B, C, D, ?, Set<Mapped>>
        toSet(@NonNull QuadFunction<A, B, C, D, Mapped> groupValueMapping) {
    return InnerQuadConstraintCollectors.toSet(groupValueMapping);
}
/**
 * Quad-stream variant of {@link #toSortedSet(Function)}.
 */
public static <A, B, C, D, Mapped extends Comparable<? super Mapped>>
        @NonNull QuadConstraintCollector<A, B, C, D, ?, SortedSet<Mapped>>
        toSortedSet(@NonNull QuadFunction<A, B, C, D, Mapped> groupValueMapping) {
    Comparator<Mapped> naturalOrder = Comparator.naturalOrder();
    return toSortedSet(groupValueMapping, naturalOrder);
}
/**
 * Quad-stream variant of {@link #toSortedSet(Function, Comparator)}.
 */
public static <A, B, C, D, Mapped> @NonNull QuadConstraintCollector<A, B, C, D, ?, SortedSet<Mapped>>
        toSortedSet(@NonNull QuadFunction<A, B, C, D, Mapped> groupValueMapping,
                @NonNull Comparator<? super Mapped> comparator) {
    return InnerQuadConstraintCollectors.toSortedSet(groupValueMapping, comparator);
}
/**
 * Quad-stream variant of {@link #toList(Function)}.
 */
public static <A, B, C, D, Mapped> @NonNull QuadConstraintCollector<A, B, C, D, ?, List<Mapped>>
        toList(@NonNull QuadFunction<A, B, C, D, Mapped> groupValueMapping) {
    return InnerQuadConstraintCollectors.toList(groupValueMapping);
}
// ************************************************************************
// toMap
// ************************************************************************
/**
 * Creates a collector producing a {@link Map} in which each key maps to the {@link Set}
 * of all values encountered for it.
 * <p>
 * Example: {@code [Ann(age = 20), Beth(age = 25), Cathy(age = 30), David(age = 30), Eric(age = 20)]}
 * with {@code .groupBy(toMap(Person::getAge, Person::getName))} returns
 * {@code {20: [Ann, Eric], 25: [Beth], 30: [Cathy, David]}}.
 * <p>
 * Neither map-entry nor value-set iteration order is guaranteed;
 * for stable entry order, use {@link #toSortedMap(Function, Function, IntFunction)}.
 * <p>
 * When the collector is never called, its result defaults to an empty {@link Map}.
 *
 * @param keyMapper map matched fact to a map key
 * @param valueMapper map matched fact to a value
 * @param <A> type of the matched fact
 * @param <Key> type of map key
 * @param <Value> type of map value
 */
public static <A, Key, Value> @NonNull UniConstraintCollector<A, ?, Map<Key, Set<Value>>> toMap(
        @NonNull Function<? super A, ? extends Key> keyMapper,
        @NonNull Function<? super A, ? extends Value> valueMapper) {
    IntFunction<Set<Value>> valueSetFunction = LinkedHashSet::new;
    return toMap(keyMapper, valueMapper, valueSetFunction);
}
/**
 * Creates a collector producing a {@link Map} in which each key maps to a {@link Set}
 * of all values encountered for it, with the value sets created by {@code valueSetFunction}.
 * <p>
 * Example: {@code [Ann(age = 20), Beth(age = 25), Cathy(age = 30), David(age = 30), Eric(age = 20)]}
 * with {@code .groupBy(toMap(Person::getAge, Person::getName))} returns
 * {@code {20: [Ann, Eric], 25: [Beth], 30: [Cathy, David]}}.
 * <p>
 * Value-set iteration order is determined by the supplied {@link Set};
 * map-entry order is unspecified — use {@link #toSortedMap(Function, Function, IntFunction)} for that.
 * <p>
 * When the collector is never called, its result defaults to an empty {@link Map}.
 *
 * @param keyMapper map matched fact to a map key
 * @param valueMapper map matched fact to a value
 * @param valueSetFunction creates a set that will be used to store value mappings
 * @param <A> type of the matched fact
 * @param <Key> type of map key
 * @param <Value> type of map value
 * @param <ValueSet> type of the value set
 */
public static <A, Key, Value, ValueSet extends Set<Value>> @NonNull UniConstraintCollector<A, ?, Map<Key, ValueSet>>
        toMap(@NonNull Function<? super A, ? extends Key> keyMapper,
                @NonNull Function<? super A, ? extends Value> valueMapper,
                @NonNull IntFunction<ValueSet> valueSetFunction) {
    return InnerUniConstraintCollectors.toMap(keyMapper, valueMapper, HashMap::new, valueSetFunction);
}
/**
 * Creates a collector producing a {@link Map} where duplicate values for the same key
 * are combined by {@code mergeFunction}.
 * <p>
 * Example: {@code [Ann(age = 20), Beth(age = 25), Cathy(age = 30), David(age = 30), Eric(age = 20)]}
 * with {@code .groupBy(toMap(Person::getAge, Person::getName, (name1, name2) -> name1 + " and " + name2)} returns
 * {@code {20: "Ann and Eric", 25: "Beth", 30: "Cathy and David"}}.
 * <p>
 * Map-entry iteration order is unspecified;
 * for stable entry order, use {@link #toSortedMap(Function, Function, BinaryOperator)}.
 * <p>
 * When the collector is never called, its result defaults to an empty {@link Map}.
 *
 * @param keyMapper map matched fact to a map key
 * @param valueMapper map matched fact to a value
 * @param mergeFunction takes two values and merges them to one
 * @param <A> type of the matched fact
 * @param <Key> type of map key
 * @param <Value> type of map value
 */
public static <A, Key, Value> @NonNull UniConstraintCollector<A, ?, Map<Key, Value>> toMap(
        @NonNull Function<? super A, ? extends Key> keyMapper,
        @NonNull Function<? super A, ? extends Value> valueMapper,
        @NonNull BinaryOperator<Value> mergeFunction) {
    return InnerUniConstraintCollectors.toMap(keyMapper, valueMapper, HashMap::new, mergeFunction);
}
/**
 * Creates a collector producing a {@link SortedMap} in which each key maps to the {@link Set}
 * of all values encountered for it.
 * <p>
 * Example: {@code [Ann(age = 20), Beth(age = 25), Cathy(age = 30), David(age = 30), Eric(age = 20)]}
 * with {@code .groupBy(toMap(Person::getAge, Person::getName))} returns
 * {@code {20: [Ann, Eric], 25: [Beth], 30: [Cathy, David]}}.
 * <p>
 * Value-set iteration order is unspecified;
 * use {@link #toSortedMap(Function, Function, IntFunction)} to control it.
 * <p>
 * When the collector is never called, its result defaults to an empty {@link SortedMap}.
 *
 * @param keyMapper map matched fact to a map key
 * @param valueMapper map matched fact to a value
 * @param <A> type of the matched fact
 * @param <Key> type of map key
 * @param <Value> type of map value
 */
public static <A, Key extends Comparable<? super Key>, Value>
        @NonNull UniConstraintCollector<A, ?, SortedMap<Key, Set<Value>>>
        toSortedMap(@NonNull Function<? super A, ? extends Key> keyMapper,
                @NonNull Function<? super A, ? extends Value> valueMapper) {
    IntFunction<Set<Value>> valueSetFunction = LinkedHashSet::new;
    return toSortedMap(keyMapper, valueMapper, valueSetFunction);
}
/**
 * Creates a collector producing a {@link SortedMap} in which each key maps to a {@link Set}
 * of all values encountered for it, with the value sets created by {@code valueSetFunction}.
 * <p>
 * Example: {@code [Ann(age = 20), Beth(age = 25), Cathy(age = 30), David(age = 30), Eric(age = 20)]}
 * with {@code .groupBy(toMap(Person::getAge, Person::getName))} returns
 * {@code {20: [Ann, Eric], 25: [Beth], 30: [Cathy, David]}}.
 * <p>
 * Value-set iteration order is determined by the supplied {@link Set}.
 * <p>
 * When the collector is never called, its result defaults to an empty {@link SortedMap}.
 *
 * @param keyMapper map matched fact to a map key
 * @param valueMapper map matched fact to a value
 * @param valueSetFunction creates a set that will be used to store value mappings
 * @param <A> type of the matched fact
 * @param <Key> type of map key
 * @param <Value> type of map value
 * @param <ValueSet> type of the value set
 */
public static <A, Key extends Comparable<? super Key>, Value, ValueSet extends Set<Value>>
        @NonNull UniConstraintCollector<A, ?, SortedMap<Key, ValueSet>> toSortedMap(
                @NonNull Function<? super A, ? extends Key> keyMapper,
                @NonNull Function<? super A, ? extends Value> valueMapper,
                @NonNull IntFunction<ValueSet> valueSetFunction) {
    return InnerUniConstraintCollectors.toMap(keyMapper, valueMapper, TreeMap::new, valueSetFunction);
}
/**
 * Creates a collector producing a {@link SortedMap} where duplicate values for the same key
 * are combined by {@code mergeFunction}.
 * <p>
 * Example: {@code [Ann(age = 20), Beth(age = 25), Cathy(age = 30), David(age = 30), Eric(age = 20)]}
 * with {@code .groupBy(toMap(Person::getAge, Person::getName, (name1, name2) -> name1 + " and " + name2)} returns
 * {@code {20: "Ann and Eric", 25: "Beth", 30: "Cathy and David"}}.
 * <p>
 * When the collector is never called, its result defaults to an empty {@link SortedMap}.
 *
 * @param keyMapper map matched fact to a map key
 * @param valueMapper map matched fact to a value
 * @param mergeFunction takes two values and merges them to one
 * @param <A> type of the matched fact
 * @param <Key> type of map key
 * @param <Value> type of map value
 */
public static <A, Key extends Comparable<? super Key>, Value>
        @NonNull UniConstraintCollector<A, ?, SortedMap<Key, Value>> toSortedMap(
                @NonNull Function<? super A, ? extends Key> keyMapper,
                @NonNull Function<? super A, ? extends Value> valueMapper,
                @NonNull BinaryOperator<Value> mergeFunction) {
    return InnerUniConstraintCollectors.toMap(keyMapper, valueMapper, TreeMap::new, mergeFunction);
}
/**
 * Bi-stream variant of {@link #toMap(Function, Function)}.
 */
public static <A, B, Key, Value> @NonNull BiConstraintCollector<A, B, ?, Map<Key, Set<Value>>> toMap(
        @NonNull BiFunction<? super A, ? super B, ? extends Key> keyMapper,
        @NonNull BiFunction<? super A, ? super B, ? extends Value> valueMapper) {
    IntFunction<Set<Value>> valueSetFunction = LinkedHashSet::new;
    return toMap(keyMapper, valueMapper, valueSetFunction);
}
/**
 * Bi-stream variant of {@link #toMap(Function, Function, IntFunction)}.
 */
public static <A, B, Key, Value, ValueSet extends Set<Value>>
        @NonNull BiConstraintCollector<A, B, ?, Map<Key, ValueSet>> toMap(
                @NonNull BiFunction<? super A, ? super B, ? extends Key> keyMapper,
                @NonNull BiFunction<? super A, ? super B, ? extends Value> valueMapper,
                @NonNull IntFunction<ValueSet> valueSetFunction) {
    return InnerBiConstraintCollectors.toMap(keyMapper, valueMapper, HashMap::new, valueSetFunction);
}
/**
 * Bi-stream variant of {@link #toMap(Function, Function, BinaryOperator)}.
 */
public static <A, B, Key, Value> @NonNull BiConstraintCollector<A, B, ?, Map<Key, Value>>
        toMap(@NonNull BiFunction<? super A, ? super B, ? extends Key> keyMapper,
                @NonNull BiFunction<? super A, ? super B, ? extends Value> valueMapper,
                @NonNull BinaryOperator<Value> mergeFunction) {
    return InnerBiConstraintCollectors.toMap(keyMapper, valueMapper, HashMap::new, mergeFunction);
}
/**
 * Bi-stream variant of {@link #toSortedMap(Function, Function)}.
 */
public static <A, B, Key extends Comparable<? super Key>, Value>
        @NonNull BiConstraintCollector<A, B, ?, SortedMap<Key, Set<Value>>>
        toSortedMap(@NonNull BiFunction<? super A, ? super B, ? extends Key> keyMapper,
                @NonNull BiFunction<? super A, ? super B, ? extends Value> valueMapper) {
    IntFunction<Set<Value>> valueSetFunction = LinkedHashSet::new;
    return toSortedMap(keyMapper, valueMapper, valueSetFunction);
}
/**
 * Bi-stream variant of {@link #toSortedMap(Function, Function, IntFunction)}.
 */
public static <A, B, Key extends Comparable<? super Key>, Value, ValueSet extends Set<Value>>
        @NonNull BiConstraintCollector<A, B, ?, SortedMap<Key, ValueSet>> toSortedMap(
                @NonNull BiFunction<? super A, ? super B, ? extends Key> keyMapper,
                @NonNull BiFunction<? super A, ? super B, ? extends Value> valueMapper,
                @NonNull IntFunction<ValueSet> valueSetFunction) {
    return InnerBiConstraintCollectors.toMap(keyMapper, valueMapper, TreeMap::new, valueSetFunction);
}
/**
 * Bi-stream variant of {@link #toSortedMap(Function, Function, BinaryOperator)}.
 */
public static <A, B, Key extends Comparable<? super Key>, Value>
        @NonNull BiConstraintCollector<A, B, ?, SortedMap<Key, Value>> toSortedMap(
                @NonNull BiFunction<? super A, ? super B, ? extends Key> keyMapper,
                @NonNull BiFunction<? super A, ? super B, ? extends Value> valueMapper,
                @NonNull BinaryOperator<Value> mergeFunction) {
    return InnerBiConstraintCollectors.toMap(keyMapper, valueMapper, TreeMap::new, mergeFunction);
}
/**
 * Tri-stream variant of {@link #toMap(Function, Function)}.
 */
public static <A, B, C, Key, Value> @NonNull TriConstraintCollector<A, B, C, ?, Map<Key, Set<Value>>> toMap(
        @NonNull TriFunction<? super A, ? super B, ? super C, ? extends Key> keyMapper,
        @NonNull TriFunction<? super A, ? super B, ? super C, ? extends Value> valueMapper) {
    IntFunction<Set<Value>> valueSetFunction = LinkedHashSet::new;
    return toMap(keyMapper, valueMapper, valueSetFunction);
}
/**
 * Tri-stream variant of {@link #toMap(Function, Function, IntFunction)}.
 */
public static <A, B, C, Key, Value, ValueSet extends Set<Value>>
        @NonNull TriConstraintCollector<A, B, C, ?, Map<Key, ValueSet>> toMap(
                @NonNull TriFunction<? super A, ? super B, ? super C, ? extends Key> keyMapper,
                @NonNull TriFunction<? super A, ? super B, ? super C, ? extends Value> valueMapper,
                @NonNull IntFunction<ValueSet> valueSetFunction) {
    return InnerTriConstraintCollectors.toMap(keyMapper, valueMapper, HashMap::new, valueSetFunction);
}
/**
 * Tri-stream variant of {@link #toMap(Function, Function, BinaryOperator)}.
 */
public static <A, B, C, Key, Value> @NonNull TriConstraintCollector<A, B, C, ?, Map<Key, Value>>
        toMap(@NonNull TriFunction<? super A, ? super B, ? super C, ? extends Key> keyMapper,
                @NonNull TriFunction<? super A, ? super B, ? super C, ? extends Value> valueMapper,
                @NonNull BinaryOperator<Value> mergeFunction) {
    return InnerTriConstraintCollectors.toMap(keyMapper, valueMapper, HashMap::new, mergeFunction);
}
/**
 * Tri-stream variant of {@link #toSortedMap(Function, Function)}.
 */
public static <A, B, C, Key extends Comparable<? super Key>, Value>
        @NonNull TriConstraintCollector<A, B, C, ?, SortedMap<Key, Set<Value>>>
        toSortedMap(@NonNull TriFunction<? super A, ? super B, ? super C, ? extends Key> keyMapper,
                @NonNull TriFunction<? super A, ? super B, ? super C, ? extends Value> valueMapper) {
    IntFunction<Set<Value>> valueSetFunction = LinkedHashSet::new;
    return toSortedMap(keyMapper, valueMapper, valueSetFunction);
}
/**
 * Tri-stream variant of {@link #toSortedMap(Function, Function, IntFunction)}.
 */
public static <A, B, C, Key extends Comparable<? super Key>, Value, ValueSet extends Set<Value>>
        @NonNull TriConstraintCollector<A, B, C, ?, SortedMap<Key, ValueSet>> toSortedMap(
                @NonNull TriFunction<? super A, ? super B, ? super C, ? extends Key> keyMapper,
                @NonNull TriFunction<? super A, ? super B, ? super C, ? extends Value> valueMapper,
                @NonNull IntFunction<ValueSet> valueSetFunction) {
    return InnerTriConstraintCollectors.toMap(keyMapper, valueMapper, TreeMap::new, valueSetFunction);
}
/**
 * Tri-stream variant of {@link #toSortedMap(Function, Function, BinaryOperator)}.
 */
public static <A, B, C, Key extends Comparable<? super Key>, Value>
        @NonNull TriConstraintCollector<A, B, C, ?, SortedMap<Key, Value>> toSortedMap(
                @NonNull TriFunction<? super A, ? super B, ? super C, ? extends Key> keyMapper,
                @NonNull TriFunction<? super A, ? super B, ? super C, ? extends Value> valueMapper,
                @NonNull BinaryOperator<Value> mergeFunction) {
    return InnerTriConstraintCollectors.toMap(keyMapper, valueMapper, TreeMap::new, mergeFunction);
}
/**
 * Quad-stream variant of {@link #toMap(Function, Function)}.
 */
public static <A, B, C, D, Key, Value> @NonNull QuadConstraintCollector<A, B, C, D, ?, Map<Key, Set<Value>>> toMap(
        @NonNull QuadFunction<? super A, ? super B, ? super C, ? super D, ? extends Key> keyMapper,
        @NonNull QuadFunction<? super A, ? super B, ? super C, ? super D, ? extends Value> valueMapper) {
    IntFunction<Set<Value>> valueSetFunction = LinkedHashSet::new;
    return toMap(keyMapper, valueMapper, valueSetFunction);
}
/**
 * Quad-stream variant of {@link #toMap(Function, Function, IntFunction)}.
 */
public static <A, B, C, D, Key, Value, ValueSet extends Set<Value>>
        @NonNull QuadConstraintCollector<A, B, C, D, ?, Map<Key, ValueSet>> toMap(
                @NonNull QuadFunction<? super A, ? super B, ? super C, ? super D, ? extends Key> keyMapper,
                @NonNull QuadFunction<? super A, ? super B, ? super C, ? super D, ? extends Value> valueMapper,
                @NonNull IntFunction<ValueSet> valueSetFunction) {
    return InnerQuadConstraintCollectors.toMap(keyMapper, valueMapper, HashMap::new, valueSetFunction);
}
/**
 * Quad-stream variant of {@link #toMap(Function, Function, BinaryOperator)}.
 */
public static <A, B, C, D, Key, Value> @NonNull QuadConstraintCollector<A, B, C, D, ?, Map<Key, Value>>
        toMap(@NonNull QuadFunction<? super A, ? super B, ? super C, ? super D, ? extends Key> keyMapper,
                @NonNull QuadFunction<? super A, ? super B, ? super C, ? super D, ? extends Value> valueMapper,
                @NonNull BinaryOperator<Value> mergeFunction) {
    return InnerQuadConstraintCollectors.toMap(keyMapper, valueMapper, HashMap::new, mergeFunction);
}
/**
 * Quad-stream variant of {@link #toSortedMap(Function, Function)}.
 */
public static <A, B, C, D, Key extends Comparable<? super Key>, Value>
        @NonNull QuadConstraintCollector<A, B, C, D, ?, SortedMap<Key, Set<Value>>> toSortedMap(
                @NonNull QuadFunction<? super A, ? super B, ? super C, ? super D, ? extends Key> keyMapper,
                @NonNull QuadFunction<? super A, ? super B, ? super C, ? super D, ? extends Value> valueMapper) {
    IntFunction<Set<Value>> valueSetFunction = LinkedHashSet::new;
    return toSortedMap(keyMapper, valueMapper, valueSetFunction);
}
/**
 * Quad-stream variant of {@link #toSortedMap(Function, Function, IntFunction)}.
 */
public static <A, B, C, D, Key extends Comparable<? super Key>, Value, ValueSet extends Set<Value>>
        @NonNull QuadConstraintCollector<A, B, C, D, ?, SortedMap<Key, ValueSet>> toSortedMap(
                @NonNull QuadFunction<? super A, ? super B, ? super C, ? super D, ? extends Key> keyMapper,
                @NonNull QuadFunction<? super A, ? super B, ? super C, ? super D, ? extends Value> valueMapper,
                @NonNull IntFunction<ValueSet> valueSetFunction) {
    return InnerQuadConstraintCollectors.toMap(keyMapper, valueMapper, TreeMap::new, valueSetFunction);
}
/**
 * Quad-stream variant of {@link #toSortedMap(Function, Function, BinaryOperator)}.
 */
public static <A, B, C, D, Key extends Comparable<? super Key>, Value>
        @NonNull QuadConstraintCollector<A, B, C, D, ?, SortedMap<Key, Value>> toSortedMap(
                @NonNull QuadFunction<? super A, ? super B, ? super C, ? super D, ? extends Key> keyMapper,
                @NonNull QuadFunction<? super A, ? super B, ? super C, ? super D, ? extends Value> valueMapper,
                @NonNull BinaryOperator<Value> mergeFunction) {
    return InnerQuadConstraintCollectors.toMap(keyMapper, valueMapper, TreeMap::new, mergeFunction);
}
// ************************************************************************
// conditional collectors
// ************************************************************************
/**
 * Returns a collector that forwards a tuple to {@code delegate} only when
 * {@code condition} accepts it.
 *
 * <p>
 * The collector's result is always the delegate's result; in particular, its default
 * result (when never called) is the delegate's default result.
 *
 * @param condition condition to meet in order to delegate to the underlying collector
 * @param delegate the underlying collector to delegate to
 * @param <A> generic type of the tuple variable
 * @param <ResultContainer_> generic type of the result container
 * @param <Result_> generic type of the collector's return value
 */
public static <A, ResultContainer_, Result_> @NonNull UniConstraintCollector<A, ResultContainer_, Result_>
        conditionally(@NonNull Predicate<A> condition,
                @NonNull UniConstraintCollector<A, ResultContainer_, Result_> delegate) {
    return InnerUniConstraintCollectors.conditionally(condition, delegate);
}
/**
* As defined by {@link #conditionally(Predicate, UniConstraintCollector)}.
*/
public static <A, B, ResultContainer_, Result_> @NonNull BiConstraintCollector<A, B, ResultContainer_, Result_>
conditionally(@NonNull BiPredicate<A, B> condition,
@NonNull BiConstraintCollector<A, B, ResultContainer_, Result_> delegate) {
return InnerBiConstraintCollectors.conditionally(condition, delegate);
}
/**
* As defined by {@link #conditionally(Predicate, UniConstraintCollector)}.
*/
public static <A, B, C, ResultContainer_, Result_> @NonNull TriConstraintCollector<A, B, C, ResultContainer_, Result_>
conditionally(@NonNull TriPredicate<A, B, C> condition,
@NonNull TriConstraintCollector<A, B, C, ResultContainer_, Result_> delegate) {
return InnerTriConstraintCollectors.conditionally(condition, delegate);
}
/**
* As defined by {@link #conditionally(Predicate, UniConstraintCollector)}.
*/
public static <A, B, C, D, ResultContainer_, Result_>
@NonNull QuadConstraintCollector<A, B, C, D, ResultContainer_, Result_>
conditionally(@NonNull QuadPredicate<A, B, C, D> condition,
@NonNull QuadConstraintCollector<A, B, C, D, ResultContainer_, Result_> delegate) {
return InnerQuadConstraintCollectors.conditionally(condition, delegate);
}
// ************************************************************************
// forwarding collectors
// ************************************************************************
/**
 * Returns a collector that delegates to the underlying collector
 * and maps its result to another value.
 * <p>
 * This is a better performing alternative to {@code .groupBy(...).map(...)}.
 *
 * @param <A> generic type of the tuple variable
 * @param <Intermediate_> generic type of the delegate's return value
 * @param <Result_> generic type of the final collector's return value
 * @param delegate the underlying collector to delegate to
 * @param mappingFunction maps the result of the underlying collector to another value
 */
public static <A, Intermediate_, Result_> @NonNull UniConstraintCollector<A, ?, Result_>
        collectAndThen(@NonNull UniConstraintCollector<A, ?, Intermediate_> delegate,
                @NonNull Function<Intermediate_, Result_> mappingFunction) {
    return InnerUniConstraintCollectors.collectAndThen(delegate, mappingFunction);
}
/**
 * As defined by {@link #collectAndThen(UniConstraintCollector, Function)}.
 */
public static <A, B, Intermediate_, Result_> @NonNull BiConstraintCollector<A, B, ?, Result_>
        collectAndThen(@NonNull BiConstraintCollector<A, B, ?, Intermediate_> delegate,
                @NonNull Function<Intermediate_, Result_> mappingFunction) {
    return InnerBiConstraintCollectors.collectAndThen(delegate, mappingFunction);
}
/**
 * As defined by {@link #collectAndThen(UniConstraintCollector, Function)}.
 */
public static <A, B, C, Intermediate_, Result_> @NonNull TriConstraintCollector<A, B, C, ?, Result_>
        collectAndThen(@NonNull TriConstraintCollector<A, B, C, ?, Intermediate_> delegate,
                @NonNull Function<Intermediate_, Result_> mappingFunction) {
    return InnerTriConstraintCollectors.collectAndThen(delegate, mappingFunction);
}
/**
 * As defined by {@link #collectAndThen(UniConstraintCollector, Function)}.
 */
public static <A, B, C, D, Intermediate_, Result_> @NonNull QuadConstraintCollector<A, B, C, D, ?, Result_>
        collectAndThen(@NonNull QuadConstraintCollector<A, B, C, D, ?, Intermediate_> delegate,
                @NonNull Function<Intermediate_, Result_> mappingFunction) {
    return InnerQuadConstraintCollectors.collectAndThen(delegate, mappingFunction);
}
// ************************************************************************
// composite collectors
// ************************************************************************
/**
 * Returns a constraint collector the result of which is a composition of other constraint collectors.
 * The return value of this collector, incl. the default return value, depends solely on the compose function.
 *
 * @param subCollector1 first collector to compose
 * @param subCollector2 second collector to compose
 * @param composeFunction turns results of the sub collectors to a result of the parent collector
 * @param <A> generic type of the tuple variable
 * @param <Result_> generic type of the parent collector's return value
 * @param <SubResultContainer1_> generic type of the first sub collector's result container
 * @param <SubResultContainer2_> generic type of the second sub collector's result container
 * @param <SubResult1_> generic type of the first sub collector's return value
 * @param <SubResult2_> generic type of the second sub collector's return value
 */
public static <A, Result_, SubResultContainer1_, SubResultContainer2_, SubResult1_, SubResult2_>
        @NonNull UniConstraintCollector<A, ?, Result_> compose(
                @NonNull UniConstraintCollector<A, SubResultContainer1_, SubResult1_> subCollector1,
                @NonNull UniConstraintCollector<A, SubResultContainer2_, SubResult2_> subCollector2,
                @NonNull BiFunction<SubResult1_, SubResult2_, Result_> composeFunction) {
    // Delegates to the internal implementation; the compose function combines the sub collectors' results.
    return InnerUniConstraintCollectors.compose(subCollector1, subCollector2, composeFunction);
}
/**
 * Returns a constraint collector the result of which is a composition of other constraint collectors.
 * The return value of this collector, incl. the default return value, depends solely on the compose function.
 *
 * @param subCollector1 first collector to compose
 * @param subCollector2 second collector to compose
 * @param subCollector3 third collector to compose
 * @param composeFunction turns results of the sub collectors to a result of the parent collector
 * @param <A> generic type of the tuple variable
 * @param <Result_> generic type of the parent collector's return value
 * @param <SubResultContainer1_> generic type of the first sub collector's result container
 * @param <SubResultContainer2_> generic type of the second sub collector's result container
 * @param <SubResultContainer3_> generic type of the third sub collector's result container
 * @param <SubResult1_> generic type of the first sub collector's return value
 * @param <SubResult2_> generic type of the second sub collector's return value
 * @param <SubResult3_> generic type of the third sub collector's return value
 */
public static <A, Result_, SubResultContainer1_, SubResultContainer2_, SubResultContainer3_, SubResult1_, SubResult2_, SubResult3_>
        @NonNull UniConstraintCollector<A, ?, Result_> compose(
                @NonNull UniConstraintCollector<A, SubResultContainer1_, SubResult1_> subCollector1,
                @NonNull UniConstraintCollector<A, SubResultContainer2_, SubResult2_> subCollector2,
                @NonNull UniConstraintCollector<A, SubResultContainer3_, SubResult3_> subCollector3,
                @NonNull TriFunction<SubResult1_, SubResult2_, SubResult3_, Result_> composeFunction) {
    return InnerUniConstraintCollectors.compose(subCollector1, subCollector2, subCollector3, composeFunction);
}
/**
 * Returns a constraint collector the result of which is a composition of other constraint collectors.
 * The return value of this collector, incl. the default return value, depends solely on the compose function.
 *
 * @param subCollector1 first collector to compose
 * @param subCollector2 second collector to compose
 * @param subCollector3 third collector to compose
 * @param subCollector4 fourth collector to compose
 * @param composeFunction turns results of the sub collectors to a result of the parent collector
 * @param <A> generic type of the tuple variable
 * @param <Result_> generic type of the parent collector's return value
 * @param <SubResultContainer1_> generic type of the first sub collector's result container
 * @param <SubResultContainer2_> generic type of the second sub collector's result container
 * @param <SubResultContainer3_> generic type of the third sub collector's result container
 * @param <SubResultContainer4_> generic type of the fourth sub collector's result container
 * @param <SubResult1_> generic type of the first sub collector's return value
 * @param <SubResult2_> generic type of the second sub collector's return value
 * @param <SubResult3_> generic type of the third sub collector's return value
 * @param <SubResult4_> generic type of the fourth sub collector's return value
 */
public static <A, Result_, SubResultContainer1_, SubResultContainer2_, SubResultContainer3_, SubResultContainer4_, SubResult1_, SubResult2_, SubResult3_, SubResult4_>
        @NonNull UniConstraintCollector<A, ?, Result_> compose(
                @NonNull UniConstraintCollector<A, SubResultContainer1_, SubResult1_> subCollector1,
                @NonNull UniConstraintCollector<A, SubResultContainer2_, SubResult2_> subCollector2,
                @NonNull UniConstraintCollector<A, SubResultContainer3_, SubResult3_> subCollector3,
                @NonNull UniConstraintCollector<A, SubResultContainer4_, SubResult4_> subCollector4,
                @NonNull QuadFunction<SubResult1_, SubResult2_, SubResult3_, SubResult4_, Result_> composeFunction) {
    return InnerUniConstraintCollectors.compose(subCollector1, subCollector2, subCollector3, subCollector4,
            composeFunction);
}
/**
 * As defined by {@link #compose(UniConstraintCollector, UniConstraintCollector, BiFunction)}.
 */
public static <A, B, Result_, SubResultContainer1_, SubResultContainer2_, SubResult1_, SubResult2_>
        @NonNull BiConstraintCollector<A, B, ?, Result_> compose(
                @NonNull BiConstraintCollector<A, B, SubResultContainer1_, SubResult1_> subCollector1,
                @NonNull BiConstraintCollector<A, B, SubResultContainer2_, SubResult2_> subCollector2,
                @NonNull BiFunction<SubResult1_, SubResult2_, Result_> composeFunction) {
    // Bi-stream counterpart of the uni compose overloads; delegation only.
    return InnerBiConstraintCollectors.compose(subCollector1, subCollector2, composeFunction);
}
/**
 * As defined by {@link #compose(UniConstraintCollector, UniConstraintCollector, UniConstraintCollector, TriFunction)}.
 */
public static <A, B, Result_, SubResultContainer1_, SubResultContainer2_, SubResultContainer3_, SubResult1_, SubResult2_, SubResult3_>
        @NonNull BiConstraintCollector<A, B, ?, Result_> compose(
                @NonNull BiConstraintCollector<A, B, SubResultContainer1_, SubResult1_> subCollector1,
                @NonNull BiConstraintCollector<A, B, SubResultContainer2_, SubResult2_> subCollector2,
                @NonNull BiConstraintCollector<A, B, SubResultContainer3_, SubResult3_> subCollector3,
                @NonNull TriFunction<SubResult1_, SubResult2_, SubResult3_, Result_> composeFunction) {
    return InnerBiConstraintCollectors.compose(subCollector1, subCollector2, subCollector3, composeFunction);
}
/**
 * As defined by
 * {@link #compose(UniConstraintCollector, UniConstraintCollector, UniConstraintCollector, UniConstraintCollector, QuadFunction)}.
 */
public static <A, B, Result_, SubResultContainer1_, SubResultContainer2_, SubResultContainer3_, SubResultContainer4_, SubResult1_, SubResult2_, SubResult3_, SubResult4_>
        @NonNull BiConstraintCollector<A, B, ?, Result_> compose(
                @NonNull BiConstraintCollector<A, B, SubResultContainer1_, SubResult1_> subCollector1,
                @NonNull BiConstraintCollector<A, B, SubResultContainer2_, SubResult2_> subCollector2,
                @NonNull BiConstraintCollector<A, B, SubResultContainer3_, SubResult3_> subCollector3,
                @NonNull BiConstraintCollector<A, B, SubResultContainer4_, SubResult4_> subCollector4,
                @NonNull QuadFunction<SubResult1_, SubResult2_, SubResult3_, SubResult4_, Result_> composeFunction) {
    return InnerBiConstraintCollectors.compose(subCollector1, subCollector2, subCollector3, subCollector4, composeFunction);
}
/**
 * As defined by {@link #compose(UniConstraintCollector, UniConstraintCollector, BiFunction)}.
 */
public static <A, B, C, Result_, SubResultContainer1_, SubResultContainer2_, SubResult1_, SubResult2_>
        @NonNull TriConstraintCollector<A, B, C, ?, Result_> compose(
                @NonNull TriConstraintCollector<A, B, C, SubResultContainer1_, SubResult1_> subCollector1,
                @NonNull TriConstraintCollector<A, B, C, SubResultContainer2_, SubResult2_> subCollector2,
                @NonNull BiFunction<SubResult1_, SubResult2_, Result_> composeFunction) {
    // Tri-stream counterpart of the uni compose overloads; delegation only.
    return InnerTriConstraintCollectors.compose(subCollector1, subCollector2, composeFunction);
}
/**
 * As defined by {@link #compose(UniConstraintCollector, UniConstraintCollector, UniConstraintCollector, TriFunction)}.
 */
public static <A, B, C, Result_, SubResultContainer1_, SubResultContainer2_, SubResultContainer3_, SubResult1_, SubResult2_, SubResult3_>
        @NonNull TriConstraintCollector<A, B, C, ?, Result_> compose(
                @NonNull TriConstraintCollector<A, B, C, SubResultContainer1_, SubResult1_> subCollector1,
                @NonNull TriConstraintCollector<A, B, C, SubResultContainer2_, SubResult2_> subCollector2,
                @NonNull TriConstraintCollector<A, B, C, SubResultContainer3_, SubResult3_> subCollector3,
                @NonNull TriFunction<SubResult1_, SubResult2_, SubResult3_, Result_> composeFunction) {
    return InnerTriConstraintCollectors.compose(subCollector1, subCollector2, subCollector3, composeFunction);
}
/**
 * As defined by
 * {@link #compose(UniConstraintCollector, UniConstraintCollector, UniConstraintCollector, UniConstraintCollector, QuadFunction)}.
 */
public static <A, B, C, Result_, SubResultContainer1_, SubResultContainer2_, SubResultContainer3_, SubResultContainer4_, SubResult1_, SubResult2_, SubResult3_, SubResult4_>
        @NonNull TriConstraintCollector<A, B, C, ?, Result_> compose(
                @NonNull TriConstraintCollector<A, B, C, SubResultContainer1_, SubResult1_> subCollector1,
                @NonNull TriConstraintCollector<A, B, C, SubResultContainer2_, SubResult2_> subCollector2,
                @NonNull TriConstraintCollector<A, B, C, SubResultContainer3_, SubResult3_> subCollector3,
                @NonNull TriConstraintCollector<A, B, C, SubResultContainer4_, SubResult4_> subCollector4,
                @NonNull QuadFunction<SubResult1_, SubResult2_, SubResult3_, SubResult4_, Result_> composeFunction) {
    return InnerTriConstraintCollectors.compose(subCollector1, subCollector2, subCollector3, subCollector4,
            composeFunction);
}
/**
 * As defined by {@link #compose(UniConstraintCollector, UniConstraintCollector, BiFunction)}.
 */
public static <A, B, C, D, Result_, SubResultContainer1_, SubResultContainer2_, SubResult1_, SubResult2_>
        @NonNull QuadConstraintCollector<A, B, C, D, ?, Result_> compose(
                @NonNull QuadConstraintCollector<A, B, C, D, SubResultContainer1_, SubResult1_> subCollector1,
                @NonNull QuadConstraintCollector<A, B, C, D, SubResultContainer2_, SubResult2_> subCollector2,
                @NonNull BiFunction<SubResult1_, SubResult2_, Result_> composeFunction) {
    // Quad-stream counterpart of the uni compose overloads; delegation only.
    return InnerQuadConstraintCollectors.compose(subCollector1, subCollector2, composeFunction);
}
/**
 * As defined by {@link #compose(UniConstraintCollector, UniConstraintCollector, UniConstraintCollector, TriFunction)}.
 */
public static <A, B, C, D, Result_, SubResultContainer1_, SubResultContainer2_, SubResultContainer3_, SubResult1_, SubResult2_, SubResult3_>
        @NonNull QuadConstraintCollector<A, B, C, D, ?, Result_> compose(
                @NonNull QuadConstraintCollector<A, B, C, D, SubResultContainer1_, SubResult1_> subCollector1,
                @NonNull QuadConstraintCollector<A, B, C, D, SubResultContainer2_, SubResult2_> subCollector2,
                @NonNull QuadConstraintCollector<A, B, C, D, SubResultContainer3_, SubResult3_> subCollector3,
                @NonNull TriFunction<SubResult1_, SubResult2_, SubResult3_, Result_> composeFunction) {
    return InnerQuadConstraintCollectors.compose(subCollector1, subCollector2, subCollector3, composeFunction);
}
/**
 * As defined by
 * {@link #compose(UniConstraintCollector, UniConstraintCollector, UniConstraintCollector, UniConstraintCollector, QuadFunction)}.
 */
public static <A, B, C, D, Result_, SubResultContainer1_, SubResultContainer2_, SubResultContainer3_, SubResultContainer4_, SubResult1_, SubResult2_, SubResult3_, SubResult4_>
        @NonNull QuadConstraintCollector<A, B, C, D, ?, Result_> compose(
                @NonNull QuadConstraintCollector<A, B, C, D, SubResultContainer1_, SubResult1_> subCollector1,
                @NonNull QuadConstraintCollector<A, B, C, D, SubResultContainer2_, SubResult2_> subCollector2,
                @NonNull QuadConstraintCollector<A, B, C, D, SubResultContainer3_, SubResult3_> subCollector3,
                @NonNull QuadConstraintCollector<A, B, C, D, SubResultContainer4_, SubResult4_> subCollector4,
                @NonNull QuadFunction<SubResult1_, SubResult2_, SubResult3_, SubResult4_, Result_> composeFunction) {
    return InnerQuadConstraintCollectors.compose(subCollector1, subCollector2, subCollector3, subCollector4,
            composeFunction);
}
// ************************************************************************
// consecutive collectors
// ************************************************************************
/**
 * Creates a constraint collector that returns {@link SequenceChain} about the first fact.
 *
 * For instance, {@code [Shift slot=1] [Shift slot=2] [Shift slot=4] [Shift slot=6]}
 * returns the following information:
 *
 * <pre>
 * {@code
 * Consecutive Lengths: 2, 1, 1
 * Break Lengths: 2, 2
 * Consecutive Items: [[Shift slot=1] [Shift slot=2]], [[Shift slot=4]], [[Shift slot=6]]
 * }
 * </pre>
 *
 * @param indexMap Maps the fact to its position in the sequence
 * @param <A> type of the first mapped fact
 */
public static <A> @NonNull UniConstraintCollector<A, ?, SequenceChain<A, Integer>>
        toConsecutiveSequences(@NonNull ToIntFunction<A> indexMap) {
    return InnerUniConstraintCollectors.toConsecutiveSequences(indexMap);
}
/**
 * As defined by {@link #toConsecutiveSequences(ToIntFunction)}.
 *
 * @param resultMap Maps both facts to an item in the sequence
 * @param indexMap Maps the item to its position in the sequence
 * @param <A> type of the first mapped fact
 * @param <B> type of the second mapped fact
 * @param <Result_> type of item in the sequence
 */
public static <A, B, Result_> @NonNull BiConstraintCollector<A, B, ?, SequenceChain<Result_, Integer>>
        toConsecutiveSequences(@NonNull BiFunction<A, B, Result_> resultMap, @NonNull ToIntFunction<Result_> indexMap) {
    return InnerBiConstraintCollectors.toConsecutiveSequences(resultMap, indexMap);
}
/**
 * As defined by {@link #toConsecutiveSequences(ToIntFunction)}.
 *
 * @param resultMap Maps the three facts to an item in the sequence
 * @param indexMap Maps the item to its position in the sequence
 * @param <A> type of the first mapped fact
 * @param <B> type of the second mapped fact
 * @param <C> type of the third mapped fact
 * @param <Result_> type of item in the sequence
 */
public static <A, B, C, Result_> @NonNull TriConstraintCollector<A, B, C, ?, SequenceChain<Result_, Integer>>
        toConsecutiveSequences(@NonNull TriFunction<A, B, C, Result_> resultMap, @NonNull ToIntFunction<Result_> indexMap) {
    return InnerTriConstraintCollectors.toConsecutiveSequences(resultMap, indexMap);
}
/**
 * As defined by {@link #toConsecutiveSequences(ToIntFunction)}.
 *
 * @param resultMap Maps the four facts to an item in the sequence
 * @param indexMap Maps the item to its position in the sequence
 * @param <A> type of the first mapped fact
 * @param <B> type of the second mapped fact
 * @param <C> type of the third mapped fact
 * @param <D> type of the fourth mapped fact
 * @param <Result_> type of item in the sequence
 */
public static <A, B, C, D, Result_> @NonNull QuadConstraintCollector<A, B, C, D, ?, SequenceChain<Result_, Integer>>
        toConsecutiveSequences(@NonNull QuadFunction<A, B, C, D, Result_> resultMap,
                @NonNull ToIntFunction<Result_> indexMap) {
    return InnerQuadConstraintCollectors.toConsecutiveSequences(resultMap, indexMap);
}
// *****************************************************************
// toConnectedRanges
// *****************************************************************
/**
 * Creates a constraint collector that returns {@link ConnectedRangeChain} about the first fact.
 *
 * For instance, {@code [Equipment fromInclusive=2, toExclusive=4] [Equipment fromInclusive=3, toExclusive=5]
 * [Equipment fromInclusive=6, toExclusive=7] [Equipment fromInclusive=7, toExclusive=8]}
 * returns the following information:
 *
 * <pre>
 * {@code
 * ConnectedRanges: [minimumOverlap: 1, maximumOverlap: 2,
 *                  [Equipment fromInclusive=2, toExclusive=4] [Equipment fromInclusive=3, toExclusive=5]],
 *                  [minimumOverlap: 1, maximumOverlap: 1,
 *                  [Equipment fromInclusive=6, toExclusive=7] [Equipment fromInclusive=7, toExclusive=8]]
 * Breaks: [[Break from=5, to=6, length=1]]
 * }
 * </pre>
 *
 * This can be used to ensure a limited resource is not over-assigned.
 *
 * @param startInclusiveMap Maps the fact to its start
 * @param endExclusiveMap Maps the fact to its end
 * @param differenceFunction Computes the difference between two points. The second argument is always
 *        larger than the first (ex: {@link Duration#between}
 *        or {@code (a,b) -> b - a}).
 * @param <A> type of the first mapped fact
 * @param <PointType_> type of the fact endpoints
 * @param <DifferenceType_> type of difference between points
 */
public static <A, PointType_ extends Comparable<PointType_>, DifferenceType_ extends Comparable<DifferenceType_>>
        @NonNull UniConstraintCollector<A, ?, ConnectedRangeChain<A, PointType_, DifferenceType_>>
        toConnectedRanges(@NonNull Function<A, PointType_> startInclusiveMap,
                @NonNull Function<A, PointType_> endExclusiveMap,
                @NonNull BiFunction<PointType_, PointType_, DifferenceType_> differenceFunction) {
    // The fact itself serves as the interval, hence the identity interval mapping.
    return InnerUniConstraintCollectors.toConnectedRanges(ConstantLambdaUtils.identity(), startInclusiveMap,
            endExclusiveMap,
            differenceFunction);
}
/**
 * Specialized version of {@link #toConnectedRanges(Function,Function,BiFunction)} for
 * {@link Temporal} types.
 *
 * <p>
 * If you intend to use date-based {@link Temporal temporals} (such as {@link LocalDate}),
 * convert them to their time-based equivalents using {@code localDate.atStartOfDay()}.
 * Alternatively, use {@link #toConnectedRanges(Function,Function,BiFunction) the non-specialized method}
 * and provide {@code differenceFunction} which doesn't use `Duration`
 * as the type to represent the difference.
 *
 * @param <A> type of the first mapped fact
 * @param <PointType_> temporal type of the endpoints, needs to support seconds
 * @param startInclusiveMap Maps the fact to its start
 * @param endExclusiveMap Maps the fact to its end
 */
public static <A, PointType_ extends Temporal & Comparable<PointType_>>
        @NonNull UniConstraintCollector<A, ?, ConnectedRangeChain<A, PointType_, Duration>>
        toConnectedTemporalRanges(@NonNull Function<A, PointType_> startInclusiveMap,
                @NonNull Function<A, PointType_> endExclusiveMap) {
    return toConnectedRanges(startInclusiveMap, endExclusiveMap, Duration::between);
}
/**
 * Specialized version of {@link #toConnectedRanges(Function,Function,BiFunction)} for Long.
 *
 * @param startInclusiveMap Maps the fact to its start
 * @param endExclusiveMap Maps the fact to its end
 * @param <A> type of the first mapped fact
 */
public static <A> @NonNull UniConstraintCollector<A, ?, ConnectedRangeChain<A, Long, Long>>
        toConnectedRanges(@NonNull ToLongFunction<A> startInclusiveMap, @NonNull ToLongFunction<A> endExclusiveMap) {
    return toConnectedRanges(startInclusiveMap::applyAsLong, endExclusiveMap::applyAsLong, (a, b) -> b - a);
}
/**
 * As defined by {@link #toConnectedRanges(Function,Function,BiFunction)}.
 *
 * @param intervalMap Maps both facts to an item in the cluster
 * @param startInclusiveMap Maps the item to its start
 * @param endExclusiveMap Maps the item to its end
 * @param differenceFunction Computes the difference between two points. The second argument is always
 *        larger than the first (ex: {@link Duration#between}
 *        or {@code (a,b) -> b - a}).
 * @param <A> type of the first mapped fact
 * @param <B> type of the second mapped fact
 * @param <IntervalType_> type of the item in the cluster
 * @param <PointType_> type of the item endpoints
 * @param <DifferenceType_> type of difference between points
 */
public static <A, B, IntervalType_, PointType_ extends Comparable<PointType_>, DifferenceType_ extends Comparable<DifferenceType_>>
        @NonNull BiConstraintCollector<A, B, ?, ConnectedRangeChain<IntervalType_, PointType_, DifferenceType_>>
        toConnectedRanges(@NonNull BiFunction<A, B, IntervalType_> intervalMap,
                @NonNull Function<IntervalType_, PointType_> startInclusiveMap,
                @NonNull Function<IntervalType_, PointType_> endExclusiveMap,
                @NonNull BiFunction<PointType_, PointType_, DifferenceType_> differenceFunction) {
    return InnerBiConstraintCollectors.toConnectedRanges(intervalMap, startInclusiveMap, endExclusiveMap,
            differenceFunction);
}
/**
 * As defined by {@link #toConnectedTemporalRanges(Function,Function)}.
 *
 * @param intervalMap Maps both facts to an item in the cluster
 * @param startInclusiveMap Maps the fact to its start
 * @param endExclusiveMap Maps the fact to its end
 * @param <A> type of the first mapped fact
 * @param <B> type of the second mapped fact
 * @param <IntervalType_> type of the item in the cluster
 * @param <PointType_> temporal type of the endpoints, needs to support seconds
 */
public static <A, B, IntervalType_, PointType_ extends Temporal & Comparable<PointType_>>
        @NonNull BiConstraintCollector<A, B, ?, ConnectedRangeChain<IntervalType_, PointType_, Duration>>
        toConnectedTemporalRanges(@NonNull BiFunction<A, B, IntervalType_> intervalMap,
                @NonNull Function<IntervalType_, PointType_> startInclusiveMap,
                @NonNull Function<IntervalType_, PointType_> endExclusiveMap) {
    return toConnectedRanges(intervalMap, startInclusiveMap, endExclusiveMap, Duration::between);
}
/**
 * As defined by {@link #toConnectedRanges(ToLongFunction, ToLongFunction)}.
 *
 * @param startInclusiveMap Maps the fact to its start
 * @param endExclusiveMap Maps the fact to its end
 * @param <A> type of the first mapped fact
 * @param <B> type of the second mapped fact
 * @param <IntervalType_> type of the item in the cluster
 */
public static <A, B, IntervalType_>
        @NonNull BiConstraintCollector<A, B, ?, ConnectedRangeChain<IntervalType_, Long, Long>>
        toConnectedRanges(@NonNull BiFunction<A, B, IntervalType_> intervalMap,
                @NonNull ToLongFunction<IntervalType_> startInclusiveMap,
                @NonNull ToLongFunction<IntervalType_> endExclusiveMap) {
    return toConnectedRanges(intervalMap, startInclusiveMap::applyAsLong, endExclusiveMap::applyAsLong, (a, b) -> b - a);
}
/**
 * As defined by {@link #toConnectedRanges(Function,Function,BiFunction)}.
 *
 * @param intervalMap Maps the three facts to an item in the cluster
 * @param startInclusiveMap Maps the item to its start
 * @param endExclusiveMap Maps the item to its end
 * @param differenceFunction Computes the difference between two points. The second argument is always
 *        larger than the first (ex: {@link Duration#between}
 *        or {@code (a,b) -> b - a}).
 * @param <A> type of the first mapped fact
 * @param <B> type of the second mapped fact
 * @param <C> type of the third mapped fact
 * @param <IntervalType_> type of the item in the cluster
 * @param <PointType_> type of the item endpoints
 * @param <DifferenceType_> type of difference between points
 */
public static <A, B, C, IntervalType_, PointType_ extends Comparable<PointType_>, DifferenceType_ extends Comparable<DifferenceType_>>
        @NonNull TriConstraintCollector<A, B, C, ?, ConnectedRangeChain<IntervalType_, PointType_, DifferenceType_>>
        toConnectedRanges(@NonNull TriFunction<A, B, C, IntervalType_> intervalMap,
                @NonNull Function<IntervalType_, PointType_> startInclusiveMap,
                @NonNull Function<IntervalType_, PointType_> endExclusiveMap,
                @NonNull BiFunction<PointType_, PointType_, DifferenceType_> differenceFunction) {
    return InnerTriConstraintCollectors.toConnectedRanges(intervalMap, startInclusiveMap, endExclusiveMap,
            differenceFunction);
}
/**
 * As defined by {@link #toConnectedTemporalRanges(Function,Function)}.
 *
 * @param intervalMap Maps the three facts to an item in the cluster
 * @param startInclusiveMap Maps the fact to its start
 * @param endExclusiveMap Maps the fact to its end
 * @param <A> type of the first mapped fact
 * @param <B> type of the second mapped fact
 * @param <C> type of the third mapped fact
 * @param <IntervalType_> type of the item in the cluster
 * @param <PointType_> temporal type of the endpoints, needs to support seconds
 */
public static <A, B, C, IntervalType_, PointType_ extends Temporal & Comparable<PointType_>>
        @NonNull TriConstraintCollector<A, B, C, ?, ConnectedRangeChain<IntervalType_, PointType_, Duration>>
        toConnectedTemporalRanges(@NonNull TriFunction<A, B, C, IntervalType_> intervalMap,
                @NonNull Function<IntervalType_, PointType_> startInclusiveMap,
                @NonNull Function<IntervalType_, PointType_> endExclusiveMap) {
    return toConnectedRanges(intervalMap, startInclusiveMap, endExclusiveMap, Duration::between);
}
/**
 * As defined by {@link #toConnectedRanges(ToLongFunction, ToLongFunction)}.
 *
 * @param intervalMap Maps the three facts to an item in the cluster
 * @param startInclusiveMap Maps the fact to its start
 * @param endExclusiveMap Maps the fact to its end
 * @param <A> type of the first mapped fact
 * @param <B> type of the second mapped fact
 * @param <C> type of the third mapped fact
 * @param <IntervalType_> type of the item in the cluster
 */
public static <A, B, C, IntervalType_>
        @NonNull TriConstraintCollector<A, B, C, ?, ConnectedRangeChain<IntervalType_, Long, Long>>
        toConnectedRanges(@NonNull TriFunction<A, B, C, IntervalType_> intervalMap,
                @NonNull ToLongFunction<IntervalType_> startInclusiveMap,
                @NonNull ToLongFunction<IntervalType_> endExclusiveMap) {
    return toConnectedRanges(intervalMap, startInclusiveMap::applyAsLong, endExclusiveMap::applyAsLong, (a, b) -> b - a);
}
/**
 * As defined by {@link #toConnectedRanges(Function,Function,BiFunction)}.
 *
 * @param intervalMap Maps the four facts to an item in the cluster
 * @param startInclusiveMap Maps the item to its start
 * @param endExclusiveMap Maps the item to its end
 * @param differenceFunction Computes the difference between two points. The second argument is always
 *        larger than the first (ex: {@link Duration#between}
 *        or {@code (a,b) -> b - a}).
 * @param <A> type of the first mapped fact
 * @param <B> type of the second mapped fact
 * @param <C> type of the third mapped fact
 * @param <D> type of the fourth mapped fact
 * @param <IntervalType_> type of the item in the cluster
 * @param <PointType_> type of the item endpoints
 * @param <DifferenceType_> type of difference between points
 */
public static <A, B, C, D, IntervalType_, PointType_ extends Comparable<PointType_>, DifferenceType_ extends Comparable<DifferenceType_>>
        @NonNull QuadConstraintCollector<A, B, C, D, ?, ConnectedRangeChain<IntervalType_, PointType_, DifferenceType_>>
        toConnectedRanges(@NonNull QuadFunction<A, B, C, D, IntervalType_> intervalMap,
                @NonNull Function<IntervalType_, PointType_> startInclusiveMap,
                @NonNull Function<IntervalType_, PointType_> endExclusiveMap,
                @NonNull BiFunction<PointType_, PointType_, DifferenceType_> differenceFunction) {
    return InnerQuadConstraintCollectors.toConnectedRanges(intervalMap, startInclusiveMap, endExclusiveMap,
            differenceFunction);
}
/**
 * As defined by {@link #toConnectedTemporalRanges(Function,Function)}.
 *
 * @param intervalMap Maps the four facts to an item in the cluster
 * @param startInclusiveMap Maps the fact to its start
 * @param endExclusiveMap Maps the fact to its end
 * @param <A> type of the first mapped fact
 * @param <B> type of the second mapped fact
 * @param <C> type of the third mapped fact
 * @param <D> type of the fourth mapped fact
 * @param <IntervalType_> type of the item in the cluster
 * @param <PointType_> temporal type of the endpoints, needs to support seconds
 */
public static <A, B, C, D, IntervalType_, PointType_ extends Temporal & Comparable<PointType_>>
        @NonNull QuadConstraintCollector<A, B, C, D, ?, ConnectedRangeChain<IntervalType_, PointType_, Duration>>
        toConnectedTemporalRanges(@NonNull QuadFunction<A, B, C, D, IntervalType_> intervalMap,
                @NonNull Function<IntervalType_, PointType_> startInclusiveMap,
                @NonNull Function<IntervalType_, PointType_> endExclusiveMap) {
    return toConnectedRanges(intervalMap, startInclusiveMap, endExclusiveMap, Duration::between);
}
/**
 * As defined by {@link #toConnectedRanges(ToLongFunction, ToLongFunction)}.
 *
 * @param startInclusiveMap Maps the fact to its start
 * @param endExclusiveMap Maps the fact to its end
 * @param <A> type of the first mapped fact
 * @param <B> type of the second mapped fact
 * @param <C> type of the third mapped fact
 * @param <D> type of the fourth mapped fact
 * @param <IntervalType_> type of the item in the cluster
 */
public static <A, B, C, D, IntervalType_>
        @NonNull QuadConstraintCollector<A, B, C, D, ?, ConnectedRangeChain<IntervalType_, Long, Long>>
        toConnectedRanges(@NonNull QuadFunction<A, B, C, D, IntervalType_> intervalMap,
                @NonNull ToLongFunction<IntervalType_> startInclusiveMap,
                @NonNull ToLongFunction<IntervalType_> endExclusiveMap) {
    return toConnectedRanges(intervalMap, startInclusiveMap::applyAsLong, endExclusiveMap::applyAsLong, (a, b) -> b - a);
}
// ************************************************************************
// load balancing
// ************************************************************************
/**
 * As defined by {@link #loadBalance(Function, ToLongFunction, ToLongFunction)},
 * with every balanced item contributing a load of one
 * and starting from an initial load of zero.
 *
 * @param balancedItemFunction extracts the item whose load should be balanced
 * @param <A> type of the matched fact
 * @param <Balanced_> type of the item being balanced
 */
public static <A, Balanced_> @NonNull UniConstraintCollector<A, ?, LoadBalance<Balanced_>> loadBalance(
        @NonNull Function<A, Balanced_> balancedItemFunction) {
    // Reuse the shared constant-one lambda rather than an inline lambda.
    ToLongFunction<A> constantOneLoad = ConstantLambdaUtils.uniConstantOneLong();
    return loadBalance(balancedItemFunction, constantOneLoad);
}
/**
 * As defined by {@link #loadBalance(Function, ToLongFunction, ToLongFunction)},
 * starting from an initial load of zero for every balanced item.
 *
 * @param balancedItemFunction extracts the item whose load should be balanced
 * @param loadFunction how much each matched fact counts for
 * @param <A> type of the matched fact
 * @param <Balanced_> type of the item being balanced
 */
public static <A, Balanced_> @NonNull UniConstraintCollector<A, ?, LoadBalance<Balanced_>> loadBalance(
        @NonNull Function<A, Balanced_> balancedItemFunction, @NonNull ToLongFunction<A> loadFunction) {
    // Reuse the shared constant-zero lambda rather than an inline lambda.
    ToLongFunction<A> zeroInitialLoad = ConstantLambdaUtils.uniConstantZeroLong();
    return loadBalance(balancedItemFunction, loadFunction, zeroInitialLoad);
}
/**
 * Returns a collector that takes a stream of items and calculates the unfairness measure from them
 * (see {@link LoadBalance#unfairness()}).
 * The load for every item is provided by the loadFunction,
 * with the starting load provided by the initialLoadFunction.
 * <p>
 * When this collector is used in a constraint stream,
 * it is recommended that the score type be one of those based on {@link BigDecimal},
 * such as {@link HardSoftBigDecimalScore}.
 * This is so that the unfairness measure keeps its precision
 * without forcing the other constraints to be multiplied by a large constant,
 * which would otherwise be required to implement fixed-point arithmetic.
 *
 * @param balancedItemFunction The function that returns the item which should be load-balanced.
 * @param loadFunction How much the item should count for in the formula.
 * @param initialLoadFunction The initial value of the metric,
 *        allowing to provide initial state
 *        without requiring the entire previous planning windows in the working memory.
 * @param <A> type of the matched fact
 * @param <Balanced_> type of the item being balanced
 * @return a collector whose result exposes the current load distribution and its unfairness
 */
public static <A, Balanced_> @NonNull UniConstraintCollector<A, ?, LoadBalance<Balanced_>> loadBalance(
        @NonNull Function<A, Balanced_> balancedItemFunction, @NonNull ToLongFunction<A> loadFunction,
        @NonNull ToLongFunction<A> initialLoadFunction) {
    return InnerUniConstraintCollectors.loadBalance(balancedItemFunction, loadFunction, initialLoadFunction);
}
/**
 * As defined by {@link #loadBalance(BiFunction, ToLongBiFunction, ToLongBiFunction)},
 * with every balanced item contributing a load of one
 * and starting from an initial load of zero.
 *
 * @param balancedItemFunction extracts the item whose load should be balanced
 * @param <A> type of the first matched fact
 * @param <B> type of the second matched fact
 * @param <Balanced_> type of the item being balanced
 */
public static <A, B, Balanced_> @NonNull BiConstraintCollector<A, B, ?, LoadBalance<Balanced_>> loadBalance(
        @NonNull BiFunction<A, B, Balanced_> balancedItemFunction) {
    // Reuse the shared constant-one lambda rather than an inline lambda.
    ToLongBiFunction<A, B> constantOneLoad = ConstantLambdaUtils.biConstantOneLong();
    return loadBalance(balancedItemFunction, constantOneLoad);
}
/**
 * As defined by {@link #loadBalance(BiFunction, ToLongBiFunction, ToLongBiFunction)},
 * starting from an initial load of zero for every balanced item.
 *
 * @param balancedItemFunction extracts the item whose load should be balanced
 * @param loadFunction how much each matched pair counts for
 * @param <A> type of the first matched fact
 * @param <B> type of the second matched fact
 * @param <Balanced_> type of the item being balanced
 */
public static <A, B, Balanced_> @NonNull BiConstraintCollector<A, B, ?, LoadBalance<Balanced_>> loadBalance(
        @NonNull BiFunction<A, B, Balanced_> balancedItemFunction, @NonNull ToLongBiFunction<A, B> loadFunction) {
    // Reuse the shared constant-zero lambda rather than an inline lambda.
    ToLongBiFunction<A, B> zeroInitialLoad = ConstantLambdaUtils.biConstantZeroLong();
    return loadBalance(balancedItemFunction, loadFunction, zeroInitialLoad);
}
/**
 * As defined by {@link #loadBalance(Function, ToLongFunction, ToLongFunction)}.
 *
 * @param balancedItemFunction The function that returns the item which should be load-balanced.
 * @param loadFunction How much the item should count for in the formula.
 * @param initialLoadFunction The initial value of the metric,
 *        allowing to provide initial state
 *        without requiring the entire previous planning windows in the working memory.
 * @param <A> type of the first matched fact
 * @param <B> type of the second matched fact
 * @param <Balanced_> type of the item being balanced
 */
public static <A, B, Balanced_> @NonNull BiConstraintCollector<A, B, ?, LoadBalance<Balanced_>> loadBalance(
        @NonNull BiFunction<A, B, Balanced_> balancedItemFunction, @NonNull ToLongBiFunction<A, B> loadFunction,
        @NonNull ToLongBiFunction<A, B> initialLoadFunction) {
    return InnerBiConstraintCollectors.loadBalance(balancedItemFunction, loadFunction, initialLoadFunction);
}
/**
 * As defined by {@link #loadBalance(TriFunction, ToLongTriFunction, ToLongTriFunction)},
 * with every balanced item contributing a load of one
 * and starting from an initial load of zero.
 *
 * @param balancedItemFunction extracts the item whose load should be balanced
 * @param <A> type of the first matched fact
 * @param <B> type of the second matched fact
 * @param <C> type of the third matched fact
 * @param <Balanced_> type of the item being balanced
 */
public static <A, B, C, Balanced_> @NonNull TriConstraintCollector<A, B, C, ?, LoadBalance<Balanced_>> loadBalance(
        @NonNull TriFunction<A, B, C, Balanced_> balancedItemFunction) {
    // Reuse the shared constant-one lambda rather than an inline lambda.
    ToLongTriFunction<A, B, C> constantOneLoad = ConstantLambdaUtils.triConstantOneLong();
    return loadBalance(balancedItemFunction, constantOneLoad);
}
/**
 * As defined by {@link #loadBalance(TriFunction, ToLongTriFunction, ToLongTriFunction)},
 * starting from an initial load of zero for every balanced item.
 *
 * @param balancedItemFunction extracts the item whose load should be balanced
 * @param loadFunction how much each matched triple counts for
 * @param <A> type of the first matched fact
 * @param <B> type of the second matched fact
 * @param <C> type of the third matched fact
 * @param <Balanced_> type of the item being balanced
 */
public static <A, B, C, Balanced_> @NonNull TriConstraintCollector<A, B, C, ?, LoadBalance<Balanced_>> loadBalance(
        @NonNull TriFunction<A, B, C, Balanced_> balancedItemFunction, @NonNull ToLongTriFunction<A, B, C> loadFunction) {
    // Reuse the shared constant-zero lambda rather than an inline lambda.
    ToLongTriFunction<A, B, C> zeroInitialLoad = ConstantLambdaUtils.triConstantZeroLong();
    return loadBalance(balancedItemFunction, loadFunction, zeroInitialLoad);
}
/**
 * As defined by {@link #loadBalance(Function, ToLongFunction, ToLongFunction)}.
 *
 * @param balancedItemFunction The function that returns the item which should be load-balanced.
 * @param loadFunction How much the item should count for in the formula.
 * @param initialLoadFunction The initial value of the metric,
 *        allowing to provide initial state
 *        without requiring the entire previous planning windows in the working memory.
 * @param <A> type of the first matched fact
 * @param <B> type of the second matched fact
 * @param <C> type of the third matched fact
 * @param <Balanced_> type of the item being balanced
 */
public static <A, B, C, Balanced_> @NonNull TriConstraintCollector<A, B, C, ?, LoadBalance<Balanced_>> loadBalance(
        @NonNull TriFunction<A, B, C, Balanced_> balancedItemFunction, @NonNull ToLongTriFunction<A, B, C> loadFunction,
        @NonNull ToLongTriFunction<A, B, C> initialLoadFunction) {
    return InnerTriConstraintCollectors.loadBalance(balancedItemFunction, loadFunction, initialLoadFunction);
}
/**
 * As defined by {@link #loadBalance(QuadFunction, ToLongQuadFunction, ToLongQuadFunction)},
 * with every balanced item contributing a load of one
 * and starting from an initial load of zero.
 *
 * @param balancedItemFunction extracts the item whose load should be balanced
 * @param <A> type of the first matched fact
 * @param <B> type of the second matched fact
 * @param <C> type of the third matched fact
 * @param <D> type of the fourth matched fact
 * @param <Balanced_> type of the item being balanced
 */
public static <A, B, C, D, Balanced_> @NonNull QuadConstraintCollector<A, B, C, D, ?, LoadBalance<Balanced_>> loadBalance(
        @NonNull QuadFunction<A, B, C, D, Balanced_> balancedItemFunction) {
    // Reuse the shared constant-one lambda rather than an inline lambda.
    ToLongQuadFunction<A, B, C, D> constantOneLoad = ConstantLambdaUtils.quadConstantOneLong();
    return loadBalance(balancedItemFunction, constantOneLoad);
}
/**
 * As defined by {@link #loadBalance(QuadFunction, ToLongQuadFunction, ToLongQuadFunction)},
 * starting from an initial load of zero for every balanced item.
 *
 * @param balancedItemFunction extracts the item whose load should be balanced
 * @param loadFunction how much each matched quadruple counts for
 * @param <A> type of the first matched fact
 * @param <B> type of the second matched fact
 * @param <C> type of the third matched fact
 * @param <D> type of the fourth matched fact
 * @param <Balanced_> type of the item being balanced
 */
public static <A, B, C, D, Balanced_> @NonNull QuadConstraintCollector<A, B, C, D, ?, LoadBalance<Balanced_>> loadBalance(
        @NonNull QuadFunction<A, B, C, D, Balanced_> balancedItemFunction,
        @NonNull ToLongQuadFunction<A, B, C, D> loadFunction) {
    // Reuse the shared constant-zero lambda rather than an inline lambda.
    ToLongQuadFunction<A, B, C, D> zeroInitialLoad = ConstantLambdaUtils.quadConstantZeroLong();
    return loadBalance(balancedItemFunction, loadFunction, zeroInitialLoad);
}
/**
 * As defined by {@link #loadBalance(Function, ToLongFunction, ToLongFunction)}.
 *
 * @param balancedItemFunction The function that returns the item which should be load-balanced.
 * @param loadFunction How much the item should count for in the formula.
 * @param initialLoadFunction The initial value of the metric,
 *        allowing to provide initial state
 *        without requiring the entire previous planning windows in the working memory.
 * @param <A> type of the first matched fact
 * @param <B> type of the second matched fact
 * @param <C> type of the third matched fact
 * @param <D> type of the fourth matched fact
 * @param <Balanced_> type of the item being balanced
 */
public static <A, B, C, D, Balanced_> @NonNull QuadConstraintCollector<A, B, C, D, ?, LoadBalance<Balanced_>> loadBalance(
        @NonNull QuadFunction<A, B, C, D, Balanced_> balancedItemFunction,
        @NonNull ToLongQuadFunction<A, B, C, D> loadFunction,
        @NonNull ToLongQuadFunction<A, B, C, D> initialLoadFunction) {
    return InnerQuadConstraintCollectors.loadBalance(balancedItemFunction, loadFunction, initialLoadFunction);
}
// Utility class with static factory methods only; not meant to be instantiated.
private ConstraintCollectors() {
}
}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/score | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/score/stream/ConstraintFactory.java | package ai.timefold.solver.core.api.score.stream;
import java.util.function.BiPredicate;
import java.util.function.Function;
import java.util.function.Predicate;
import ai.timefold.solver.core.api.domain.entity.PlanningEntity;
import ai.timefold.solver.core.api.domain.lookup.PlanningId;
import ai.timefold.solver.core.api.domain.solution.ProblemFactCollectionProperty;
import ai.timefold.solver.core.api.domain.variable.InverseRelationShadowVariable;
import ai.timefold.solver.core.api.domain.variable.PlanningListVariable;
import ai.timefold.solver.core.api.domain.variable.PlanningVariable;
import ai.timefold.solver.core.api.domain.variable.ShadowVariablesInconsistent;
import ai.timefold.solver.core.api.score.stream.bi.BiConstraintStream;
import ai.timefold.solver.core.api.score.stream.bi.BiJoiner;
import ai.timefold.solver.core.api.score.stream.uni.UniConstraintStream;
import org.jspecify.annotations.NonNull;
/**
 * The factory to create every {@link ConstraintStream} (for example with {@link #forEach(Class)})
 * which ends in a {@link Constraint} returned by {@link ConstraintProvider#defineConstraints(ConstraintFactory)}.
 */
public interface ConstraintFactory {

    /**
     * @deprecated Do not rely on any constraint package in user code.
     */
    @Deprecated(forRemoval = true, since = "1.13.0")
    @NonNull
    String getDefaultConstraintPackage();

    // ************************************************************************
    // forEach*
    // ************************************************************************

    /**
     * Start a {@link ConstraintStream} of all instances of the sourceClass
     * that are known as {@link ProblemFactCollectionProperty problem facts} or {@link PlanningEntity planning entities}.
     * <p>
     * If the sourceClass is a {@link PlanningEntity}, then it is automatically
     * {@link UniConstraintStream#filter(Predicate) filtered} to only contain entities
     * that are {@link ShadowVariablesInconsistent consistent}
     * and for which each genuine {@link PlanningVariable} (of the sourceClass or a superclass thereof) has a non-null value.
     * <p>
     * If the sourceClass is a shadow entity (an entity without any genuine planning variables),
     * and if there exists a genuine {@link PlanningEntity} with a {@link PlanningListVariable}
     * which accepts instances of this shadow entity as values in that list,
     * and if that list variable {@link PlanningListVariable#allowsUnassignedValues() allows unassigned values},
     * then this stream will filter out all sourceClass instances
     * which are not present in any instances of that list variable.
     * This is achieved in one of two ways:
     *
     * <ul>
     * <li>If the sourceClass has {@link InverseRelationShadowVariable} field
     * referencing instance of an entity with the list variable,
     * the value of that field will be used to determine if the value is assigned.
     * Null in that field means the instance of sourceClass is unassigned.</li>
     * <li>As fallback, the value is considered assigned if there exists
     * an instance of the entity where its list variable contains the value.
     * This will perform significantly worse and only exists
     * so that using the {@link InverseRelationShadowVariable} can remain optional.
     * Adding the field is strongly recommended.</li>
     * </ul>
     *
     * @param <A> the type of the matched problem fact or {@link PlanningEntity planning entity}
     */
    <A> @NonNull UniConstraintStream<A> forEach(@NonNull Class<A> sourceClass);

    /**
     * As defined by {@link #forEachIncludingUnassigned(Class)}.
     *
     * @deprecated Use {@link #forEachIncludingUnassigned(Class)} instead.
     */
    @Deprecated(forRemoval = true, since = "1.8.0")
    default <A> @NonNull UniConstraintStream<A> forEachIncludingNullVars(@NonNull Class<A> sourceClass) {
        return forEachIncludingUnassigned(sourceClass);
    }

    /**
     * As defined by {@link #forEach(Class)},
     * but without any filtering of unassigned {@link PlanningEntity planning entities}
     * (for {@link PlanningVariable#allowsUnassigned()})
     * or shadow entities not assigned to any applicable list variable
     * (for {@link PlanningListVariable#allowsUnassignedValues()}).
     *
     * @param <A> the type of the matched problem fact or {@link PlanningEntity planning entity}
     */
    <A> @NonNull UniConstraintStream<A> forEachIncludingUnassigned(@NonNull Class<A> sourceClass);

    /**
     * As defined by {@link #forEach(Class)},
     * but without any filtering of {@link ShadowVariablesInconsistent inconsistent} or unassigned {@link PlanningEntity
     * planning entities}
     * (for {@link PlanningVariable#allowsUnassigned()})
     * or shadow entities not assigned to any applicable list variable
     * (for {@link PlanningListVariable#allowsUnassignedValues()}).
     *
     * @param <A> the type of the matched problem fact or {@link PlanningEntity planning entity}
     */
    <A> @NonNull UniConstraintStream<A> forEachUnfiltered(@NonNull Class<A> sourceClass);

    /**
     * Create a new {@link BiConstraintStream} for every unique combination of A and another A with a higher {@link PlanningId}.
     * <p>
     * Important: {@link BiConstraintStream#filter(BiPredicate) Filtering} this is slower and less scalable
     * than using a {@link #forEachUniquePair(Class, BiJoiner) joiner},
     * because it barely applies hashing and/or indexing on the properties,
     * so it creates and checks almost every combination of A and A.
     * <p>
     * This method is syntactic sugar for {@link UniConstraintStream#join(Class)}.
     * It automatically adds a {@link Joiners#lessThan(Function) lessThan} joiner on the {@link PlanningId} of A.
     *
     * @param <A> the type of the matched problem fact or {@link PlanningEntity planning entity}
     * @return a stream that matches every unique combination of A and another A
     */
    default <A> @NonNull BiConstraintStream<A, A> forEachUniquePair(@NonNull Class<A> sourceClass) {
        return forEachUniquePair(sourceClass, new BiJoiner[0]);
    }

    /**
     * Create a new {@link BiConstraintStream} for every unique combination of A and another A with a higher {@link PlanningId}
     * for which the {@link BiJoiner} is true (for the properties it extracts from both facts).
     * <p>
     * Important: This is faster and more scalable than not using a {@link #forEachUniquePair(Class)} joiner
     * followed by a {@link BiConstraintStream#filter(BiPredicate) filter},
     * because it applies hashing and/or indexing on the properties,
     * so it doesn't create nor checks almost every combination of A and A.
     * <p>
     * This method is syntactic sugar for {@link UniConstraintStream#join(Class, BiJoiner)}.
     * It automatically adds a {@link Joiners#lessThan(Function) lessThan} joiner on the {@link PlanningId} of A.
     * <p>
     * This method has overloaded methods with multiple {@link BiJoiner} parameters.
     *
     * @param <A> the type of the matched problem fact or {@link PlanningEntity planning entity}
     * @return a stream that matches every unique combination of A and another A for which the {@link BiJoiner} is true
     */
    default <A> @NonNull BiConstraintStream<A, A> forEachUniquePair(@NonNull Class<A> sourceClass,
            @NonNull BiJoiner<A, A> joiner) {
        return forEachUniquePair(sourceClass, new BiJoiner[] { joiner });
    }

    /**
     * As defined by {@link #forEachUniquePair(Class, BiJoiner)}.
     *
     * @param <A> the type of the matched problem fact or {@link PlanningEntity planning entity}
     * @return a stream that matches every unique combination of A and another A for which all the
     *         {@link BiJoiner joiners} are true
     */
    default <A> @NonNull BiConstraintStream<A, A> forEachUniquePair(@NonNull Class<A> sourceClass,
            @NonNull BiJoiner<A, A> joiner1,
            @NonNull BiJoiner<A, A> joiner2) {
        return forEachUniquePair(sourceClass, new BiJoiner[] { joiner1, joiner2 });
    }

    /**
     * As defined by {@link #forEachUniquePair(Class, BiJoiner)}.
     *
     * @param <A> the type of the matched problem fact or {@link PlanningEntity planning entity}
     * @return a stream that matches every unique combination of A and another A for which all the
     *         {@link BiJoiner joiners} are true
     */
    default <A> @NonNull BiConstraintStream<A, A> forEachUniquePair(@NonNull Class<A> sourceClass,
            @NonNull BiJoiner<A, A> joiner1, @NonNull BiJoiner<A, A> joiner2,
            @NonNull BiJoiner<A, A> joiner3) {
        return forEachUniquePair(sourceClass, new BiJoiner[] { joiner1, joiner2, joiner3 });
    }

    /**
     * As defined by {@link #forEachUniquePair(Class, BiJoiner)}.
     *
     * @param <A> the type of the matched problem fact or {@link PlanningEntity planning entity}
     * @return a stream that matches every unique combination of A and another A for which all the
     *         {@link BiJoiner joiners} are true
     */
    default <A> @NonNull BiConstraintStream<A, A> forEachUniquePair(@NonNull Class<A> sourceClass,
            @NonNull BiJoiner<A, A> joiner1, @NonNull BiJoiner<A, A> joiner2,
            @NonNull BiJoiner<A, A> joiner3, @NonNull BiJoiner<A, A> joiner4) {
        return forEachUniquePair(sourceClass, new BiJoiner[] { joiner1, joiner2, joiner3, joiner4 });
    }

    /**
     * As defined by {@link #forEachUniquePair(Class, BiJoiner)}.
     * <p>
     * This method causes <i>Unchecked generics array creation for varargs parameter</i> warnings,
     * but we can't fix it with a {@link SafeVarargs} annotation because it's an interface method.
     * Therefore, there are overloaded methods with up to 4 {@link BiJoiner} parameters.
     *
     * @param <A> the type of the matched problem fact or {@link PlanningEntity planning entity}
     * @return a stream that matches every unique combination of A and another A for which all the
     *         {@link BiJoiner joiners} are true
     */
    <A> @NonNull BiConstraintStream<A, A> forEachUniquePair(@NonNull Class<A> sourceClass, @NonNull BiJoiner<A, A>... joiners);

    // ************************************************************************
    // from* (deprecated)
    // ************************************************************************

    /**
     * This method is deprecated.
     * Migrate uses of this method to {@link #forEach(Class)}, but first understand this:
     *
     * <ul>
     * <li>If none of your planning variables {@link PlanningVariable#allowsUnassigned() allow unassigned values},
     * then the replacement by {@link #forEach(Class)} has little to no impact.
     * Subsequent conditional propagation calls ({@link UniConstraintStream#ifExists} etc.)
     * will now also filter out planning entities with null variables,
     * consistently with {@link #forEach(Class)} family of methods and with joining.</li>
     * <li>If any of your planning variables {@link PlanningVariable#allowsUnassigned() allow unassigned values},
     * then there is severe impact.
     * Calls to the {@link #forEach(Class)} family of methods will now filter out planning entities with null variables,
     * so most constraints no longer need to do null checks,
     * but the constraint that penalizes unassigned entities (typically a medium constraint)
     * must now use {@link #forEachIncludingUnassigned(Class)} instead.
     * Subsequent joins and conditional propagation calls will now also consistently filter out planning entities with null
     * variables.</li>
     * </ul>
     * <p>
     * The original Javadoc of this method follows:
     * <p>
     * Start a {@link ConstraintStream} of all instances of the fromClass
     * that are known as {@link ProblemFactCollectionProperty problem facts} or {@link PlanningEntity planning entities}.
     * <p>
     * If the fromClass is a {@link PlanningEntity}, then it is automatically
     * {@link UniConstraintStream#filter(Predicate) filtered} to only contain fully initialized entities,
     * for which each genuine {@link PlanningVariable} (of the fromClass or a superclass thereof) is initialized.
     * This filtering will NOT automatically apply to genuine planning variables of subclass planning entities of the fromClass.
     *
     * @param <A> the type of the matched problem fact or {@link PlanningEntity planning entity}
     * @deprecated This method is deprecated in favor of {@link #forEach(Class)},
     *             which exhibits the same behavior for planning variables
     *             which both allow and don't allow unassigned values.
     */
    @Deprecated(forRemoval = true)
    <A> @NonNull UniConstraintStream<A> from(@NonNull Class<A> fromClass);

    /**
     * This method is deprecated.
     * Migrate uses of this method to {@link #forEachIncludingUnassigned(Class)},
     * but first understand that subsequent joins and conditional propagation calls
     * ({@link UniConstraintStream#ifExists} etc.)
     * will now also consistently filter out planning entities with null variables.
     * <p>
     * The original Javadoc of this method follows:
     * <p>
     * As defined by {@link #from(Class)},
     * but without any filtering of uninitialized {@link PlanningEntity planning entities}.
     *
     * @param <A> the type of the matched problem fact or {@link PlanningEntity planning entity}
     * @deprecated Prefer {@link #forEachIncludingUnassigned(Class)}.
     */
    @Deprecated(forRemoval = true)
    @NonNull
    <A> UniConstraintStream<A> fromUnfiltered(@NonNull Class<A> fromClass);

    /**
     * This method is deprecated.
     * Migrate uses of this method to {@link #forEachUniquePair(Class)},
     * but first understand that the same precautions apply as with the use of {@link #from(Class)}.
     * <p>
     * The original Javadoc of this method follows:
     * <p>
     * Create a new {@link BiConstraintStream} for every unique combination of A and another A with a higher {@link PlanningId}.
     * <p>
     * Important: {@link BiConstraintStream#filter(BiPredicate) Filtering} this is slower and less scalable
     * than using a {@link #fromUniquePair(Class, BiJoiner) joiner},
     * because it barely applies hashing and/or indexing on the properties,
     * so it creates and checks almost every combination of A and A.
     * <p>
     * This method is syntactic sugar for {@link UniConstraintStream#join(Class)}.
     * It automatically adds a {@link Joiners#lessThan(Function) lessThan} joiner on the {@link PlanningId} of A.
     *
     * @deprecated Prefer {@link #forEachUniquePair(Class)},
     *             which exhibits the same behavior for planning variables
     *             which both allow and don't allow unassigned values.
     * @param <A> the type of the matched problem fact or {@link PlanningEntity planning entity}
     * @return a stream that matches every unique combination of A and another A
     */
    @Deprecated(forRemoval = true)
    default <A> @NonNull BiConstraintStream<A, A> fromUniquePair(@NonNull Class<A> fromClass) {
        return fromUniquePair(fromClass, new BiJoiner[0]);
    }

    /**
     * This method is deprecated.
     * Migrate uses of this method to {@link #forEachUniquePair(Class, BiJoiner)},
     * but first understand that the same precautions apply as with the use of {@link #from(Class)}.
     * <p>
     * The original Javadoc of this method follows:
     * <p>
     * Create a new {@link BiConstraintStream} for every unique combination of A and another A with a higher {@link PlanningId}
     * for which the {@link BiJoiner} is true (for the properties it extracts from both facts).
     * <p>
     * Important: This is faster and more scalable than not using a {@link #fromUniquePair(Class)} joiner
     * followed by a {@link BiConstraintStream#filter(BiPredicate) filter},
     * because it applies hashing and/or indexing on the properties,
     * so it doesn't create nor checks almost every combination of A and A.
     * <p>
     * This method is syntactic sugar for {@link UniConstraintStream#join(Class, BiJoiner)}.
     * It automatically adds a {@link Joiners#lessThan(Function) lessThan} joiner on the {@link PlanningId} of A.
     * <p>
     * This method has overloaded methods with multiple {@link BiJoiner} parameters.
     *
     * @deprecated Prefer {@link #forEachUniquePair(Class, BiJoiner)},
     *             which exhibits the same behavior for planning variables
     *             which both allow and don't allow unassigned values.
     * @param <A> the type of the matched problem fact or {@link PlanningEntity planning entity}
     * @return a stream that matches every unique combination of A and another A for which the {@link BiJoiner} is true
     */
    @Deprecated(forRemoval = true)
    default <A> @NonNull BiConstraintStream<A, A> fromUniquePair(@NonNull Class<A> fromClass, @NonNull BiJoiner<A, A> joiner) {
        return fromUniquePair(fromClass, new BiJoiner[] { joiner });
    }

    /**
     * This method is deprecated.
     * Migrate uses of this method to {@link #forEachUniquePair(Class, BiJoiner, BiJoiner)},
     * but first understand that the same precautions apply as with the use of {@link #from(Class)}.
     * <p>
     * The original Javadoc of this method follows:
     * <p>
     * As defined by {@link #fromUniquePair(Class, BiJoiner)}.
     *
     * @deprecated Prefer {@link #forEachUniquePair(Class, BiJoiner, BiJoiner)},
     *             which exhibits the same behavior for planning variables
     *             which both allow and don't allow unassigned values.
     * @param <A> the type of the matched problem fact or {@link PlanningEntity planning entity}
     * @return a stream that matches every unique combination of A and another A for which all the
     *         {@link BiJoiner joiners} are true
     */
    @Deprecated(forRemoval = true)
    default <A> @NonNull BiConstraintStream<A, A> fromUniquePair(@NonNull Class<A> fromClass, @NonNull BiJoiner<A, A> joiner1,
            @NonNull BiJoiner<A, A> joiner2) {
        return fromUniquePair(fromClass, new BiJoiner[] { joiner1, joiner2 });
    }

    /**
     * This method is deprecated.
     * Migrate uses of this method to {@link #forEachUniquePair(Class, BiJoiner, BiJoiner, BiJoiner)},
     * but first understand that the same precautions apply as with the use of {@link #from(Class)}.
     * <p>
     * The original Javadoc of this method follows:
     * <p>
     * As defined by {@link #fromUniquePair(Class, BiJoiner)}.
     *
     * @deprecated Prefer {@link #forEachUniquePair(Class, BiJoiner, BiJoiner, BiJoiner)},
     *             which exhibits the same behavior for planning variables
     *             which both allow and don't allow unassigned values.
     *
     * @param <A> the type of the matched problem fact or {@link PlanningEntity planning entity}
     * @return a stream that matches every unique combination of A and another A for which all the
     *         {@link BiJoiner joiners} are true
     */
    @Deprecated(forRemoval = true)
    default <A> @NonNull BiConstraintStream<A, A> fromUniquePair(@NonNull Class<A> fromClass, @NonNull BiJoiner<A, A> joiner1,
            @NonNull BiJoiner<A, A> joiner2, @NonNull BiJoiner<A, A> joiner3) {
        return fromUniquePair(fromClass, new BiJoiner[] { joiner1, joiner2, joiner3 });
    }

    /**
     * This method is deprecated.
     * Migrate uses of this method to {@link #forEachUniquePair(Class, BiJoiner, BiJoiner, BiJoiner, BiJoiner)},
     * but first understand that the same precautions apply as with the use of {@link #from(Class)}.
     * <p>
     * The original Javadoc of this method follows:
     * <p>
     * As defined by {@link #fromUniquePair(Class, BiJoiner)}.
     *
     * @param <A> the type of the matched problem fact or {@link PlanningEntity planning entity}
     * @return a stream that matches every unique combination of A and another A for which all the
     *         {@link BiJoiner joiners} are true
     * @deprecated Prefer {@link #forEachUniquePair(Class, BiJoiner, BiJoiner, BiJoiner, BiJoiner)},
     *             which exhibits the same behavior for planning variables
     *             which both allow and don't allow unassigned values.
     */
    @Deprecated(forRemoval = true)
    default @NonNull <A> BiConstraintStream<A, A> fromUniquePair(@NonNull Class<A> fromClass,
            @NonNull BiJoiner<A, A> joiner1, @NonNull BiJoiner<A, A> joiner2,
            @NonNull BiJoiner<A, A> joiner3, @NonNull BiJoiner<A, A> joiner4) {
        return fromUniquePair(fromClass, new BiJoiner[] { joiner1, joiner2, joiner3, joiner4 });
    }

    /**
     * This method is deprecated.
     * Migrate uses of this method to {@link #forEachUniquePair(Class, BiJoiner...)},
     * but first understand that the same precautions apply as with the use of {@link #from(Class)}.
     * <p>
     * The original Javadoc of this method follows:
     * <p>
     * As defined by {@link #fromUniquePair(Class, BiJoiner)}.
     * <p>
     * This method causes <i>Unchecked generics array creation for varargs parameter</i> warnings,
     * but we can't fix it with a {@link SafeVarargs} annotation because it's an interface method.
     * Therefore, there are overloaded methods with up to 4 {@link BiJoiner} parameters.
     *
     * @param <A> the type of the matched problem fact or {@link PlanningEntity planning entity}
     * @return a stream that matches every unique combination of A and another A for which all the
     *         {@link BiJoiner joiners} are true
     * @deprecated Prefer {@link #forEachUniquePair(Class, BiJoiner...)},
     *             which exhibits the same behavior for planning variables
     *             which both allow and don't allow unassigned values.
     */
    @Deprecated(forRemoval = true)
    @NonNull
    <A> BiConstraintStream<A, A> fromUniquePair(@NonNull Class<A> fromClass, @NonNull BiJoiner<A, A>... joiners);
}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/score | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/score/stream/ConstraintJustification.java | package ai.timefold.solver.core.api.score.stream;
import java.util.UUID;
import ai.timefold.solver.core.api.score.Score;
import ai.timefold.solver.core.api.score.analysis.MatchAnalysis;
import ai.timefold.solver.core.api.score.analysis.ScoreAnalysis;
import ai.timefold.solver.core.api.score.constraint.ConstraintMatch;
import ai.timefold.solver.core.api.score.stream.uni.UniConstraintStream;
import ai.timefold.solver.core.api.solver.SolutionManager;
/**
 * Marker interface for constraint justifications.
 * All classes used as constraint justifications must implement this interface.
 *
 * <p>
 * Implementations may decide to implement {@link Comparable}
 * to preserve order of instances when displayed in user interfaces, logs etc.
 * This is entirely optional.
 *
 * <p>
 * If two instances of this class are {@link Object#equals(Object) equal},
 * they are considered to be the same justification.
 * This matters in case of {@link SolutionManager#analyze(Object)} score analysis
 * where such justifications are grouped together.
 * This situation is likely to occur in case a {@link ConstraintStream} produces duplicate tuples,
 * which can be avoided by using {@link UniConstraintStream#distinct()} or its bi, tri and quad counterparts.
 * Alternatively, some unique ID (such as {@link UUID#randomUUID()}) can be used to distinguish between instances.
 *
 * <p>
 * Score analysis does not {@link ScoreAnalysis#diff(ScoreAnalysis) diff} contents of the implementations;
 * instead it uses equality of the implementations (as defined above) to tell them apart from the outside.
 * For this reason, it is recommended that:
 * <ul>
 * <li>The implementations must not use {@link Score} in their {@link Object#equals(Object) equality}
 * and hash code computations,
 * as that would prevent diffing from working entirely.</li>
 * <li>The implementations should not store any {@link Score} instances,
 * as they would not be diffed, leading to confusion with {@link MatchAnalysis#score()}, which does get diffed.</li>
 * </ul>
 *
 * <p>
 * If the user wishes to use score analysis, they are required to ensure
 * that the class(es) implementing this interface can be serialized into any format
 * which is supported by the {@link SolutionManager} implementation, typically JSON.
 *
 * @see ConstraintMatch#getJustification()
 */
public interface ConstraintJustification {

}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/score | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/score/stream/ConstraintMetaModel.java | package ai.timefold.solver.core.api.score.stream;
import java.util.Collection;
import java.util.Set;
import ai.timefold.solver.core.api.score.constraint.ConstraintRef;
import org.jspecify.annotations.NonNull;
import org.jspecify.annotations.Nullable;
/**
 * Provides information about the known constraints.
 * Works in combination with {@link ConstraintProvider}.
 */
public interface ConstraintMetaModel {

    /**
     * Returns the constraint for the given reference.
     *
     * @param constraintRef reference identifying the constraint to look up
     * @return null if such constraint does not exist
     */
    @Nullable
    Constraint getConstraint(@NonNull ConstraintRef constraintRef);

    /**
     * Returns all constraints defined in the {@link ConstraintProvider}.
     *
     * @return iteration order is undefined
     */
    @NonNull
    Collection<Constraint> getConstraints();

    /**
     * Returns all constraints from {@link #getConstraints()} that belong to the given group.
     *
     * @param constraintGroup the group whose constraints should be returned
     * @return iteration order is undefined
     */
    @NonNull
    Collection<Constraint> getConstraintsPerGroup(@NonNull String constraintGroup);

    /**
     * Returns constraint groups with at least one constraint in it.
     *
     * @return iteration order is undefined
     */
    @NonNull
    Set<String> getConstraintGroups();

}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/score | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/score/stream/ConstraintProvider.java | package ai.timefold.solver.core.api.score.stream;
import ai.timefold.solver.core.api.domain.constraintweight.ConstraintWeight;
import ai.timefold.solver.core.api.score.Score;
import org.jspecify.annotations.NonNull;
/**
 * Used by Constraint Streams' {@link Score} calculation.
 * An implementation must be stateless in order to facilitate building a single set of constraints
 * independent of potentially changing constraint weights.
 */
public interface ConstraintProvider {

    /**
     * This method is called once to create the constraints.
     * To create a {@link Constraint}, start with {@link ConstraintFactory#forEach(Class)}.
     *
     * @param constraintFactory the factory from which every constraint stream originates
     * @return an array of all {@link Constraint constraints} that could apply.
     *         The constraints with a zero {@link ConstraintWeight} for a particular problem
     *         will be automatically disabled when scoring that problem, to improve performance.
     */
    Constraint @NonNull [] defineConstraints(@NonNull ConstraintFactory constraintFactory);
}
|
0 | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/score | java-sources/ai/timefold/solver/timefold-solver-core/1.26.1/ai/timefold/solver/core/api/score/stream/ConstraintStream.java | package ai.timefold.solver.core.api.score.stream;
import java.util.stream.Stream;
import ai.timefold.solver.core.api.domain.constraintweight.ConstraintConfiguration;
import ai.timefold.solver.core.api.domain.constraintweight.ConstraintWeight;
import ai.timefold.solver.core.api.domain.entity.PlanningEntity;
import ai.timefold.solver.core.api.domain.solution.ConstraintWeightOverrides;
import ai.timefold.solver.core.api.domain.solution.PlanningSolution;
import ai.timefold.solver.core.api.domain.solution.ProblemFactCollectionProperty;
import ai.timefold.solver.core.api.domain.solution.ProblemFactProperty;
import ai.timefold.solver.core.api.score.Score;
import ai.timefold.solver.core.api.score.constraint.ConstraintMatchTotal;
import ai.timefold.solver.core.api.score.constraint.ConstraintRef;
import ai.timefold.solver.core.api.score.stream.bi.BiConstraintStream;
import ai.timefold.solver.core.api.score.stream.bi.BiJoiner;
import ai.timefold.solver.core.api.score.stream.uni.UniConstraintStream;
import ai.timefold.solver.core.api.solver.SolutionManager;
import org.jspecify.annotations.NonNull;
/**
 * A constraint stream is a declaration on how to match {@link UniConstraintStream one}, {@link BiConstraintStream two}
 * or more objects.
 * Constraint streams are similar to a declaration of a JDK {@link Stream} or an SQL query,
 * but they support incremental score calculation and {@link SolutionManager#analyze(Object) score analysis}.
 * <p>
 * An object that passes through constraint streams is called a fact.
 * It's either a {@link ProblemFactCollectionProperty problem fact} or a {@link PlanningEntity planning entity}.
 * <p>
 * A constraint stream is typically created with {@link ConstraintFactory#forEach(Class)},
 * or by joining another constraint stream with {@link UniConstraintStream#join(UniConstraintStream, BiJoiner)}.
 * Constraint streams form a directed, non-cyclic graph, with multiple start nodes (which listen to fact changes)
 * and one end node per {@link Constraint} (which affect the {@link Score}).
 * <p>
 * Throughout this documentation, we will be using the following terminology:
 *
 * <dl>
 * <dt>Constraint Stream</dt>
 * <dd>A chain of different operations, originated by {@link ConstraintFactory#forEach(Class)} (or similar
 * methods) and terminated by a penalization or reward operation.</dd>
 * <dt>Operation</dt>
 * <dd>Operations (implementations of {@link ConstraintStream}) are parts of a constraint stream which mutate
 * it.
 * They may remove tuples from further evaluation, expand or contract streams. Every constraint stream has
 * a terminal operation, which is either a penalization or a reward.</dd>
 * <dt>Fact</dt>
 * <dd>Object instance entering the constraint stream.</dd>
 * <dt>Genuine Fact</dt>
 * <dd>Fact that enters the constraint stream either through a from(...) call or through a join(...) call.
 * Genuine facts are either planning entities (see {@link PlanningEntity}) or problem facts (see
 * {@link ProblemFactProperty} or {@link ProblemFactCollectionProperty}).</dd>
 * <dt>Inferred Fact</dt>
 * <dd>Fact that enters the constraint stream through a computation.
 * This would typically happen through an operation such as groupBy(...).</dd>
 * <dt>Tuple</dt>
 * <dd>A collection of facts that the constraint stream operates on, propagating them from operation to
 * operation.
 * For example, {@link UniConstraintStream} operates on single-fact tuples {A} and {@link BiConstraintStream}
 * operates on two-fact tuples {A, B}.
 * Putting facts into a tuple implies a relationship exists between these facts.</dd>
 * <dt>Match</dt>
 * <dd>Match is a tuple that reached the terminal operation of a constraint stream and is therefore either
 * penalized or rewarded.</dd>
 * <dt>Cardinality</dt>
 * <dd>The number of facts in a tuple. Uni constraint streams have a cardinality of 1, bi constraint streams
 * have a cardinality of 2, etc.</dd>
 * <dt>Conversion</dt>
 * <dd>An operation that changes the cardinality of a constraint stream.
 * This typically happens through join(...) or a groupBy(...) operations.</dd>
 * </dl>
 */
public interface ConstraintStream {

    /**
     * The {@link ConstraintFactory} that built this.
     */
    @NonNull
    ConstraintFactory getConstraintFactory();

    // ************************************************************************
    // Penalize/reward
    // ************************************************************************

    /**
     * Negatively impact the {@link Score}: subtract the constraintWeight for each match.
     * <p>
     * To avoid hard-coding the constraintWeight, to allow end-users to tweak it,
     * use {@link #penalizeConfigurable(String)} and a {@link ConstraintConfiguration} instead.
     * <p>
     * The {@link ConstraintRef#packageName() constraint package} defaults to the package of the {@link PlanningSolution} class.
     *
     * @deprecated Prefer {@link UniConstraintStream#penalize(Score)} and equivalent bi/tri/... overloads.
     * @param constraintName shows up in {@link ConstraintMatchTotal} during score justification
     * @param constraintWeight the weight subtracted from the {@link Score} for each match
     */
    @Deprecated(forRemoval = true)
    @NonNull
    Constraint penalize(@NonNull String constraintName, @NonNull Score<?> constraintWeight);

    /**
     * As defined by {@link #penalize(String, Score)}.
     *
     * @param constraintPackage becomes the {@link ConstraintRef#packageName() constraint package}
     * @deprecated Prefer {@link UniConstraintStream#penalize(Score)} and equivalent bi/tri/... overloads.
     */
    @Deprecated(forRemoval = true)
    @NonNull
    Constraint penalize(@NonNull String constraintPackage, @NonNull String constraintName, @NonNull Score<?> constraintWeight);

    /**
     * Negatively impact the {@link Score}: subtract the {@link ConstraintWeight} for each match.
     * <p>
     * The constraintWeight comes from an {@link ConstraintWeight} annotated member on the {@link ConstraintConfiguration},
     * so end users can change the constraint weights dynamically.
     * This constraint may be deactivated if the {@link ConstraintWeight} is zero.
     * <p>
     * The {@link ConstraintRef#packageName() constraint package} defaults to
     * {@link ConstraintConfiguration#constraintPackage()}.
     *
     * @param constraintName shows up in {@link ConstraintMatchTotal} during score justification
     * @deprecated Prefer {@code penalize()} and {@link ConstraintWeightOverrides}.
     */
    @Deprecated(forRemoval = true)
    @NonNull
    Constraint penalizeConfigurable(@NonNull String constraintName);

    /**
     * As defined by {@link #penalizeConfigurable(String)}.
     *
     * @param constraintPackage becomes the {@link ConstraintRef#packageName() constraint package}
     * @deprecated Prefer {@code penalize()} and {@link ConstraintWeightOverrides}.
     */
    @Deprecated(forRemoval = true)
    @NonNull
    Constraint penalizeConfigurable(@NonNull String constraintPackage, @NonNull String constraintName);

    /**
     * Positively impact the {@link Score}: add the constraintWeight for each match.
     * <p>
     * To avoid hard-coding the constraintWeight, to allow end-users to tweak it,
     * use {@link #rewardConfigurable(String)} and a {@link ConstraintConfiguration} instead.
     * <p>
     * The {@link ConstraintRef#packageName() constraint package} defaults to the package of the {@link PlanningSolution} class.
     *
     * @deprecated Prefer {@link UniConstraintStream#reward(Score)} and equivalent bi/tri/... overloads.
     * @param constraintName shows up in {@link ConstraintMatchTotal} during score justification
     * @param constraintWeight the weight added to the {@link Score} for each match
     */
    @Deprecated(forRemoval = true)
    @NonNull
    Constraint reward(@NonNull String constraintName, @NonNull Score<?> constraintWeight);

    /**
     * As defined by {@link #reward(String, Score)}.
     *
     * @param constraintPackage becomes the {@link ConstraintRef#packageName() constraint package}
     * @deprecated Prefer {@link UniConstraintStream#reward(Score)} and equivalent bi/tri/... overloads.
     */
    @Deprecated(forRemoval = true)
    @NonNull
    Constraint reward(@NonNull String constraintPackage, @NonNull String constraintName, @NonNull Score<?> constraintWeight);

    /**
     * Positively impact the {@link Score}: add the {@link ConstraintWeight} for each match.
     * <p>
     * The constraintWeight comes from an {@link ConstraintWeight} annotated member on the {@link ConstraintConfiguration},
     * so end users can change the constraint weights dynamically.
     * This constraint may be deactivated if the {@link ConstraintWeight} is zero.
     * <p>
     * The {@link ConstraintRef#packageName() constraint package} defaults to
     * {@link ConstraintConfiguration#constraintPackage()}.
     *
     * @param constraintName shows up in {@link ConstraintMatchTotal} during score justification
     * @deprecated Prefer {@code reward()} and {@link ConstraintWeightOverrides}.
     */
    @Deprecated(forRemoval = true)
    @NonNull
    Constraint rewardConfigurable(@NonNull String constraintName);

    /**
     * As defined by {@link #rewardConfigurable(String)}.
     *
     * @param constraintPackage becomes the {@link ConstraintRef#packageName() constraint package}
     * @deprecated Prefer {@code reward()} and {@link ConstraintWeightOverrides}.
     */
    @Deprecated(forRemoval = true)
    @NonNull
    Constraint rewardConfigurable(@NonNull String constraintPackage, @NonNull String constraintName);

    /**
     * Positively or negatively impact the {@link Score} by the constraintWeight for each match.
     * <p>
     * Use {@code penalize(...)} or {@code reward(...)} instead, unless this constraint can both have positive and
     * negative weights.
     * <p>
     * The {@link ConstraintRef#packageName() constraint package} defaults to the package of the {@link PlanningSolution} class.
     *
     * @deprecated Prefer {@link UniConstraintStream#impact(Score)} and equivalent bi/tri/... overloads.
     * @param constraintName shows up in {@link ConstraintMatchTotal} during score justification
     * @param constraintWeight the weight applied to the {@link Score} for each match; may be positive or negative
     */
    @Deprecated(forRemoval = true)
    @NonNull
    Constraint impact(@NonNull String constraintName, @NonNull Score<?> constraintWeight);

    /**
     * As defined by {@link #impact(String, Score)}.
     *
     * @param constraintPackage becomes the {@link ConstraintRef#packageName() constraint package}
     * @deprecated Prefer {@link UniConstraintStream#impact(Score)} and equivalent bi/tri/... overloads.
     */
    @Deprecated(forRemoval = true)
    @NonNull
    Constraint impact(@NonNull String constraintPackage, @NonNull String constraintName, @NonNull Score<?> constraintWeight);
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.