index
int64
repo_id
string
file_path
string
content
string
0
java-sources/ai/scikitlearn4x/sklearn4jvm/0.0.4/ai/sklearn4j/core/packaging
java-sources/ai/scikitlearn4x/sklearn4jvm/0.0.4/ai/sklearn4j/core/packaging/loaders/IScikitLearnLoaderListFieldSetter.java
package ai.sklearn4j.core.packaging.loaders; import java.util.List; /** * A container for a method that sets a list value into a scikit-learn object during deserialization. * * @param <ObjectType> The type of the scikit-learn object. */ public interface IScikitLearnLoaderListFieldSetter<ObjectType> { /** * Sets a value into a scikit-learn object. * * @param obj The scikit-learn object. * @param value The value to be set. */ void setListField(ObjectType obj, List<Object> value); }
0
java-sources/ai/scikitlearn4x/sklearn4jvm/0.0.4/ai/sklearn4j/core/packaging
java-sources/ai/scikitlearn4x/sklearn4jvm/0.0.4/ai/sklearn4j/core/packaging/loaders/IScikitLearnLoaderListOfNumpyArrayFieldSetter.java
package ai.sklearn4j.core.packaging.loaders;

import ai.sklearn4j.core.libraries.numpy.NumpyArray;

import java.util.List;

/**
 * A container for a method that sets a list of numpy arrays into a scikit-learn object
 * during deserialization.
 *
 * @param <ObjectType> The type of the scikit-learn object being populated.
 * @param <ArrayType>  The element type of the numpy arrays.
 */
@FunctionalInterface
public interface IScikitLearnLoaderListOfNumpyArrayFieldSetter<ObjectType, ArrayType> {
    /**
     * Sets a list of numpy arrays into a scikit-learn object.
     *
     * @param obj   The scikit-learn object being populated.
     * @param value The deserialized list of numpy arrays to be set.
     */
    void setListOfNumpyArrayField(ObjectType obj, List<NumpyArray<ArrayType>> value);
}
0
java-sources/ai/scikitlearn4x/sklearn4jvm/0.0.4/ai/sklearn4j/core/packaging
java-sources/ai/scikitlearn4x/sklearn4jvm/0.0.4/ai/sklearn4j/core/packaging/loaders/IScikitLearnLoaderLongFieldSetter.java
package ai.sklearn4j.core.packaging.loaders; /** * A container for a method that sets a long value into a scikit-learn object during deserialization. * * @param <ObjectType> The type of the scikit-learn object. */ public interface IScikitLearnLoaderLongFieldSetter<ObjectType> { /** * Sets a long value into a scikit-learn object. * * @param obj The scikit-learn object. * @param value The value to be set. */ void setLongField(ObjectType obj, long value); }
0
java-sources/ai/scikitlearn4x/sklearn4jvm/0.0.4/ai/sklearn4j/core/packaging
java-sources/ai/scikitlearn4x/sklearn4jvm/0.0.4/ai/sklearn4j/core/packaging/loaders/IScikitLearnLoaderNumpyArrayFieldSetter.java
package ai.sklearn4j.core.packaging.loaders;

import ai.sklearn4j.core.libraries.numpy.NumpyArray;

/**
 * A container for a method that sets a numpy array value into a scikit-learn object during
 * deserialization.
 *
 * @param <ObjectType> The type of the scikit-learn object being populated.
 */
@FunctionalInterface
public interface IScikitLearnLoaderNumpyArrayFieldSetter<ObjectType> {
    /**
     * Sets a numpy array value into a scikit-learn object.
     *
     * @param obj   The scikit-learn object being populated.
     * @param value The deserialized numpy array to be set. Kept raw for compatibility
     *              with existing implementors; parameterizing it would be a breaking change.
     */
    void setNumpyArrayField(ObjectType obj, NumpyArray value);
}
0
java-sources/ai/scikitlearn4x/sklearn4jvm/0.0.4/ai/sklearn4j/core/packaging
java-sources/ai/scikitlearn4x/sklearn4jvm/0.0.4/ai/sklearn4j/core/packaging/loaders/IScikitLearnLoaderStringArrayFieldSetter.java
package ai.sklearn4j.core.packaging.loaders; /** * A container for a method that sets a string array value into a scikit-learn object during deserialization. * * @param <ObjectType> The type of the scikit-learn object. */ public interface IScikitLearnLoaderStringArrayFieldSetter<ObjectType> { /** * Sets a string array value into a scikit-learn object. * * @param obj The scikit-learn object. * @param value The value to be set. */ void setStringArrayField(ObjectType obj, String[] value); }
0
java-sources/ai/scikitlearn4x/sklearn4jvm/0.0.4/ai/sklearn4j/core/packaging
java-sources/ai/scikitlearn4x/sklearn4jvm/0.0.4/ai/sklearn4j/core/packaging/loaders/IScikitLearnLoaderStringFieldSetter.java
package ai.sklearn4j.core.packaging.loaders; /** * A container for a method that sets a string value into a scikit-learn object during deserialization. * * @param <ObjectType> The type of the scikit-learn object. */ public interface IScikitLearnLoaderStringFieldSetter<ObjectType> { /** * Sets a long value into a scikit-learn object. * * @param obj The scikit-learn object. * @param value The value to be set. */ void setStringField(ObjectType obj, String value); }
0
java-sources/ai/scikitlearn4x/sklearn4jvm/0.0.4/ai/sklearn4j/core/packaging
java-sources/ai/scikitlearn4x/sklearn4jvm/0.0.4/ai/sklearn4j/core/packaging/loaders/ScikitLearnContentLoaderFactory.java
package ai.sklearn4j.core.packaging.loaders; import ai.sklearn4j.core.ScikitLearnCoreException; import ai.sklearn4j.core.packaging.loaders.classifiers.naive_bayes.*; import ai.sklearn4j.core.packaging.loaders.preprocessing.data.*; import ai.sklearn4j.core.packaging.loaders.preprocessing.label.LabelBinarizerContentLoader; import ai.sklearn4j.core.packaging.loaders.preprocessing.label.LabelEncoderContentLoader; import ai.sklearn4j.core.packaging.loaders.preprocessing.label.MultiLabelBinarizerContentLoader; import java.util.HashMap; import java.util.Map; /** * A factory class that instantiate a loader of type IScikitLearnContentLoader for a given * serializer name. */ public class ScikitLearnContentLoaderFactory { /** * A map of registered loaders. */ private static Map<String, IScikitLearnContentLoader> registeredLoaders = null; /** * Static initialization of the class and registering the available loaders. */ static { registeredLoaders = new HashMap<>(); registerNaiveBayesLoaders(); registerLabelPreprocessingLoaders(); registerDataPreprocessingLoaders(); } /** * Registers the content loaders for data preprocessing. */ private static void registerDataPreprocessingLoaders() { registerLoader(new MinimumMaximumScalerContentLoader()); registerLoader(new StandardScalerContentLoader()); registerLoader(new MaximumAbsoluteScalerContentLoader()); registerLoader(new RobustScalerContentLoader()); registerLoader(new NormalizerContentLoader()); registerLoader(new BinarizerContentLoader()); // registerLoader(new QuantileTransformerContentLoader()); } /** * Registers the content loaders for label preprocessing. */ private static void registerLabelPreprocessingLoaders() { registerLoader(new LabelEncoderContentLoader()); registerLoader(new LabelBinarizerContentLoader()); registerLoader(new MultiLabelBinarizerContentLoader()); } /** * Registers the classes coming from the naive_bayes file in sklearn repository. 
*/ private static void registerNaiveBayesLoaders() { registerLoader(new GaussianNaiveBayesContentLoader()); registerLoader(new BernoulliNaiveBayesContentLoader()); registerLoader(new CategoricalNaiveBayesContentLoader()); registerLoader(new ComplementNaiveBayesContentLoader()); registerLoader(new MultinomialNaiveBayesContentLoader()); } /** * Registers a new loader in the static map. * * @param loader Instance of the loader to be registered. */ private static void registerLoader(IScikitLearnContentLoader loader) { registeredLoaders.put(loader.getTypeName(), loader); } /** * Returns a loader based on its name for deserialization of contents. * * @param type Name/ID of the loader. * @return An IScikitLearnContentLoader associated with the requested loader name. */ public static IScikitLearnContentLoader loaderForType(String type) { if (registeredLoaders.containsKey(type)) { return registeredLoaders.get(type); } throw new ScikitLearnCoreException(String.format("The loader type '%s' is not supported.", type)); } }
0
java-sources/ai/scikitlearn4x/sklearn4jvm/0.0.4/ai/sklearn4j/core/packaging/loaders/classifiers
java-sources/ai/scikitlearn4x/sklearn4jvm/0.0.4/ai/sklearn4j/core/packaging/loaders/classifiers/naive_bayes/BernoulliNaiveBayesContentLoader.java
package ai.sklearn4j.core.packaging.loaders.classifiers.naive_bayes; import ai.sklearn4j.core.libraries.numpy.NumpyArray; import ai.sklearn4j.core.packaging.loaders.BaseScikitLearnContentLoader; import ai.sklearn4j.core.packaging.loaders.IScikitLearnContentLoader; import ai.sklearn4j.naive_bayes.BernoulliNaiveBayes; /** * BernoulliNaiveBayes object loader. */ public class BernoulliNaiveBayesContentLoader extends BaseScikitLearnContentLoader<BernoulliNaiveBayes> { /** * Instantiate a new object of BernoulliNaiveBayesContentLoader. */ public BernoulliNaiveBayesContentLoader() { super("nb_bernoulli_serializer"); } /** * Instantiate an unloaded BernoulliNaiveBayes classifier. * * @return The unloaded classifier. */ @Override protected BernoulliNaiveBayes createResultObject() { return new BernoulliNaiveBayes(); } /** * Create a clean instance of the loader. * * @return A clean instance of the loader. */ @Override public IScikitLearnContentLoader duplicate() { return new BernoulliNaiveBayesContentLoader(); } /** * Defines the fields that are required to initialize a trained classifier. */ @Override protected void registerSetters() { registerNumpyArrayField("classes_", this::setClasses); registerNumpyArrayField("class_count_", this::setClassCount); registerNumpyArrayField("class_log_prior_", this::setClassLogPriors); registerNumpyArrayField("feature_log_prob_", this::setFeatureLogProbabilities); registerNumpyArrayField("feature_count_", this::setFeatureCount); registerDoubleField("binarize", this::setBinarization); } /** * Sets the binarization to be performed on the data. * * @param result The classifier to be loaded. * @param value The binarization threshold. */ private void setBinarization(BernoulliNaiveBayes result, double value) { result.setBinarizationThreshold(value); } /** * Sets the feature's log probability in the training data. * * @param result The classifier to be loaded. * @param numpyArray The feature's log probability in the training data. 
*/ private void setFeatureLogProbabilities(BernoulliNaiveBayes result, NumpyArray numpyArray) { result.setFeatureLogProbabilities(numpyArray); } /** * Sets the frequency of the features in the training data. * * @param result The classifier to be loaded. * @param numpyArray The frequency of the features in the training data. */ private void setFeatureCount(BernoulliNaiveBayes result, NumpyArray numpyArray) { result.setFeatureCount(numpyArray); } /** * Sets the probability of each class. * * @param result The classifier to be loaded. * @param value The probability of each class. */ private void setClassLogPriors(BernoulliNaiveBayes result, NumpyArray value) { result.setClassLogPrior(value); } /** * Sets the class labels known to the classifier. * * @param result The classifier to be loaded. * @param value The class labels known to the classifier. */ private void setClasses(BernoulliNaiveBayes result, NumpyArray value) { result.setClasses(value); } /** * Sets the number of training samples observed in each class. * * @param result The classifier to be loaded. * @param value The number of training samples observed in each class. */ private void setClassCount(BernoulliNaiveBayes result, NumpyArray value) { result.setClassCounts(value); } }
0
java-sources/ai/scikitlearn4x/sklearn4jvm/0.0.4/ai/sklearn4j/core/packaging/loaders/classifiers
java-sources/ai/scikitlearn4x/sklearn4jvm/0.0.4/ai/sklearn4j/core/packaging/loaders/classifiers/naive_bayes/CategoricalNaiveBayesContentLoader.java
package ai.sklearn4j.core.packaging.loaders.classifiers.naive_bayes; import ai.sklearn4j.core.libraries.numpy.NumpyArray; import ai.sklearn4j.core.packaging.loaders.BaseScikitLearnContentLoader; import ai.sklearn4j.core.packaging.loaders.IScikitLearnContentLoader; import ai.sklearn4j.naive_bayes.CategoricalNaiveBayes; import java.util.List; /** * CategoricalNaiveBayes object loader. */ public class CategoricalNaiveBayesContentLoader extends BaseScikitLearnContentLoader<CategoricalNaiveBayes> { /** * Instantiate a new object of CategoricalNaiveBayesContentLoader. */ public CategoricalNaiveBayesContentLoader() { super("nb_categorical_serializer"); } /** * Instantiate an unloaded CategoricalNaiveBayes classifier. * * @return The unloaded classifier. */ @Override protected CategoricalNaiveBayes createResultObject() { return new CategoricalNaiveBayes(); } /** * Create a clean instance of the loader. * * @return A clean instance of the loader. */ @Override public IScikitLearnContentLoader duplicate() { return new CategoricalNaiveBayesContentLoader(); } /** * Defines the fields that are required to initialize a trained classifier. */ @Override protected void registerSetters() { registerNumpyArrayField("classes_", this::setClasses); registerNumpyArrayField("class_count_", this::setClassCount); registerNumpyArrayField("class_log_prior_", this::setClassLogPriors); registerListOfNumpyArrayField("feature_log_prob_", this::setFeatureLogProbabilities); registerListOfNumpyArrayField("category_count_", this::setCategoryCounts); registerNumpyArrayField("n_categories_", this::setNumberInCategories); } /** * Sets the field n_categories_ in the classifier. * * @param result The classifier to be loaded. * @param numpyArray The n in each category. */ private void setNumberInCategories(CategoricalNaiveBayes result, NumpyArray numpyArray) { } /** * Sets the category_count_ in the classifier. * * @param result The classifier to be loaded. 
* @param numpyArrays The categories count in the training data. */ private void setCategoryCounts(CategoricalNaiveBayes result, List<NumpyArray<Double>> numpyArrays) { } /** * Sets the feature's log probability in the training data. * * @param result The classifier to be loaded. * @param numpyArray The feature's log probability in the training data. */ private void setFeatureLogProbabilities(CategoricalNaiveBayes result, List<NumpyArray<Double>> numpyArray) { result.setFeatureLogProbabilities(numpyArray); } /** * Sets the probability of each class. * * @param result The classifier to be loaded. * @param value The probability of each class. */ private void setClassLogPriors(CategoricalNaiveBayes result, NumpyArray value) { result.setClassLogPrior(value); } /** * Sets the class labels known to the classifier. * * @param result The classifier to be loaded. * @param value The class labels known to the classifier. */ private void setClasses(CategoricalNaiveBayes result, NumpyArray value) { result.setClasses(value); } /** * Sets the number of training samples observed in each class. * * @param result The classifier to be loaded. * @param value The number of training samples observed in each class. */ private void setClassCount(CategoricalNaiveBayes result, NumpyArray value) { result.setClassCounts(value); } }
0
java-sources/ai/scikitlearn4x/sklearn4jvm/0.0.4/ai/sklearn4j/core/packaging/loaders/classifiers
java-sources/ai/scikitlearn4x/sklearn4jvm/0.0.4/ai/sklearn4j/core/packaging/loaders/classifiers/naive_bayes/ComplementNaiveBayesContentLoader.java
package ai.sklearn4j.core.packaging.loaders.classifiers.naive_bayes; import ai.sklearn4j.core.libraries.numpy.NumpyArray; import ai.sklearn4j.core.packaging.loaders.BaseScikitLearnContentLoader; import ai.sklearn4j.core.packaging.loaders.IScikitLearnContentLoader; import ai.sklearn4j.naive_bayes.ComplementNaiveBayes; /** * ComplementNaiveBayes object loader. */ public class ComplementNaiveBayesContentLoader extends BaseScikitLearnContentLoader<ComplementNaiveBayes> { /** * Instantiate a new object of ComplementNaiveBayesContentLoader. */ public ComplementNaiveBayesContentLoader() { super("nb_complement_serializer"); } /** * Instantiate an unloaded ComplementNaiveBayes classifier. * * @return The unloaded classifier. */ @Override protected ComplementNaiveBayes createResultObject() { return new ComplementNaiveBayes(); } /** * Create a clean instance of the loader. * * @return A clean instance of the loader. */ @Override public IScikitLearnContentLoader duplicate() { return new ComplementNaiveBayesContentLoader(); } /** * Defines the fields that are required to initialize a trained classifier. */ @Override protected void registerSetters() { registerNumpyArrayField("classes_", this::setClasses); registerNumpyArrayField("class_count_", this::setClassCount); registerNumpyArrayField("class_log_prior_", this::setClassLogPriors); registerNumpyArrayField("feature_log_prob_", this::setFeatureLogProbabilities); registerNumpyArrayField("feature_count_", this::setFeatureCount); registerNumpyArrayField("feature_all_", this::setFeatureAll); } /** * Sets the feature_all_ field. * * @param result The classifier to be loaded. * @param numpyArray The feature_all_ field. */ private void setFeatureAll(ComplementNaiveBayes result, NumpyArray numpyArray) { } /** * Sets the feature's log probability in the training data. * * @param result The classifier to be loaded. * @param numpyArray The feature's log probability in the training data. 
*/ private void setFeatureLogProbabilities(ComplementNaiveBayes result, NumpyArray numpyArray) { result.setFeatureLogProbabilities(numpyArray); } /** * Sets the frequency of the features in the training data. * * @param result The classifier to be loaded. * @param numpyArray The frequency of the features in the training data. */ private void setFeatureCount(ComplementNaiveBayes result, NumpyArray numpyArray) { result.setFeatureCount(numpyArray); } /** * Sets the probability of each class. * * @param result The classifier to be loaded. * @param value The probability of each class. */ private void setClassLogPriors(ComplementNaiveBayes result, NumpyArray value) { result.setClassLogPrior(value); } /** * Sets the class labels known to the classifier. * * @param result The classifier to be loaded. * @param value The class labels known to the classifier. */ private void setClasses(ComplementNaiveBayes result, NumpyArray value) { result.setClasses(value); } /** * Sets the number of training samples observed in each class. * * @param result The classifier to be loaded. * @param value The number of training samples observed in each class. */ private void setClassCount(ComplementNaiveBayes result, NumpyArray value) { result.setClassCounts(value); } }
0
java-sources/ai/scikitlearn4x/sklearn4jvm/0.0.4/ai/sklearn4j/core/packaging/loaders/classifiers
java-sources/ai/scikitlearn4x/sklearn4jvm/0.0.4/ai/sklearn4j/core/packaging/loaders/classifiers/naive_bayes/GaussianNaiveBayesContentLoader.java
package ai.sklearn4j.core.packaging.loaders.classifiers.naive_bayes; import ai.sklearn4j.core.libraries.numpy.NumpyArray; import ai.sklearn4j.core.packaging.loaders.BaseScikitLearnContentLoader; import ai.sklearn4j.core.packaging.loaders.IScikitLearnContentLoader; import ai.sklearn4j.naive_bayes.GaussianNaiveBayes; /** * GaussianNaiveBayes object loader. */ public class GaussianNaiveBayesContentLoader extends BaseScikitLearnContentLoader<GaussianNaiveBayes> { /** * Instantiate a new object of GaussianNaiveBayesContentLoader. */ public GaussianNaiveBayesContentLoader() { super("nb_gaussian_serializer"); } /** * Instantiate an unloaded GaussianNaiveBayes classifier. * * @return The unloaded classifier. */ @Override protected GaussianNaiveBayes createResultObject() { return new GaussianNaiveBayes(); } /** * Create a clean instance of the loader. * * @return A clean instance of the loader. */ @Override public IScikitLearnContentLoader duplicate() { return new GaussianNaiveBayesContentLoader(); } /** * Defines the fields that are required to initialize a trained classifier. */ @Override protected void registerSetters() { registerNumpyArrayField("class_count_", this::setClassCount); registerNumpyArrayField("classes_", this::setClasses); registerNumpyArrayField("class_prior_", this::setClassPriors); registerNumpyArrayField("theta_", this::setMeanValues); registerNumpyArrayField("var_", this::setVarianceValues); } /** * Sets the variance of each feature per class. * * @param result The classifier to be loaded. * @param value The variance of each feature per class. */ private void setVarianceValues(GaussianNaiveBayes result, NumpyArray value) { result.setSigma(value); } /** * Sets the mean of each feature per class. * * @param result The classifier to be loaded. * @param value The mean of each feature per class. */ private void setMeanValues(GaussianNaiveBayes result, NumpyArray value) { result.setTheta(value); } /** * Sets the probability of each class. 
* * @param result The classifier to be loaded. * @param value The probability of each class. */ private void setClassPriors(GaussianNaiveBayes result, NumpyArray value) { result.setClassPriors(value); } /** * Sets the class labels known to the classifier. * * @param result The classifier to be loaded. * @param value The class labels known to the classifier. */ private void setClasses(GaussianNaiveBayes result, NumpyArray value) { result.setClasses(value); } /** * Sets the number of training samples observed in each class. * * @param result The classifier to be loaded. * @param value The number of training samples observed in each class. */ private void setClassCount(GaussianNaiveBayes result, NumpyArray value) { result.setClassCounts(value); } }
0
java-sources/ai/scikitlearn4x/sklearn4jvm/0.0.4/ai/sklearn4j/core/packaging/loaders/classifiers
java-sources/ai/scikitlearn4x/sklearn4jvm/0.0.4/ai/sklearn4j/core/packaging/loaders/classifiers/naive_bayes/MultinomialNaiveBayesContentLoader.java
package ai.sklearn4j.core.packaging.loaders.classifiers.naive_bayes; import ai.sklearn4j.core.libraries.numpy.NumpyArray; import ai.sklearn4j.core.packaging.loaders.BaseScikitLearnContentLoader; import ai.sklearn4j.core.packaging.loaders.IScikitLearnContentLoader; import ai.sklearn4j.naive_bayes.MultinomialNaiveBayes; /** * MultinomialNaiveBayes object loader. */ public class MultinomialNaiveBayesContentLoader extends BaseScikitLearnContentLoader<MultinomialNaiveBayes> { /** * Instantiate a new object of MultinomialNaiveBayesContentLoader. */ public MultinomialNaiveBayesContentLoader() { super("nb_multinomial_serializer"); } /** * Instantiate an unloaded MultinomialNaiveBayes classifier. * * @return The unloaded classifier. */ @Override protected MultinomialNaiveBayes createResultObject() { return new MultinomialNaiveBayes(); } /** * Create a clean instance of the loader. * * @return A clean instance of the loader. */ @Override public IScikitLearnContentLoader duplicate() { return new MultinomialNaiveBayesContentLoader(); } /** * Defines the fields that are required to initialize a trained classifier. */ @Override protected void registerSetters() { registerNumpyArrayField("classes_", this::setClasses); registerNumpyArrayField("class_count_", this::setClassCount); registerNumpyArrayField("class_log_prior_", this::setClassLogPriors); registerNumpyArrayField("feature_log_prob_", this::setFeatureLogProbabilities); registerNumpyArrayField("feature_count_", this::setFeatureCount); } /** * Sets the feature's log probability in the training data. * * @param result The classifier to be loaded. * @param numpyArray The feature's log probability in the training data. */ private void setFeatureLogProbabilities(MultinomialNaiveBayes result, NumpyArray numpyArray) { result.setFeatureLogProbabilities(numpyArray); } /** * Sets the frequency of the features in the training data. * * @param result The classifier to be loaded. 
* @param numpyArray The frequency of the features in the training data. */ private void setFeatureCount(MultinomialNaiveBayes result, NumpyArray numpyArray) { result.setFeatureCount(numpyArray); } /** * Sets the probability of each class. * * @param result The classifier to be loaded. * @param value The probability of each class. */ private void setClassLogPriors(MultinomialNaiveBayes result, NumpyArray value) { result.setClassLogPrior(value); } /** * Sets the class labels known to the classifier. * * @param result The classifier to be loaded. * @param value The class labels known to the classifier. */ private void setClasses(MultinomialNaiveBayes result, NumpyArray value) { result.setClasses(value); } /** * Sets the number of training samples observed in each class. * * @param result The classifier to be loaded. * @param value The number of training samples observed in each class. */ private void setClassCount(MultinomialNaiveBayes result, NumpyArray value) { result.setClassCounts(value); } }
0
java-sources/ai/scikitlearn4x/sklearn4jvm/0.0.4/ai/sklearn4j/core/packaging/loaders/preprocessing
java-sources/ai/scikitlearn4x/sklearn4jvm/0.0.4/ai/sklearn4j/core/packaging/loaders/preprocessing/data/BinarizerContentLoader.java
// ================================================================== // Deserialize Binarizer // // Scaffolded from: https://scikit-learn.org/stable/modules/generated/sklearn.preprocessing.Binarizer.html#sklearn.preprocessing.Binarizer // ================================================================== package ai.sklearn4j.core.packaging.loaders.preprocessing.data; import ai.sklearn4j.core.packaging.loaders.BaseScikitLearnContentLoader; import ai.sklearn4j.core.packaging.loaders.IScikitLearnContentLoader; import ai.sklearn4j.preprocessing.data.Binarizer; /** * Binarizer object loader. */ public class BinarizerContentLoader extends BaseScikitLearnContentLoader<Binarizer> { /** * Instantiate a new object of BinarizerContentLoader. */ public BinarizerContentLoader() { super("pp_binarizer"); } /** * Instantiate an unloaded Binarizer scikit-learn object. * * @return The unloaded scikit-learn object. */ @Override protected Binarizer createResultObject() { return new Binarizer(); } /** * Create a clean instance of the loader. * * @return A clean instance of the loader. */ @Override public IScikitLearnContentLoader duplicate() { return new BinarizerContentLoader(); } /** * Defines the fields that are required to initialize a trained scikit-learn object. */ @Override protected void registerSetters() { // Fields from the documentation registerLongField("n_features", this::setNFeaturesIn); registerStringArrayField("feature_names", this::setFeatureNamesIn); // Fields from the dir() method registerDoubleField("threshold", this::setThreshold); } /** * Feature values below or equal to this are replaced by 0, above it by 1. Threshold may not be * less than 0 for operations on sparse matrices. * * @param result The scikit-learn object to be loaded. * @param value The loaded value from stream. */ private void setThreshold(Binarizer result, double value) { result.setThreshold(value); } /** * Sets the Number of features seen during `fit`. 
* * @param result The scikit-learn object to be loaded. * @param value The loaded value from stream. */ private void setNFeaturesIn(Binarizer result, long value) { result.setNFeaturesIn(value); } /** * Sets the Names of features seen during `fit`. Defined only when `X` has feature * names that are all strings. * * @param result The scikit-learn object to be loaded. * @param value The loaded value from stream. */ private void setFeatureNamesIn(Binarizer result, String[] value) { result.setFeatureNamesIn(value); } }
0
java-sources/ai/scikitlearn4x/sklearn4jvm/0.0.4/ai/sklearn4j/core/packaging/loaders/preprocessing
java-sources/ai/scikitlearn4x/sklearn4jvm/0.0.4/ai/sklearn4j/core/packaging/loaders/preprocessing/data/MaximumAbsoluteScalerContentLoader.java
// ================================================================== // Deserialize MaxAbsScaler // // Scaffolded from: https://scikit-learn.org/stable/modules/generated/sklearn.preprocessing.MaxAbsScaler.html#sklearn.preprocessing.MaxAbsScaler // ================================================================== package ai.sklearn4j.core.packaging.loaders.preprocessing.data; import ai.sklearn4j.core.libraries.numpy.NumpyArray; import ai.sklearn4j.core.packaging.loaders.BaseScikitLearnContentLoader; import ai.sklearn4j.core.packaging.loaders.IScikitLearnContentLoader; import ai.sklearn4j.preprocessing.data.MaximumAbsoluteScaler; /** * MaximumAbsoluteScaler object loader. */ public class MaximumAbsoluteScalerContentLoader extends BaseScikitLearnContentLoader<MaximumAbsoluteScaler> { /** * Instantiate a new object of MaximumAbsoluteScalerContentLoader. */ public MaximumAbsoluteScalerContentLoader() { super("pp_max_abs_scaler"); } /** * Instantiate an unloaded MaximumAbsoluteScaler scikit-learn object. * * @return The unloaded scikit-learn object. */ @Override protected MaximumAbsoluteScaler createResultObject() { return new MaximumAbsoluteScaler(); } /** * Create a clean instance of the loader. * * @return A clean instance of the loader. */ @Override public IScikitLearnContentLoader duplicate() { return new MaximumAbsoluteScalerContentLoader(); } /** * Defines the fields that are required to initialize a trained scikit-learn object. */ @Override protected void registerSetters() { // Fields from the documentation registerNumpyArrayField("scale_", this::setScale); registerNumpyArrayField("max_abs_", this::setMaxAbs); registerLongField("n_features", this::setNFeaturesIn); registerStringArrayField("feature_names", this::setFeatureNamesIn); registerLongField("n_samples_seen_", this::setNSamplesSeen); } /** * Sets the Per feature relative scaling of the data. * * @param result The scikit-learn object to be loaded. * @param value The loaded value from stream. 
*/ private void setScale(MaximumAbsoluteScaler result, NumpyArray value) { result.setScale(value); } /** * Sets the Per feature maximum absolute value. * * @param result The scikit-learn object to be loaded. * @param value The loaded value from stream. */ private void setMaxAbs(MaximumAbsoluteScaler result, NumpyArray value) { result.setMaxAbs(value); } /** * Sets the Number of features seen during `fit`. * * @param result The scikit-learn object to be loaded. * @param value The loaded value from stream. */ private void setNFeaturesIn(MaximumAbsoluteScaler result, long value) { result.setNFeaturesIn(value); } /** * Sets the Names of features seen during `fit`. Defined only when `X` has feature * names that are all strings. * * @param result The scikit-learn object to be loaded. * @param value The loaded value from stream. */ private void setFeatureNamesIn(MaximumAbsoluteScaler result, String[] value) { result.setFeatureNamesIn(value); } /** * Sets the The number of samples processed by the estimator. Will be reset on new * calls to fit, but increments across `partial_fit` calls. * * @param result The scikit-learn object to be loaded. * @param value The loaded value from stream. */ private void setNSamplesSeen(MaximumAbsoluteScaler result, long value) { result.setNSamplesSeen(value); } }
0
java-sources/ai/scikitlearn4x/sklearn4jvm/0.0.4/ai/sklearn4j/core/packaging/loaders/preprocessing
java-sources/ai/scikitlearn4x/sklearn4jvm/0.0.4/ai/sklearn4j/core/packaging/loaders/preprocessing/data/MinimumMaximumScalerContentLoader.java
// ==================================================================
// Deserialize MinMaxScaler
//
// Scaffolded from: https://scikit-learn.org/stable/modules/generated/sklearn.preprocessing.MinMaxScaler.html#sklearn.preprocessing.MinMaxScaler
// ==================================================================
package ai.sklearn4j.core.packaging.loaders.preprocessing.data;

import ai.sklearn4j.core.libraries.numpy.NumpyArray;
import ai.sklearn4j.core.packaging.loaders.BaseScikitLearnContentLoader;
import ai.sklearn4j.core.packaging.loaders.IScikitLearnContentLoader;
import ai.sklearn4j.preprocessing.data.MinimumMaximumScaler;

import java.util.List;

/**
 * MinimumMaximumScaler object loader.
 */
public class MinimumMaximumScalerContentLoader extends BaseScikitLearnContentLoader<MinimumMaximumScaler> {
    /**
     * Instantiate a new object of MinimumMaximumScalerContentLoader.
     */
    public MinimumMaximumScalerContentLoader() {
        super("pp_min_max_scaler");
    }

    /**
     * Instantiate an unloaded MinimumMaximumScaler scikit-learn object.
     *
     * @return The unloaded scikit-learn object.
     */
    @Override
    protected MinimumMaximumScaler createResultObject() {
        return new MinimumMaximumScaler();
    }

    /**
     * Create a clean instance of the loader.
     *
     * @return A clean instance of the loader.
     */
    @Override
    public IScikitLearnContentLoader duplicate() {
        return new MinimumMaximumScalerContentLoader();
    }

    /**
     * Defines the fields that are required to initialize a trained scikit-learn object.
     */
    @Override
    protected void registerSetters() {
        // Fields from the documentation
        registerNumpyArrayField("min_", this::setMin);
        registerNumpyArrayField("scale_", this::setScale);
        registerNumpyArrayField("data_min_", this::setDataMin);
        registerNumpyArrayField("data_max_", this::setDataMax);
        registerNumpyArrayField("data_range_", this::setDataRange);
        registerLongField("n_features", this::setNFeaturesIn);
        registerLongField("n_samples_seen_", this::setNSamplesSeen);
        registerStringArrayField("feature_names", this::setFeatureNamesIn);

        // Fields from the dir() method
        registerLongField("clip", this::setClip);
        registerListField("feature_range", this::setFeatureRange);
    }

    /**
     * Sets the per feature adjustment for minimum. Equivalent to
     * `min - X.min(axis=0) * self.scale_`.
     *
     * @param result The scikit-learn object to be loaded.
     * @param value  The loaded value from stream.
     */
    private void setMin(MinimumMaximumScaler result, NumpyArray value) {
        result.setMin(value);
    }

    /**
     * Sets the per feature relative scaling of the data. Equivalent to
     * `(max - min) / (X.max(axis=0) - X.min(axis=0))`.
     *
     * @param result The scikit-learn object to be loaded.
     * @param value  The loaded value from stream.
     */
    private void setScale(MinimumMaximumScaler result, NumpyArray value) {
        result.setScale(value);
    }

    /**
     * Sets the per feature minimum seen in the data.
     *
     * @param result The scikit-learn object to be loaded.
     * @param value  The loaded value from stream.
     */
    private void setDataMin(MinimumMaximumScaler result, NumpyArray value) {
        result.setDataMin(value);
    }

    /**
     * Sets the per feature maximum seen in the data.
     *
     * @param result The scikit-learn object to be loaded.
     * @param value  The loaded value from stream.
     */
    private void setDataMax(MinimumMaximumScaler result, NumpyArray value) {
        result.setDataMax(value);
    }

    /**
     * Sets the per feature range `(data_max_ - data_min_)` seen in the data.
     *
     * @param result The scikit-learn object to be loaded.
     * @param value  The loaded value from stream.
     */
    private void setDataRange(MinimumMaximumScaler result, NumpyArray value) {
        result.setDataRange(value);
    }

    /**
     * Sets the number of features seen during `fit`.
     *
     * @param result The scikit-learn object to be loaded.
     * @param value  The loaded value from stream.
     */
    private void setNFeaturesIn(MinimumMaximumScaler result, long value) {
        result.setNFeaturesIn(value);
    }

    /**
     * Sets the number of samples processed by the estimator. It will be reset on
     * new calls to fit, but increments across `partial_fit` calls.
     *
     * @param result The scikit-learn object to be loaded.
     * @param value  The loaded value from stream.
     */
    private void setNSamplesSeen(MinimumMaximumScaler result, long value) {
        result.setNSamplesSeen(value);
    }

    /**
     * Sets the names of features seen during `fit`. Defined only when `X` has
     * feature names that are all strings.
     *
     * @param result The scikit-learn object to be loaded.
     * @param value  The loaded value from stream.
     */
    private void setFeatureNamesIn(MinimumMaximumScaler result, String[] value) {
        result.setFeatureNamesIn(value);
    }

    /**
     * Sets the clip field. The flag is serialized as a long; 1 means true.
     *
     * @param result The scikit-learn object to be loaded.
     * @param value  The loaded value from stream.
     */
    private void setClip(MinimumMaximumScaler result, long value) {
        result.setClip(value == 1);
    }

    /**
     * Sets the feature_range field.
     *
     * @param result The scikit-learn object to be loaded.
     * @param value  The loaded value from stream.
     */
    private void setFeatureRange(MinimumMaximumScaler result, List<Object> value) {
        double[] data = new double[value.size()];
        for (int i = 0; i < data.length; i++) {
            // The list elements are boxed numbers whose concrete type depends on
            // the serializer (Integer, Long, Double, ...). Number.doubleValue()
            // converts all of them without a lossy String round-trip.
            data[i] = ((Number) value.get(i)).doubleValue();
        }
        result.setFeatureRange(data);
    }
}
0
java-sources/ai/scikitlearn4x/sklearn4jvm/0.0.4/ai/sklearn4j/core/packaging/loaders/preprocessing
java-sources/ai/scikitlearn4x/sklearn4jvm/0.0.4/ai/sklearn4j/core/packaging/loaders/preprocessing/data/NormalizerContentLoader.java
// ================================================================== // Deserialize Normalizer // // Scaffolded from: https://scikit-learn.org/stable/modules/generated/sklearn.preprocessing.Normalizer.html#sklearn.preprocessing.Normalizer // ================================================================== package ai.sklearn4j.core.packaging.loaders.preprocessing.data; import ai.sklearn4j.core.packaging.loaders.BaseScikitLearnContentLoader; import ai.sklearn4j.core.packaging.loaders.IScikitLearnContentLoader; import ai.sklearn4j.preprocessing.data.Normalizer; /** * Normalizer object loader. */ public class NormalizerContentLoader extends BaseScikitLearnContentLoader<Normalizer> { /** * Instantiate a new object of NormalizerContentLoader. */ public NormalizerContentLoader() { super("pp_normalizer"); } /** * Instantiate an unloaded Normalizer scikit-learn object. * * @return The unloaded scikit-learn object. */ @Override protected Normalizer createResultObject() { return new Normalizer(); } /** * Create a clean instance of the loader. * * @return A clean instance of the loader. */ @Override public IScikitLearnContentLoader duplicate() { return new NormalizerContentLoader(); } /** * Defines the fields that are required to initialize a trained scikit-learn object. */ @Override protected void registerSetters() { // Fields from the documentation registerLongField("n_features", this::setNFeaturesIn); registerStringArrayField("feature_names", this::setFeatureNamesIn); // Fields from the dir() method registerStringField("norm", this::setNorm); } /** * Sets the method for calculating the vectors norm. * * @param result The scikit-learn object to be loaded. * @param value The loaded value from stream. */ private void setNorm(Normalizer result, String value) { result.setNorm(value); } /** * Sets the Number of features seen during `fit`. * * @param result The scikit-learn object to be loaded. * @param value The loaded value from stream. 
*/ private void setNFeaturesIn(Normalizer result, long value) { result.setNFeaturesIn(value); } /** * Sets the Names of features seen during `fit`. Defined only when `X` has feature * names that are all strings. * * @param result The scikit-learn object to be loaded. * @param value The loaded value from stream. */ private void setFeatureNamesIn(Normalizer result, String[] value) { result.setFeatureNamesIn(value); } }
0
java-sources/ai/scikitlearn4x/sklearn4jvm/0.0.4/ai/sklearn4j/core/packaging/loaders/preprocessing
java-sources/ai/scikitlearn4x/sklearn4jvm/0.0.4/ai/sklearn4j/core/packaging/loaders/preprocessing/data/QuantileTransformerContentLoader.java
//// ================================================================== //// Deserialize QuantileTransformer //// //// Scaffolded from: https://scikit-learn.org/stable/modules/generated/sklearn.preprocessing.QuantileTransformer.html#sklearn.preprocessing.QuantileTransformer //// ================================================================== //package ai.sklearn4j.core.packaging.loaders.preprocessing.data; // //import ai.sklearn4j.core.libraries.numpy.NumpyArray; //import ai.sklearn4j.core.packaging.loaders.BaseScikitLearnContentLoader; //import ai.sklearn4j.core.packaging.loaders.IScikitLearnContentLoader; //import ai.sklearn4j.preprocessing.data.QuantileTransformer; // // ///** // * QuantileTransformer object loader. // */ // //public class QuantileTransformerContentLoader extends BaseScikitLearnContentLoader<QuantileTransformer> { // /** // * Instantiate a new object of QuantileTransformerContentLoader. // */ // public QuantileTransformerContentLoader() { // super("pp_quantile_transformer"); // } // // /** // * Instantiate an unloaded QuantileTransformer scikit-learn object. // * // * @return The unloaded scikit-learn object. // */ // @Override // protected QuantileTransformer createResultObject() { // return new QuantileTransformer(); // } // /** // * Create a clean instance of the loader. // * // * @return A clean instance of the loader. // */ // @Override // public IScikitLearnContentLoader duplicate() { // return new QuantileTransformerContentLoader(); // } // /** // * Defines the fields that are required to initialize a trained scikit-learn object. 
// */ // @Override // protected void registerSetters() { // // Fields from the documentation // registerLongField("n_quantiles_", this::setNQuantiles); // registerNumpyArrayField("quantiles_", this::setQuantiles); // registerNumpyArrayField("references_", this::setReferences); // registerLongField("n_features", this::setNFeaturesIn); // registerStringArrayField("feature_names", this::setFeatureNamesIn); // // // Fields from the dir() method // registerLongField("ignore_implicit_zeros", this::setIgnoreImplicitZeros); // registerStringField("output_distribution", this::setOutputDistribution); // registerLongField("subsample", this::setSubsample); // } // // /** // * Sets the The values corresponding the quantiles of reference. // * // * @param result The scikit-learn object to be loaded. // * @param value The loaded value from stream. // */ // private void setQuantiles(QuantileTransformer result, NumpyArray value) { // result.setQuantiles(value); // } // // /** // * Sets the Quantiles of references. // * // * @param result The scikit-learn object to be loaded. // * @param value The loaded value from stream. // */ // private void setReferences(QuantileTransformer result, NumpyArray value) { // result.setReferences(value); // } // // /** // * Sets the Number of features seen during `fit`. // * // * @param result The scikit-learn object to be loaded. // * @param value The loaded value from stream. // */ // private void setNFeaturesIn(QuantileTransformer result, long value) { // result.setNFeaturesIn(value); // } // // /** // * Sets the Names of features seen during `fit`. Defined only when `X` has feature // * names that are all strings. // * // * @param result The scikit-learn object to be loaded. // * @param value The loaded value from stream. // */ // private void setFeatureNamesIn(QuantileTransformer result, String[] value) { // result.setFeatureNamesIn(value); // } // // /** // * Sets the ignore_implicit_zeros field. 
// * // * @param result The scikit-learn object to be loaded. // * @param value The loaded value from stream. // */ // private void setIgnoreImplicitZeros(QuantileTransformer result, long value) { // result.setIgnoreImplicitZeros(value == 1); // } // // /** // * Sets the n_quantiles field. // * // * @param result The scikit-learn object to be loaded. // * @param value The loaded value from stream. // */ // private void setNQuantiles(QuantileTransformer result, long value) { // result.setNQuantiles(value); // } // // /** // * Sets the output_distribution field. // * // * @param result The scikit-learn object to be loaded. // * @param value The loaded value from stream. // */ // private void setOutputDistribution(QuantileTransformer result, String value) { // result.setOutputDistribution(value); // } // // /** // * Sets the subsample field. // * // * @param result The scikit-learn object to be loaded. // * @param value The loaded value from stream. // */ // private void setSubsample(QuantileTransformer result, long value) { // result.setSubsample(value); // } // //}
0
java-sources/ai/scikitlearn4x/sklearn4jvm/0.0.4/ai/sklearn4j/core/packaging/loaders/preprocessing
java-sources/ai/scikitlearn4x/sklearn4jvm/0.0.4/ai/sklearn4j/core/packaging/loaders/preprocessing/data/RobustScalerContentLoader.java
// ==================================================================
// Deserialize RobustScaler
//
// Scaffolded from: https://scikit-learn.org/stable/modules/generated/sklearn.preprocessing.RobustScaler.html#sklearn.preprocessing.RobustScaler
// ==================================================================
package ai.sklearn4j.core.packaging.loaders.preprocessing.data;

import ai.sklearn4j.core.libraries.numpy.NumpyArray;
import ai.sklearn4j.core.packaging.loaders.BaseScikitLearnContentLoader;
import ai.sklearn4j.core.packaging.loaders.IScikitLearnContentLoader;
import ai.sklearn4j.preprocessing.data.RobustScaler;

import java.util.List;

/**
 * RobustScaler object loader.
 */
public class RobustScalerContentLoader extends BaseScikitLearnContentLoader<RobustScaler> {
    /**
     * Instantiate a new object of RobustScalerContentLoader.
     */
    public RobustScalerContentLoader() {
        super("pp_robust_scaler");
    }

    /**
     * Instantiate an unloaded RobustScaler scikit-learn object.
     *
     * @return The unloaded scikit-learn object.
     */
    @Override
    protected RobustScaler createResultObject() {
        return new RobustScaler();
    }

    /**
     * Create a clean instance of the loader.
     *
     * @return A clean instance of the loader.
     */
    @Override
    public IScikitLearnContentLoader duplicate() {
        return new RobustScalerContentLoader();
    }

    /**
     * Defines the fields that are required to initialize a trained scikit-learn object.
     */
    @Override
    protected void registerSetters() {
        // Fields from the documentation
        registerNumpyArrayField("center_", this::setCenter);
        registerNumpyArrayField("scale_", this::setScale);
        registerLongField("n_features", this::setNFeaturesIn);
        registerStringArrayField("feature_names", this::setFeatureNamesIn);

        // Fields from dir()
        registerLongField("with_scaling", this::setWithScaling);
        registerLongField("with_centering", this::setWithCentering);
        registerLongField("unit_variance", this::setUnitVariance);
        registerListField("quantile_range", this::setQuantileRange);
    }

    /**
     * Sets the value of the field `quantile_range`.
     *
     * @param result The scikit-learn object to be loaded.
     * @param value  The loaded value from stream.
     */
    private void setQuantileRange(RobustScaler result, List<Object> value) {
        // The deserializer may box these values as Integer, Long, Float or
        // Double; an unchecked (double) cast throws ClassCastException for
        // anything but Double, so convert through Number instead.
        double[] quantilesRange = new double[2];
        quantilesRange[0] = ((Number) value.get(0)).doubleValue();
        quantilesRange[1] = ((Number) value.get(1)).doubleValue();
        result.setQuantilesRange(quantilesRange);
    }

    /**
     * Sets the value of the field `unit_variance`. The flag is serialized as a
     * long; 1 means true.
     *
     * @param result The scikit-learn object to be loaded.
     * @param value  The loaded value from stream.
     */
    private void setUnitVariance(RobustScaler result, long value) {
        result.setUnitVariance(value == 1);
    }

    /**
     * Sets the value of the field `with_centering`. The flag is serialized as a
     * long; 1 means true.
     *
     * @param result The scikit-learn object to be loaded.
     * @param value  The loaded value from stream.
     */
    private void setWithCentering(RobustScaler result, long value) {
        result.setWithCentering(value == 1);
    }

    /**
     * Sets the value of the field `with_scaling`. The flag is serialized as a
     * long; 1 means true.
     *
     * @param result The scikit-learn object to be loaded.
     * @param value  The loaded value from stream.
     */
    private void setWithScaling(RobustScaler result, long value) {
        result.setWithScaling(value == 1);
    }

    /**
     * Sets the median value for each feature in the training set.
     *
     * @param result The scikit-learn object to be loaded.
     * @param value  The loaded value from stream.
     */
    private void setCenter(RobustScaler result, NumpyArray value) {
        result.setCenter(value);
    }

    /**
     * Sets the (scaled) interquartile range for each feature in the training set.
     *
     * @param result The scikit-learn object to be loaded.
     * @param value  The loaded value from stream.
     */
    private void setScale(RobustScaler result, NumpyArray value) {
        result.setScale(value);
    }

    /**
     * Sets the number of features seen during `fit`.
     *
     * @param result The scikit-learn object to be loaded.
     * @param value  The loaded value from stream.
     */
    private void setNFeaturesIn(RobustScaler result, long value) {
        result.setNFeaturesIn(value);
    }

    /**
     * Sets the names of features seen during `fit`. Defined only when `X` has
     * feature names that are all strings.
     *
     * @param result The scikit-learn object to be loaded.
     * @param value  The loaded value from stream.
     */
    private void setFeatureNamesIn(RobustScaler result, String[] value) {
        result.setFeatureNamesIn(value);
    }
}
0
java-sources/ai/scikitlearn4x/sklearn4jvm/0.0.4/ai/sklearn4j/core/packaging/loaders/preprocessing
java-sources/ai/scikitlearn4x/sklearn4jvm/0.0.4/ai/sklearn4j/core/packaging/loaders/preprocessing/data/StandardScalerContentLoader.java
// ================================================================== // Deserialize StandardScaler // // Scaffolded from: https://scikit-learn.org/stable/modules/generated/sklearn.preprocessing.StandardScaler.html#sklearn.preprocessing.StandardScaler // ================================================================== package ai.sklearn4j.core.packaging.loaders.preprocessing.data; import ai.sklearn4j.core.libraries.numpy.NumpyArray; import ai.sklearn4j.core.packaging.loaders.BaseScikitLearnContentLoader; import ai.sklearn4j.core.packaging.loaders.IScikitLearnContentLoader; import ai.sklearn4j.preprocessing.data.StandardScaler; /** * StandardScaler object loader. */ public class StandardScalerContentLoader extends BaseScikitLearnContentLoader<StandardScaler> { /** * Instantiate a new object of StandardScalerContentLoader. */ public StandardScalerContentLoader() { super("pp_standard_scaler"); } /** * Instantiate an unloaded StandardScaler scikit-learn object. * * @return The unloaded scikit-learn object. */ @Override protected StandardScaler createResultObject() { return new StandardScaler(); } /** * Create a clean instance of the loader. * * @return A clean instance of the loader. */ @Override public IScikitLearnContentLoader duplicate() { return new StandardScalerContentLoader(); } /** * Defines the fields that are required to initialize a trained scikit-learn object. 
*/ @Override protected void registerSetters() { // Fields from the documentation registerNumpyArrayField("scale_", this::setScale); registerNumpyArrayField("mean_", this::setMean); registerNumpyArrayField("var_", this::setVar); registerLongField("n_features", this::setNFeaturesIn); registerStringArrayField("feature_names", this::setFeatureNamesIn); registerNumpyArrayField("n_samples_seen_", this::setNSamplesSeen); // Fields from the dir() method registerLongField("with_mean", this::setWithMean); registerLongField("with_std", this::setWithStd); } /** * Sets the Per feature relative scaling of the data to achieve zero mean and unit * variance. Generally this is calculated using `np.sqrt(var_)`. If a * variance is zero, we can't achieve unit variance, and the data is left * as-is, giving a scaling factor of 1. `scale_` is equal to `None` when * `with_std=False`. * * @param result The scikit-learn object to be loaded. * @param value The loaded value from stream. */ private void setScale(StandardScaler result, NumpyArray value) { result.setScale(value); } /** * Sets the The mean value for each feature in the training set. Equal to `None` * when `with_mean=False`. * * @param result The scikit-learn object to be loaded. * @param value The loaded value from stream. */ private void setMean(StandardScaler result, NumpyArray value) { result.setMean(value); } /** * Sets the The variance for each feature in the training set. Used to compute * `scale_`. Equal to `None` when `with_std=False`. * * @param result The scikit-learn object to be loaded. * @param value The loaded value from stream. */ private void setVar(StandardScaler result, NumpyArray value) { result.setVariance(value); } /** * Sets the Number of features seen during `fit`. * * @param result The scikit-learn object to be loaded. * @param value The loaded value from stream. 
*/ private void setNFeaturesIn(StandardScaler result, long value) { result.setNFeaturesIn(value); } /** * Sets the Names of features seen during `fit`. Defined only when `X` has feature * names that are all strings. * * @param result The scikit-learn object to be loaded. * @param value The loaded value from stream. */ private void setFeatureNamesIn(StandardScaler result, String[] value) { result.setFeatureNamesIn(value); } /** * Sets the The number of samples processed by the estimator for each feature. If * there are no missing samples, the `n_samples_seen` will be an integer, * otherwise it will be an array of dtype int. If `sample_weights` are * used it will be a float (if no missing data) or an array of dtype * float that sums the weights seen so far. Will be reset on new calls to * fit, but increments across `partial_fit` calls. * * @param result The scikit-learn object to be loaded. * @param value The loaded value from stream. */ private void setNSamplesSeen(StandardScaler result, NumpyArray value) { result.setNSamplesSeen(value); } /** * Sets the with_mean field. * * @param result The scikit-learn object to be loaded. * @param value The loaded value from stream. */ private void setWithMean(StandardScaler result, long value) { result.setWithMean(value == 1); } /** * Sets the with_std field. * * @param result The scikit-learn object to be loaded. * @param value The loaded value from stream. */ private void setWithStd(StandardScaler result, long value) { result.setWithStandardDeviation(value == 1); } }
0
java-sources/ai/scikitlearn4x/sklearn4jvm/0.0.4/ai/sklearn4j/core/packaging/loaders/preprocessing
java-sources/ai/scikitlearn4x/sklearn4jvm/0.0.4/ai/sklearn4j/core/packaging/loaders/preprocessing/label/LabelBinarizerContentLoader.java
// ==================================================================
// Deserialize LabelBinarizer
//
// Scaffolded from: https://scikit-learn.org/stable/modules/generated/sklearn.preprocessing.LabelBinarizer.html
// ==================================================================
package ai.sklearn4j.core.packaging.loaders.preprocessing.label;

import ai.sklearn4j.core.packaging.loaders.BaseScikitLearnContentLoader;
import ai.sklearn4j.core.packaging.loaders.IScikitLearnContentLoader;
import ai.sklearn4j.preprocessing.label.LabelBinarizer;

import java.util.List;

/**
 * Loader that restores a serialized scikit-learn LabelBinarizer into a
 * {@link LabelBinarizer} instance.
 */
public class LabelBinarizerContentLoader extends BaseScikitLearnContentLoader<LabelBinarizer> {
    /**
     * Creates a loader bound to the "pp_label_binarizer" serializer type.
     */
    public LabelBinarizerContentLoader() {
        super("pp_label_binarizer");
    }

    /**
     * Creates the empty {@link LabelBinarizer} that the loader populates.
     *
     * @return The unloaded scikit-learn object.
     */
    @Override
    protected LabelBinarizer createResultObject() {
        return new LabelBinarizer();
    }

    /**
     * Returns a fresh, stateless copy of this loader.
     *
     * @return A clean instance of the loader.
     */
    @Override
    public IScikitLearnContentLoader duplicate() {
        return new LabelBinarizerContentLoader();
    }

    /**
     * Registers the field setters required to rebuild a trained LabelBinarizer.
     */
    @Override
    protected void registerSetters() {
        // Fields documented by scikit-learn.
        registerListField("classes_", this::loadClasses);
        registerStringField("y_type_", this::loadYType);

        // Fields discovered through dir().
        registerLongField("neg_label", this::loadNegLabel);
        registerLongField("pos_label", this::loadPosLabel);
    }

    /**
     * Stores the label of each class.
     *
     * @param target  The scikit-learn object being populated.
     * @param classes The value read from the stream.
     */
    private void loadClasses(LabelBinarizer target, List<Object> classes) {
        target.setClasses(classes);
    }

    /**
     * Stores the type of the target data as evaluated by
     * utils.multiclass.type_of_target. Possible types are 'continuous',
     * 'continuous-multioutput', 'binary', 'multiclass',
     * 'multiclass-multioutput', 'multilabel-indicator', and 'unknown'.
     *
     * @param target The scikit-learn object being populated.
     * @param yType  The value read from the stream.
     */
    private void loadYType(LabelBinarizer target, String yType) {
        target.setYType(yType);
    }

    /**
     * Stores the neg_label field.
     *
     * @param target The scikit-learn object being populated.
     * @param label  The value read from the stream.
     */
    private void loadNegLabel(LabelBinarizer target, long label) {
        target.setNegativeLabel(label);
    }

    /**
     * Stores the pos_label field.
     *
     * @param target The scikit-learn object being populated.
     * @param label  The value read from the stream.
     */
    private void loadPosLabel(LabelBinarizer target, long label) {
        target.setPositiveLabel(label);
    }
}
0
java-sources/ai/scikitlearn4x/sklearn4jvm/0.0.4/ai/sklearn4j/core/packaging/loaders/preprocessing
java-sources/ai/scikitlearn4x/sklearn4jvm/0.0.4/ai/sklearn4j/core/packaging/loaders/preprocessing/label/LabelEncoderContentLoader.java
// # ================================================================== // Deserialize LabelEncoder // // Scaffolded from: https://scikit-learn.org/stable/modules/generated/sklearn.preprocessing.LabelEncoder.html#sklearn.preprocessing.LabelEncoder // # ================================================================== package ai.sklearn4j.core.packaging.loaders.preprocessing.label; import ai.sklearn4j.core.packaging.loaders.BaseScikitLearnContentLoader; import ai.sklearn4j.core.packaging.loaders.IScikitLearnContentLoader; import ai.sklearn4j.preprocessing.label.LabelEncoder; import java.util.List; /** * LabelEncoder object loader. */ public class LabelEncoderContentLoader extends BaseScikitLearnContentLoader<LabelEncoder> { /** * Instantiate a new object of LabelEncoderContentLoader. */ public LabelEncoderContentLoader() { super("pp_label_encoder"); } /** * Instantiate an unloaded LabelEncoder scikit-learn object. * * @return The unloaded scikit-learn object. */ @Override protected LabelEncoder createResultObject() { return new LabelEncoder(); } /** * Create a clean instance of the loader. * * @return A clean instance of the loader. */ @Override public IScikitLearnContentLoader duplicate() { return new LabelEncoderContentLoader(); } /** * Defines the fields that are required to initialize a trained scikit-learn object. */ @Override protected void registerSetters() { // Fields from the documentation registerListField("classes_", this::setClasses); // Fields from the dir() method } /** * Sets the Holds the label for each class. * * @param result The scikit-learn object to be loaded. * @param value The loaded value from stream. */ private void setClasses(LabelEncoder result, List<Object> value) { result.setClasses(value); } }
0
java-sources/ai/scikitlearn4x/sklearn4jvm/0.0.4/ai/sklearn4j/core/packaging/loaders/preprocessing
java-sources/ai/scikitlearn4x/sklearn4jvm/0.0.4/ai/sklearn4j/core/packaging/loaders/preprocessing/label/MultiLabelBinarizerContentLoader.java
// ==================================================================
// Deserialize MultiLabelBinarizer
//
// Scaffolded from: https://scikit-learn.org/stable/modules/generated/sklearn.preprocessing.MultiLabelBinarizer.html
// ==================================================================
package ai.sklearn4j.core.packaging.loaders.preprocessing.label;

import ai.sklearn4j.core.packaging.loaders.BaseScikitLearnContentLoader;
import ai.sklearn4j.core.packaging.loaders.IScikitLearnContentLoader;
import ai.sklearn4j.preprocessing.label.MultiLabelBinarizer;

import java.util.List;
import java.util.Map;

/**
 * Loader that restores a serialized scikit-learn MultiLabelBinarizer into a
 * {@link MultiLabelBinarizer} instance.
 */
public class MultiLabelBinarizerContentLoader extends BaseScikitLearnContentLoader<MultiLabelBinarizer> {
    /**
     * Creates a loader bound to the "pp_multilabel_binarizer" serializer type.
     */
    public MultiLabelBinarizerContentLoader() {
        super("pp_multilabel_binarizer");
    }

    /**
     * Creates the empty {@link MultiLabelBinarizer} that the loader populates.
     *
     * @return The unloaded scikit-learn object.
     */
    @Override
    protected MultiLabelBinarizer createResultObject() {
        return new MultiLabelBinarizer();
    }

    /**
     * Returns a fresh, stateless copy of this loader.
     *
     * @return A clean instance of the loader.
     */
    @Override
    public IScikitLearnContentLoader duplicate() {
        return new MultiLabelBinarizerContentLoader();
    }

    /**
     * Registers the field setters required to rebuild a trained
     * MultiLabelBinarizer.
     */
    @Override
    protected void registerSetters() {
        // Fields documented by scikit-learn.
        registerListField("classes_", this::loadClasses);

        // Fields discovered through dir().
        registerDictionaryField("_cached_dict", this::loadCachedDict);
    }

    /**
     * Stores a copy of the `classes` parameter when provided; otherwise the
     * sorted set of classes found during fitting.
     *
     * @param target  The scikit-learn object being populated.
     * @param classes The value read from the stream.
     */
    private void loadClasses(MultiLabelBinarizer target, List<Object> classes) {
        target.setClasses(classes);
    }

    /**
     * Stores the _cached_dict field.
     *
     * @param target The scikit-learn object being populated.
     * @param cache  The value read from the stream.
     */
    private void loadCachedDict(MultiLabelBinarizer target, Map<String, Object> cache) {
        target.setCachedDict(cache);
    }
}
0
java-sources/ai/scikitlearn4x/sklearn4jvm/0.0.4/ai/sklearn4j/core/packaging
java-sources/ai/scikitlearn4x/sklearn4jvm/0.0.4/ai/sklearn4j/core/packaging/version_1/ScikitLearnPackageHeaderV1.java
package ai.sklearn4j.core.packaging.version_1;

import ai.sklearn4j.core.packaging.IScikitLearnPackageHeader;

/**
 * Data holder for the values stored in the header of a binary package file.
 * Default implementation for version 1 of the binary file format.
 *
 * <p>The fields are public because the version-1 reader populates them
 * directly while parsing the header.</p>
 */
public class ScikitLearnPackageHeaderV1 implements IScikitLearnPackageHeader {
    /**
     * Version of the binary package file; stored in the first 4 bytes of the file.
     */
    public int fileFormatVersion;

    /**
     * Version of the sklearn4x library that serialized the models into the file.
     * See: https://pypi.org/project/sklearn4x/
     */
    public String sklearn4xVersion;

    /**
     * Version of the scikit-learn library used to train/prepare the objects
     * contained in the file.
     */
    public String scikitLearnVersion;

    /**
     * Version of the numpy library used to train/prepare the objects contained
     * in the file.
     */
    public String numpyVersion;

    /**
     * Version of the scipy library used to train/prepare the objects contained
     * in the file.
     */
    public String scipyVersion;

    /**
     * Description of the python installation that created the binary package file.
     */
    public String pythonInfo;

    /**
     * Description of the platform that created the binary package file.
     */
    public String platformInfo;

    /**
     * Serializer names used for the primary contents of the package, in order.
     * Internal use only; must not be modified by developers.
     */
    public String[] serializerTypes;

    /**
     * Gets the version of the binary package file; stored in the first 4 bytes
     * of the file.
     *
     * @return Binary package file version.
     */
    @Override
    public int getFileFormatVersion() {
        return fileFormatVersion;
    }

    /**
     * Gets the version of the scikit-learn library used to train/prepare the
     * objects contained in the file.
     *
     * @return scikit-learn library used to train/prepare the objects
     */
    @Override
    public String getScikitLearnVersion() {
        return scikitLearnVersion;
    }
}
0
java-sources/ai/scikitlearn4x/sklearn4jvm/0.0.4/ai/sklearn4j/core/packaging
java-sources/ai/scikitlearn4x/sklearn4jvm/0.0.4/ai/sklearn4j/core/packaging/version_1/ScikitLearnPackageV1.java
package ai.sklearn4j.core.packaging.version_1;

import ai.sklearn4j.core.packaging.BinaryModelPackage;
import ai.sklearn4j.core.packaging.IScikitLearnPackage;
import ai.sklearn4j.core.packaging.IScikitLearnPackageHeader;
import ai.sklearn4j.core.packaging.loaders.IScikitLearnContentLoader;
import ai.sklearn4j.core.packaging.loaders.ScikitLearnContentLoaderFactory;

import java.util.HashMap;
import java.util.Map;

/**
 * In-memory representation of a version-1 binary package file: the parsed
 * header, the deserialized scikit-learn objects, and any developer-provided
 * extra values.
 */
public class ScikitLearnPackageV1 implements IScikitLearnPackage {
    /**
     * The parsed file header.
     */
    private ScikitLearnPackageHeaderV1 header = null;

    /**
     * The deserialized scikit-learn objects, keyed by the name they were saved under.
     */
    private Map<String, Object> primaryContent = null;

    /**
     * Extra values the developer stored alongside the models, if any.
     */
    private Map<String, Object> extras = null;

    /**
     * Gets the object that stores the information provided in the binary package
     * file header, including at minimum the file format version and the
     * scikit-learn version used to train the content.
     *
     * @return An IScikitLearnPackageHeader with the parsed header information.
     */
    public IScikitLearnPackageHeader getPackageHeader() {
        return this.header;
    }

    /**
     * Gets a Map[String: Object] of the extra values stored by the developer
     * when saving the binary package.
     *
     * @return A dictionary of the extra values stored along with the package file.
     */
    public Map<String, Object> getExtraValues() {
        return this.extras;
    }

    /**
     * Gets a primary content object stored in the binary package file.
     *
     * @param name Name of the content to retrieve.
     * @return A scikit-learn object that can now be used in Java.
     */
    public Object getModel(String name) {
        return this.primaryContent.get(name);
    }

    /**
     * Loads the binary package from a file: header first, then the primary
     * content, then the optional extra values.
     *
     * @param path Path of file to be loaded.
     */
    @Override
    public void loadFromFile(String path) {
        BinaryModelPackage stream = BinaryModelPackage.fromFile(path);
        loadFileHeader(stream);
        loadFilePrimaryContent(stream);
        loadFileExtraContent(stream);
    }

    /**
     * Loads the extra values that the developer added to the package file.
     * An empty map is used when the stream has no remaining data.
     *
     * @param stream The wrapper over the input file/stream.
     */
    private void loadFileExtraContent(BinaryModelPackage stream) {
        this.extras = stream.canRead() ? stream.readDictionary() : new HashMap<>();
    }

    /**
     * Loads the primary content of the package. Each entry is stored as a name
     * string followed by the serialized object; the matching loader for each
     * serializer type (recorded in the header) decodes the object.
     *
     * @param stream The wrapper over the input file/stream.
     */
    private void loadFilePrimaryContent(BinaryModelPackage stream) {
        this.primaryContent = new HashMap<>();
        String[] types = this.header.serializerTypes;

        for (int i = 0; i < types.length; i++) {
            IScikitLearnContentLoader loader = ScikitLearnContentLoaderFactory.loaderForType(types[i]);
            String contentName = stream.readString();
            this.primaryContent.put(contentName, loader.loadContent(stream));
        }
    }

    /**
     * Parses the file header into a ScikitLearnPackageHeaderV1: a 4-byte format
     * version followed by a dictionary of version/platform metadata.
     *
     * @param stream The wrapper over the input file/stream.
     */
    private void loadFileHeader(BinaryModelPackage stream) {
        ScikitLearnPackageHeaderV1 result = new ScikitLearnPackageHeaderV1();
        result.fileFormatVersion = stream.readInteger();

        Map<String, Object> values = stream.readDictionary();
        result.sklearn4xVersion = (String) values.get("sklearn4x_version");
        result.scikitLearnVersion = (String) values.get("scikit_learn_version");
        result.numpyVersion = (String) values.get("numpy_version");
        result.scipyVersion = (String) values.get("scipy_version");
        result.pythonInfo = (String) values.get("python_info");
        result.platformInfo = (String) values.get("platform_info");
        result.serializerTypes = (String[]) values.get("serializer_types");

        this.header = result;
    }
}
0
java-sources/ai/scikitlearn4x/sklearn4jvm/0.0.4/ai/sklearn4j
java-sources/ai/scikitlearn4x/sklearn4jvm/0.0.4/ai/sklearn4j/naive_bayes/BaseNaiveBayes.java
package ai.sklearn4j.naive_bayes; import ai.sklearn4j.base.ClassifierMixin; import ai.sklearn4j.core.libraries.Scipy; import ai.sklearn4j.core.libraries.numpy.Numpy; import ai.sklearn4j.core.libraries.numpy.NumpyArray; /** * Abstract base class for naive Bayes estimators */ public abstract class BaseNaiveBayes extends ClassifierMixin { /** * Compute the unnormalized posterior log probability of X. * <p> * I.e. ``log P(c) + log P(x|c)`` for all rows x of X, as an array-like of shape * (n_samples, n_classes). * <p> * predict, predictProbabilities, and predictLogProbabilities pass the input over to * jointLogLikelihood. * * @param x An array-like of shape (n_samples, n_classes). */ protected abstract NumpyArray<Double> jointLogLikelihood(NumpyArray<Double> x); /** * Perform classification on an array of test vectors X. * * @param x Array-like of shape (n_samples, n_features) The input samples. * @return NumpyArray of shape (n_samples,) Predicted target values for X. */ public NumpyArray<Long> predict(NumpyArray<Double> x) { NumpyArray<Double> jll = jointLogLikelihood(x); return Numpy.argmax(jll, 1, false); } /** * Return log-probability estimates for the test vector X. * * @param x array-like of shape (n_samples, n_features) The input samples. * @return array-like of shape (n_samples, n_classes) * Returns the log-probability of the samples for each class in * the model. The columns correspond to the classes in sorted * order, as they appear in the attribute :term:`classes_`. */ public NumpyArray<Double> predictLogProbabilities(NumpyArray<Double> x) { NumpyArray<Double> jll = jointLogLikelihood(x); NumpyArray<Double> logProbabilityOfX = Scipy.logSumExponent(jll, 1); return Numpy.subtract(jll, Numpy.atLeast2D(logProbabilityOfX).transpose()); } /** * Return probability estimates for the test vector X. * * @param x array-like of shape (n_samples, n_features) The input samples. 
* @return array-like of shape (n_samples, n_classes) * Returns the probability of the samples for each class in * the model. The columns correspond to the classes in sorted * order, as they appear in the attribute :term:`classes_`. */ public NumpyArray<Double> predictProbabilities(NumpyArray<Double> x) { return Numpy.exp(predictLogProbabilities(x)); } }
0
java-sources/ai/scikitlearn4x/sklearn4jvm/0.0.4/ai/sklearn4j
java-sources/ai/scikitlearn4x/sklearn4jvm/0.0.4/ai/sklearn4j/naive_bayes/BernoulliNaiveBayes.java
package ai.sklearn4j.naive_bayes; import ai.sklearn4j.core.ScikitLearnCoreException; import ai.sklearn4j.core.libraries.numpy.Numpy; import ai.sklearn4j.core.libraries.numpy.NumpyArray; import ai.sklearn4j.utils.ExtMath; import ai.sklearn4j.utils.Preprocessings; /** * Naive Bayes classifier for bernoulli distributed models. */ public class BernoulliNaiveBayes extends BaseNaiveBayes { /** * Empirical log probability of features given a class, P(x_i|y). */ private NumpyArray<Double> featureLogProbabilities = null; /** * Log probability of each class (smoothed). */ private NumpyArray<Double> classLogPrior = null; /** * Number of samples encountered for each (class, feature) during fitting. This value is weighted by the sample weight when provided. */ private NumpyArray<Double> featureCounts = null; /** * Threshold for binarizing (mapping to booleans) of sample features. If None, input is presumed to already consist of binary vectors. */ private double binarizationThreshold = 0.0; /** * Compute the unnormalized posterior log probability of X. * <p> * I.e. ``log P(c) + log P(x|c)`` for all rows x of X, as an array-like of shape * (n_samples, n_classes). * <p> * predict, predictProbabilities, and predictLogProbabilities pass the input over to * jointLogLikelihood. * * @param x An array-like of shape (n_samples, n_classes). * @return The unnormalized posterior log probability of X. 
*/ @Override protected NumpyArray<Double> jointLogLikelihood(NumpyArray<Double> x) { /** """Calculate the posterior log probability of the samples X""" n_features = self.feature_log_prob_.shape[1] n_features_X = X.shape[1] if n_features_X != n_features: raise ValueError( "Expected input with %d features, got %d instead" % (n_features, n_features_X) ) neg_prob = np.log(1 - np.exp(self.feature_log_prob_)) # Compute neg_prob · (1 - X).T as ∑neg_prob - X · neg_prob jll = safe_sparse_dot(X, (self.feature_log_prob_ - neg_prob).T) jll += self.class_log_prior_ + neg_prob.sum(axis=1) return jll */ x = Preprocessings.binarizeInput(x, binarizationThreshold); int n_features = this.featureLogProbabilities.getShape()[1]; int n_features_X = x.getShape()[1]; if (n_features != n_features_X) { throw new ScikitLearnCoreException(String.format("Expected input with %d features, got %d instead.", n_features, n_features_X)); } NumpyArray<Double> featureProbabilities = Numpy.exp(featureLogProbabilities); NumpyArray<Double> negProb = Numpy.log(Numpy.add(Numpy.multiply(featureProbabilities, -1), 1.0)); // Compute neg_prob · (1 - X).T as ∑neg_prob - X · neg_prob NumpyArray<Double> jll = ExtMath.dot(x, Numpy.subtract(featureLogProbabilities, negProb).transpose()); jll = Numpy.add(jll, Numpy.add(this.classLogPrior, Numpy.sum(negProb, 1, false))); return jll; } /** * Gets the empirical log probability of features given a class, P(x_i|y). * * @return Empirical log probability of features given a class, P(x_i|y). */ public NumpyArray<Double> getFeatureLogProbabilities() { return featureLogProbabilities; } /** * Sets the empirical log probability of features given a class, P(x_i|y). * * @param featureLogProbabilities The empirical log probability of features given a class, P(x_i|y). */ public void setFeatureLogProbabilities(NumpyArray<Double> featureLogProbabilities) { this.featureLogProbabilities = featureLogProbabilities; } /** * Gets the log probability of each class (smoothed). 
* * @return Log probability of each class (smoothed). */ public NumpyArray<Double> getClassLogPrior() { return classLogPrior; } /** * Sets the log probability of each class (smoothed). * * @param classLogPrior The log probability of each class (smoothed). */ public void setClassLogPrior(NumpyArray<Double> classLogPrior) { this.classLogPrior = classLogPrior; } /** * Gets the number of samples encountered for each (class, feature) during fitting. This value is weighted by the sample weight when provided. * * @return Number of samples encountered for each (class, feature) during fitting. This value is weighted by the sample weight when provided. */ public NumpyArray<Double> getFeatureCounts() { return featureCounts; } /** * Sets the number of samples encountered for each (class, feature) during fitting. This value is weighted by the sample weight when provided. * * @param featureCounts The number of samples encountered for each (class, feature) during fitting. This value is weighted by the sample weight when provided. */ public void setFeatureCount(NumpyArray<Double> featureCounts) { this.featureCounts = featureCounts; } /** * Gets the threshold for binarizing (mapping to booleans) of sample features. If None, input * is presumed to already consist of binary vectors. * * @return Threshold for binarizing (mapping to booleans) of sample features. If None, input * is presumed to already consist of binary vectors. */ public double getBinarizationThreshold() { return binarizationThreshold; } /** * Sets the threshold for binarizing (mapping to booleans) of sample features. If None, input is presumed to already consist of binary vectors. * * @param binarizationThreshold The threshold for binarizing (mapping to booleans) of sample features. If None, input is presumed to already consist of binary vectors. */ public void setBinarizationThreshold(double binarizationThreshold) { this.binarizationThreshold = binarizationThreshold; } }
0
java-sources/ai/scikitlearn4x/sklearn4jvm/0.0.4/ai/sklearn4j
java-sources/ai/scikitlearn4x/sklearn4jvm/0.0.4/ai/sklearn4j/naive_bayes/CategoricalNaiveBayes.java
package ai.sklearn4j.naive_bayes;

import ai.sklearn4j.core.libraries.numpy.Numpy;
import ai.sklearn4j.core.libraries.numpy.NumpyArray;
import ai.sklearn4j.core.libraries.numpy.NumpyArrayFactory;

import java.util.List;

/**
 * Naive Bayes classifier for categorical features.
 * <p>
 * The categorical Naive Bayes classifier is suitable for classification with discrete features that
 * are categorically distributed. The categories of each feature are drawn from a categorical
 * distribution.
 */
public class CategoricalNaiveBayes extends BaseNaiveBayes {
    /**
     * Empirical log probability of features given a class, P(x_i|y). One array per feature,
     * each of shape (n_classes, n_categories of that feature).
     */
    private List<NumpyArray<Double>> featureLogProbabilities = null;

    /**
     * Log probability of each class (smoothed).
     */
    private NumpyArray<Double> classLogPrior = null;

    /**
     * Holds arrays of shape (n_classes, n_categories of respective feature) for each feature.
     * Each array provides the number of samples encountered for each class and category of the
     * specific feature.
     */
    private NumpyArray<Double> categoryCounts = null;

    /**
     * Number of categories for each feature. This value is inferred from the data or set by the
     * minimum number of categories.
     */
    private NumpyArray<Long> numberOfCategories = null;

    /**
     * Compute the unnormalized posterior log probability of X.
     * <p>
     * I.e. ``log P(c) + log P(x|c)`` for all rows x of X, as an array-like of shape
     * (n_samples, n_classes). For each feature, the per-class log probability of each
     * sample's category (the feature value cast to an integer index) is accumulated.
     * <p>
     * predict, predictProbabilities, and predictLogProbabilities pass the input over to
     * jointLogLikelihood.
     *
     * @param x An array-like of shape (n_samples, n_features).
     * @return The unnormalized posterior log probability of X, shape (n_samples, n_classes).
     */
    @Override
    protected NumpyArray<Double> jointLogLikelihood(NumpyArray<Double> x) {
        NumpyArray<Double> jll = NumpyArrayFactory.arrayOfDoubleWithShape(new int[]{x.getShape()[0], classCounts.getShape()[0]});

        for (int i = 0; i < getNumberOfFeatures(); i++) {
            // Category index of feature i for every sample.
            int[] indices = getArrayFirstDimension(x, i);
            NumpyArray<Double> logProb = this.featureLogProbabilities.get(i);
            int classCount = classes.getShape()[0];

            // temp[cls][j] = log P(x_j's category of feature i | cls)
            double[][] temp = new double[classCount][indices.length];
            for (int cls = 0; cls < classCount; cls++) {
                for (int j = 0; j < indices.length; j++) {
                    temp[cls][j] = logProb.get(cls, indices[j]);
                }
            }

            // Transpose to (n_samples, n_classes) before accumulating.
            NumpyArray<Double> t = NumpyArrayFactory.from(temp).transpose();
            jll = Numpy.add(jll, t);
        }

        return Numpy.add(jll, classLogPrior);
    }

    /**
     * Gets the values of the first dimension. Equivalent to numpy data[:, secondDimensionIndex],
     * with each value truncated to an int category index.
     *
     * @param x                    Array to be sliced.
     * @param secondDimensionIndex The value of the second dimension.
     * @return The sliced first dimension.
     */
    private int[] getArrayFirstDimension(NumpyArray<Double> x, int secondDimensionIndex) {
        int[] indices = new int[x.getShape()[0]];

        for (int j = 0; j < indices.length; j++) {
            double value = x.get(j, secondDimensionIndex);
            indices[j] = (int) value;
        }
        return indices;
    }

    /**
     * Gets the log probability of each class (smoothed).
     *
     * @return Log probability of each class (smoothed).
     */
    public NumpyArray<Double> getClassLogPrior() {
        return classLogPrior;
    }

    /**
     * Sets the log probability of each class (smoothed).
     *
     * @param classLogPrior The log probability of each class (smoothed).
     */
    public void setClassLogPrior(NumpyArray<Double> classLogPrior) {
        this.classLogPrior = classLogPrior;
    }

    /**
     * Gets the empirical log probability of features given a class, P(x_i|y).
     *
     * @return Empirical log probability of features given a class, P(x_i|y).
     */
    public List<NumpyArray<Double>> getFeatureLogProbabilities() {
        return featureLogProbabilities;
    }

    /**
     * Sets the empirical log probability of features given a class, P(x_i|y).
     *
     * @param featureLogProbabilities The empirical log probability of features given a class, P(x_i|y).
     */
    public void setFeatureLogProbabilities(List<NumpyArray<Double>> featureLogProbabilities) {
        this.featureLogProbabilities = featureLogProbabilities;
    }

    /**
     * Gets the number of samples encountered for each class and category of each feature.
     *
     * @return The per-(class, category) sample counts.
     */
    public NumpyArray<Double> getCategoryCounts() {
        return categoryCounts;
    }

    /**
     * Sets the number of samples encountered for each class and category of each feature.
     *
     * @param categoryCounts The per-(class, category) sample counts.
     */
    public void setCategoryCounts(NumpyArray<Double> categoryCounts) {
        this.categoryCounts = categoryCounts;
    }

    /**
     * Gets the number of categories for each feature.
     *
     * @return Number of categories for each feature.
     */
    public NumpyArray<Long> getNumberOfCategories() {
        return numberOfCategories;
    }

    /**
     * Sets the number of categories for each feature.
     *
     * @param numberOfCategories Number of categories for each feature.
     */
    public void setNumberOfCategories(NumpyArray<Long> numberOfCategories) {
        this.numberOfCategories = numberOfCategories;
    }
}
0
java-sources/ai/scikitlearn4x/sklearn4jvm/0.0.4/ai/sklearn4j
java-sources/ai/scikitlearn4x/sklearn4jvm/0.0.4/ai/sklearn4j/naive_bayes/ComplementNaiveBayes.java
package ai.sklearn4j.naive_bayes; import ai.sklearn4j.core.libraries.numpy.Numpy; import ai.sklearn4j.core.libraries.numpy.NumpyArray; import ai.sklearn4j.utils.ExtMath; /** * Naive Bayes classifier for complement distributed models. */ public class ComplementNaiveBayes extends BaseNaiveBayes { /** * Empirical log probability of features given a class, P(x_i|y). */ private NumpyArray<Double> featureLogProbabilities = null; /** * Log probability of each class (smoothed). */ private NumpyArray<Double> classLogPrior = null; /** * Number of samples encountered for each (class, feature) during fitting. This value is weighted by the sample weight when provided. */ private NumpyArray<Double> featureCounts = null; /** * The value of the feature_all_ field. */ private NumpyArray<Double> featureAll = null; /** * Compute the unnormalized posterior log probability of X. * <p> * I.e. ``log P(c) + log P(x|c)`` for all rows x of X, as an array-like of shape * (n_samples, n_classes). * <p> * predict, predictProbabilities, and predictLogProbabilities pass the input over to * jointLogLikelihood. * * @param x An array-like of shape (n_samples, n_classes). * @return The unnormalized posterior log probability of X. */ @Override protected NumpyArray<Double> jointLogLikelihood(NumpyArray<Double> x) { /* jll = safe_sparse_dot(X, self.feature_log_prob_.T) if len(self.classes_) == 1: jll += self.class_log_prior_ return jll */ NumpyArray<Double> jll = ExtMath.dot(x, featureLogProbabilities.transpose()); if (classes.getShape()[0] == 1) { jll = Numpy.add(jll, classLogPrior); } return jll; } /** * Gets the empirical log probability of features given a class, P(x_i|y). * * @return Empirical log probability of features given a class, P(x_i|y). */ public NumpyArray<Double> getFeatureLogProbabilities() { return featureLogProbabilities; } /** * Sets the empirical log probability of features given a class, P(x_i|y). 
* * @param featureLogProbabilities The empirical log probability of features given a class, P(x_i|y). */ public void setFeatureLogProbabilities(NumpyArray<Double> featureLogProbabilities) { this.featureLogProbabilities = featureLogProbabilities; } /** * Gets the log probability of each class (smoothed). * * @return Log probability of each class (smoothed). */ public NumpyArray<Double> getClassLogPrior() { return classLogPrior; } /** * Sets the log probability of each class (smoothed). * * @param classLogPrior The log probability of each class (smoothed). */ public void setClassLogPrior(NumpyArray<Double> classLogPrior) { this.classLogPrior = classLogPrior; } /** * Gets the number of samples encountered for each (class, feature) during fitting. This value is weighted by the sample weight when provided. * * @return Number of samples encountered for each (class, feature) during fitting. This value is weighted by the sample weight when provided. */ public NumpyArray<Double> getFeatureCounts() { return featureCounts; } /** * Sets the number of samples encountered for each (class, feature) during fitting. This value is weighted by the sample weight when provided. * * @param featureCounts The number of samples encountered for each (class, feature) during fitting. This value is weighted by the sample weight when provided. */ public void setFeatureCount(NumpyArray<Double> featureCounts) { this.featureCounts = featureCounts; } /** * Get the value of the feature_all_ field. * * @return The value of the feature_all_ field. */ public NumpyArray<Double> getFeatureAll() { return featureAll; } /** * Sets the value of the value of the feature_all_ field. * * @param featureAll The value of the feature_all_ field. */ public void setFeatureAll(NumpyArray<Double> featureAll) { this.featureAll = featureAll; } }
0
java-sources/ai/scikitlearn4x/sklearn4jvm/0.0.4/ai/sklearn4j
java-sources/ai/scikitlearn4x/sklearn4jvm/0.0.4/ai/sklearn4j/naive_bayes/GaussianNaiveBayes.java
package ai.sklearn4j.naive_bayes; import ai.sklearn4j.core.Constants; import ai.sklearn4j.core.libraries.numpy.NumpyArray; import ai.sklearn4j.core.libraries.numpy.NumpyArrayFactory; import ai.sklearn4j.core.libraries.numpy.wrappers.Dim2DoubleNumpyWrapper; /** * Naive Bayes classifier for normal distributed models. */ public class GaussianNaiveBayes extends BaseNaiveBayes { /** * The prior probability of each class. * Dimension: (class_count) */ private NumpyArray<Double> classPriors = null; /** * The user provided class priors. If specified, the priors are not adjusted according to the data. * Dimension: (class_count) */ private NumpyArray<Double> priors = null; /** * Variance of each feature per class. * Dimension: (n_classes, n_features) */ private NumpyArray<Double> sigma = null; /** * Mean of each feature per class. * Dimension: (n_classes, n_features) */ private NumpyArray<Double> theta = null; /** * Compute the unnormalized posterior log probability of X. * <p> * I.e. ``log P(c) + log P(x|c)`` for all rows x of X, as an array-like of shape * (n_samples, n_classes). * <p> * predict, predictProbabilities, and predictLogProbabilities pass the input over to * jointLogLikelihood. * * @param x An array-like of shape (n_samples, n_classes). * @return The unnormalized posterior log probability of X. 
*/ @Override protected NumpyArray<Double> jointLogLikelihood(NumpyArray<Double> x) { int count = x.getShape()[0]; int classCount = classCounts.getShape()[0]; int featureCount = sigma.getShape()[1]; double[][] jointLogLikelihood = new double[count][classCount]; double[][] variance = ((Dim2DoubleNumpyWrapper) sigma.getWrapper()).getArray(); double[][] mean = ((Dim2DoubleNumpyWrapper) theta.getWrapper()).getArray(); for (int cls = 0; cls < classCount; cls++) { double sumOfLogVariance = 0; for (int feature = 0; feature < featureCount; feature++) { sumOfLogVariance += Math.log(2.0 * Constants.PI * variance[cls][feature]); } double jointi = Math.log(classPriors.get(cls)); for (int i = 0; i < count; i++) { double value = 0; for (int feature = 0; feature < featureCount; feature++) { double diff = x.get(i, feature) - mean[cls][feature]; value += (Math.pow(x.get(i, feature) - mean[cls][feature], 2) / variance[cls][feature]); } value = -0.5 * (sumOfLogVariance + value); jointLogLikelihood[i][cls] = value + jointi; } } return NumpyArrayFactory.from(jointLogLikelihood); } /** * Gets the class priors. * * @return The value of class priors. */ public NumpyArray<Double> getClassPriors() { return classPriors; } /** * Sets the class priors. * * @param classPriors New value to be stored. */ public void setClassPriors(NumpyArray<Double> classPriors) { this.classPriors = classPriors; } /** * Gets the priors. * * @return The value of priors. */ public NumpyArray<Double> getPriors() { return priors; } /** * Sets the priors. * * @param priors New value to be stored. */ public void setPriors(NumpyArray<Double> priors) { this.priors = priors; } /** * Gets the variance of the features. * * @return The value of variance of the features. */ public NumpyArray<Double> getSigma() { return sigma; } /** * Sets the variance of the features. * * @param sigma New value to be stored. */ public void setSigma(NumpyArray<Double> sigma) { this.sigma = sigma; } /** * Gets the mean of the features. 
* * @return The value of mean of the features. */ public NumpyArray<Double> getTheta() { return theta; } /** * Sets the mean of the features. * * @param theta New value to be stored. */ public void setTheta(NumpyArray<Double> theta) { this.theta = theta; } }
0
java-sources/ai/scikitlearn4x/sklearn4jvm/0.0.4/ai/sklearn4j
java-sources/ai/scikitlearn4x/sklearn4jvm/0.0.4/ai/sklearn4j/naive_bayes/MultinomialNaiveBayes.java
package ai.sklearn4j.naive_bayes; import ai.sklearn4j.core.libraries.numpy.Numpy; import ai.sklearn4j.core.libraries.numpy.NumpyArray; import ai.sklearn4j.utils.ExtMath; /** * Naive Bayes classifier for multinomial distributed models. */ public class MultinomialNaiveBayes extends BaseNaiveBayes { /** * Empirical log probability of features given a class, P(x_i|y). */ private NumpyArray<Double> featureLogProbabilities = null; /** * Log probability of each class (smoothed). */ private NumpyArray<Double> classLogPrior = null; /** * Number of samples encountered for each (class, feature) during fitting. This value is weighted by the sample weight when provided. */ private NumpyArray<Double> featureCounts = null; /** * Compute the unnormalized posterior log probability of X. * <p> * I.e. ``log P(c) + log P(x|c)`` for all rows x of X, as an array-like of shape * (n_samples, n_classes). * <p> * predict, predictProbabilities, and predictLogProbabilities pass the input over to * jointLogLikelihood. * * @param x An array-like of shape (n_samples, n_classes). * @return The unnormalized posterior log probability of X. */ @Override protected NumpyArray<Double> jointLogLikelihood(NumpyArray<Double> x) { return Numpy.add(ExtMath.dot(x, featureLogProbabilities.transpose()), classLogPrior); } /** * Gets the empirical log probability of features given a class, P(x_i|y). * * @return Empirical log probability of features given a class, P(x_i|y). */ public NumpyArray<Double> getFeatureLogProbabilities() { return featureLogProbabilities; } /** * Sets the empirical log probability of features given a class, P(x_i|y). * * @param featureLogProbabilities The empirical log probability of features given a class, P(x_i|y). */ public void setFeatureLogProbabilities(NumpyArray<Double> featureLogProbabilities) { this.featureLogProbabilities = featureLogProbabilities; } /** * Gets the log probability of each class (smoothed). * * @return Log probability of each class (smoothed). 
*/ public NumpyArray<Double> getClassLogPrior() { return classLogPrior; } /** * Sets the log probability of each class (smoothed). * * @param classLogPrior The log probability of each class (smoothed). */ public void setClassLogPrior(NumpyArray<Double> classLogPrior) { this.classLogPrior = classLogPrior; } /** * Gets the number of samples encountered for each (class, feature) during fitting. This value is weighted by the sample weight when provided. * * @return Number of samples encountered for each (class, feature) during fitting. This value is weighted by the sample weight when provided. */ public NumpyArray<Double> getFeatureCounts() { return featureCounts; } /** * Sets the number of samples encountered for each (class, feature) during fitting. This value is weighted by the sample weight when provided. * * @param featureCounts The number of samples encountered for each (class, feature) during fitting. This value is weighted by the sample weight when provided. */ public void setFeatureCount(NumpyArray<Double> featureCounts) { this.featureCounts = featureCounts; } }
0
java-sources/ai/scikitlearn4x/sklearn4jvm/0.0.4/ai/sklearn4j/preprocessing
java-sources/ai/scikitlearn4x/sklearn4jvm/0.0.4/ai/sklearn4j/preprocessing/data/Binarizer.java
// ================================================================== // Inference for Binarizer // // Scaffolded from: https://scikit-learn.org/stable/modules/generated/sklearn.preprocessing.Binarizer.html#sklearn.preprocessing.Binarizer // ================================================================== package ai.sklearn4j.preprocessing.data; import ai.sklearn4j.base.TransformerMixin; import ai.sklearn4j.core.ScikitLearnCoreException; import ai.sklearn4j.core.libraries.numpy.NumpyArray; import ai.sklearn4j.core.libraries.numpy.NumpyArrayFactory; /** * Binarize data (set feature values to 0 or 1) according to a * threshold. * Values greater than the threshold map to 1, while values less than or * equal to the threshold map to 0. With the default threshold of 0, only * positive values map to 1. * Binarization is a common operation on text count data where the * analyst can decide to only consider the presence or absence of a * feature rather than a quantified number of occurrences for instance. * It can also be used as a pre-processing step for estimators that * consider boolean random variables (e.g. modelled using the Bernoulli * distribution in a Bayesian setting). */ public class Binarizer extends TransformerMixin<NumpyArray<Double>, NumpyArray<Double>> { /** * Instantiate a new object of Binarizer. */ public Binarizer() { } /** * Number of features seen during `fit`. */ private long nFeaturesIn = 0; /** * Names of features seen during `fit`. Defined only when `X` has feature * names that are all strings. */ private String[] featureNamesIn = null; /** * Feature values below or equal to this are replaced by 0, above it by 1. Threshold * may not be less than 0 for operations on sparse matrices. */ private double threshold = 0.0; /** * Sets the Number of features seen during `fit`. * * @param value The new value for nFeaturesIn. */ public void setNFeaturesIn(long value) { this.nFeaturesIn = value; } /** * Gets the Number of features seen during `fit`. 
*/ public long getNFeaturesIn() { return this.nFeaturesIn; } /** * Sets the Names of features seen during `fit`. Defined only when `X` has feature * names that are all strings. * * @param value The new value for featureNamesIn. */ public void setFeatureNamesIn(String[] value) { this.featureNamesIn = value; } /** * Gets the Names of features seen during `fit`. Defined only when `X` has feature * names that are all strings. */ public String[] getFeatureNamesIn() { return this.featureNamesIn; } /** * Gets the threshold for binarization. * @return The threshold for binarization. */ public double getThreshold() { return threshold; } /** * Sets the threshold for binarization. * @param value The threshold for binarization. */ public void setThreshold(double value) { this.threshold = value; } /** * Takes the input array and transforms it. * * @param array The array to transform. * @return The transformed array. */ @Override public NumpyArray<Double> transform(NumpyArray<Double> array) { NumpyArray<Double> result = NumpyArrayFactory.createArrayOfShapeAndTypeInfo(array); array.applyToEachElementAnsSaveToTarget(result, value -> { if (value > threshold) { return 1.0; } return 0.0; }); return result; } /** * Takes a transformed array and reveres the transformation. * * @param array The array to apply reveres transform. * @return The inversed transform of array. */ @Override public NumpyArray<Double> inverseTransform(NumpyArray<Double> array) { throw new ScikitLearnCoreException("The inverse transform is not available for the Binarizer preprocessing."); } }
0
java-sources/ai/scikitlearn4x/sklearn4jvm/0.0.4/ai/sklearn4j/preprocessing
java-sources/ai/scikitlearn4x/sklearn4jvm/0.0.4/ai/sklearn4j/preprocessing/data/MaximumAbsoluteScaler.java
// ================================================================== // Inference for MaxAbsScaler // // Scaffolded from: https://scikit-learn.org/stable/modules/generated/sklearn.preprocessing.MaxAbsScaler.html#sklearn.preprocessing.MaxAbsScaler // ================================================================== package ai.sklearn4j.preprocessing.data; import ai.sklearn4j.base.TransformerMixin; import ai.sklearn4j.core.libraries.numpy.Numpy; import ai.sklearn4j.core.libraries.numpy.NumpyArray; /** * Scale each feature by its maximum absolute value. * This estimator scales and translates each feature individually such * that the maximal absolute value of each feature in the training set * will be 1.0. It does not shift/center the data, and thus does not * destroy any sparsity. * This scaler can also be applied to sparse CSR or CSC matrices. */ public class MaximumAbsoluteScaler extends TransformerMixin<NumpyArray<Double>, NumpyArray<Double>> { /** * Instantiate a new object of MaximumAbsoluteScaler. */ public MaximumAbsoluteScaler() { } /** * Per feature relative scaling of the data. */ private NumpyArray scale = null; /** * Per feature maximum absolute value. */ private NumpyArray maxAbs = null; /** * Number of features seen during `fit`. */ private long nFeaturesIn = 0; /** * Names of features seen during `fit`. Defined only when `X` has feature * names that are all strings. */ private String[] featureNamesIn = null; /** * The number of samples processed by the estimator. Will be reset on new * calls to fit, but increments across `partial_fit` calls. */ private long nSamplesSeen = 0; /** * Sets the Per feature relative scaling of the data. * * @param value The new value for scale. */ public void setScale(NumpyArray value) { this.scale = value; } /** * Gets the Per feature relative scaling of the data. */ public NumpyArray getScale() { return this.scale; } /** * Sets the Per feature maximum absolute value. * * @param value The new value for maxAbs. 
*/ public void setMaxAbs(NumpyArray value) { this.maxAbs = value; } /** * Gets the Per feature maximum absolute value. */ public NumpyArray getMaxAbs() { return this.maxAbs; } /** * Sets the Number of features seen during `fit`. * * @param value The new value for nFeaturesIn. */ public void setNFeaturesIn(long value) { this.nFeaturesIn = value; } /** * Gets the Number of features seen during `fit`. */ public long getNFeaturesIn() { return this.nFeaturesIn; } /** * Sets the Names of features seen during `fit`. Defined only when `X` has feature * names that are all strings. * * @param value The new value for featureNamesIn. */ public void setFeatureNamesIn(String[] value) { this.featureNamesIn = value; } /** * Gets the Names of features seen during `fit`. Defined only when `X` has feature * names that are all strings. */ public String[] getFeatureNamesIn() { return this.featureNamesIn; } /** * Sets the The number of samples processed by the estimator. Will be reset on new * calls to fit, but increments across `partial_fit` calls. * * @param value The new value for nSamplesSeen. */ public void setNSamplesSeen(long value) { this.nSamplesSeen = value; } /** * Gets the The number of samples processed by the estimator. Will be reset on new * calls to fit, but increments across `partial_fit` calls. */ public long getNSamplesSeen() { return this.nSamplesSeen; } /** * Takes the input array and transforms it. * * @param array The array to transform. * @return The transformed array. */ @Override public NumpyArray<Double> transform(NumpyArray<Double> array) { return Numpy.divide(array, scale); } /** * Takes a transformed array and reveres the transformation. * * @param array The array to apply reveres transform. * @return The inversed transform of array. */ @Override public NumpyArray<Double> inverseTransform(NumpyArray<Double> array) { return Numpy.multiply(array, scale); } }
0
java-sources/ai/scikitlearn4x/sklearn4jvm/0.0.4/ai/sklearn4j/preprocessing
java-sources/ai/scikitlearn4x/sklearn4jvm/0.0.4/ai/sklearn4j/preprocessing/data/MinimumMaximumScaler.java
// ================================================================== // Inference for MinMaxScaler // // Scaffolded from: https://scikit-learn.org/stable/modules/generated/sklearn.preprocessing.MinMaxScaler.html#sklearn.preprocessing.MinMaxScaler // ================================================================== package ai.sklearn4j.preprocessing.data; import ai.sklearn4j.base.TransformerMixin; import ai.sklearn4j.core.libraries.numpy.Numpy; import ai.sklearn4j.core.libraries.numpy.NumpyArray; /** * Transform features by scaling each feature to a given range. * This estimator scales and translates each feature individually such * that it is in the given range on the training set, e.g. between zero * and one. * The transformation is given by:: * X_std = (X - X.min(axis=0)) / (X.max(axis=0) - X.min(axis=0)) * X_scaled = X_std * (max - min) + min * where min, max = feature_range. * This transformation is often used as an alternative to zero mean, unit * variance scaling. */ public class MinimumMaximumScaler extends TransformerMixin<NumpyArray<Double>, NumpyArray<Double>> { /** * Instantiate a new object of MinimumMaximumScaler. */ public MinimumMaximumScaler() { } /** * Per feature adjustment for minimum. Equivalent to `min - X.min(axis=0) * * self.scale_` */ private NumpyArray<Double> min = null; /** * Per feature relative scaling of the data. Equivalent to `(max - min) / * (X.max(axis=0) - X.min(axis=0))` */ private NumpyArray<Double> scale = null; /** * Per feature minimum seen in the data */ private NumpyArray<Double> dataMin = null; /** * Per feature maximum seen in the data */ private NumpyArray<Double> dataMax = null; /** * Per feature range `(data_max_ - data_min_)` seen in the data */ private NumpyArray<Double> dataRange = null; /** * Number of features seen during `fit`. */ private long nFeaturesIn = 0; /** * The number of samples processed by the estimator. It will be reset on * new calls to fit, but increments across `partial_fit` calls. 
*/ private long nSamplesSeen = 0; /** * Names of features seen during `fit`. Defined only when `X` has feature * names that are all strings. */ private String[] featureNamesIn = null; /** * Internal field of scikit-learn object. */ private boolean clip = false; /** * Internal field of scikit-learn object. */ private double[] featureRange = null; /** * Sets the Per feature adjustment for minimum. Equivalent to `min - X.min(axis=0) * * self.scale_` * * @param value The new value for min. */ public void setMin(NumpyArray value) { this.min = value; } /** * Gets the Per feature adjustment for minimum. Equivalent to `min - X.min(axis=0) * * self.scale_` */ public NumpyArray getMin() { return this.min; } /** * Sets the Per feature relative scaling of the data. Equivalent to `(max - min) / * (X.max(axis=0) - X.min(axis=0))` * * @param value The new value for scale. */ public void setScale(NumpyArray value) { this.scale = value; } /** * Gets the Per feature relative scaling of the data. Equivalent to `(max - min) / * (X.max(axis=0) - X.min(axis=0))` */ public NumpyArray getScale() { return this.scale; } /** * Sets the Per feature minimum seen in the data * * @param value The new value for dataMin. */ public void setDataMin(NumpyArray value) { this.dataMin = value; } /** * Gets the Per feature minimum seen in the data */ public NumpyArray getDataMin() { return this.dataMin; } /** * Sets the Per feature maximum seen in the data * * @param value The new value for dataMax. */ public void setDataMax(NumpyArray value) { this.dataMax = value; } /** * Gets the Per feature maximum seen in the data */ public NumpyArray getDataMax() { return this.dataMax; } /** * Sets the Per feature range `(data_max_ - data_min_)` seen in the data * * @param value The new value for dataRange. 
*/ public void setDataRange(NumpyArray value) { this.dataRange = value; } /** * Gets the Per feature range `(data_max_ - data_min_)` seen in the data */ public NumpyArray getDataRange() { return this.dataRange; } /** * Sets the Number of features seen during `fit`. * * @param value The new value for nFeaturesIn. */ public void setNFeaturesIn(long value) { this.nFeaturesIn = value; } /** * Gets the Number of features seen during `fit`. */ public long getNFeaturesIn() { return this.nFeaturesIn; } /** * Sets the The number of samples processed by the estimator. It will be reset on * new calls to fit, but increments across `partial_fit` calls. * * @param value The new value for nSamplesSeen. */ public void setNSamplesSeen(long value) { this.nSamplesSeen = value; } /** * Gets the The number of samples processed by the estimator. It will be reset on * new calls to fit, but increments across `partial_fit` calls. */ public long getNSamplesSeen() { return this.nSamplesSeen; } /** * Sets the Names of features seen during `fit`. Defined only when `X` has feature * names that are all strings. * * @param value The new value for featureNamesIn. */ public void setFeatureNamesIn(String[] value) { this.featureNamesIn = value; } /** * Gets the Names of features seen during `fit`. Defined only when `X` has feature * names that are all strings. */ public String[] getFeatureNamesIn() { return this.featureNamesIn; } /** * Sets the value of Clip * * @param value The new value for Clip. */ public void setClip(boolean value) { this.clip = value; } /** * Gets the value of Clip */ public boolean getClip() { return this.clip; } /** * Sets the value of FeatureRange * * @param value The new value for FeatureRange. */ public void setFeatureRange(double[] value) { this.featureRange = value; } /** * Gets the value of FeatureRange */ public double[] getFeatureRange() { return this.featureRange; } /** * Takes the input array and transforms it. * * @param array The array to transform. 
* @return The transformed array. */ @Override public NumpyArray<Double> transform(NumpyArray<Double> array) { NumpyArray<Double> result = Numpy.multiply(array, scale); result = Numpy.add(result, min); if (clip) { result = Numpy.clip(result, featureRange[0], featureRange[1]); } return result; } /** * Takes a transformed array and reveres the transformation. * * @param array The array to apply reveres transform. * @return The inversed transform of array. */ @Override public NumpyArray<Double> inverseTransform(NumpyArray<Double> array) { NumpyArray<Double> result = Numpy.subtract(array, min); result = Numpy.divide(result, scale); return result; } }
0
java-sources/ai/scikitlearn4x/sklearn4jvm/0.0.4/ai/sklearn4j/preprocessing
java-sources/ai/scikitlearn4x/sklearn4jvm/0.0.4/ai/sklearn4j/preprocessing/data/Normalizer.java
// ================================================================== // Inference for Normalizer // // Scaffolded from: https://scikit-learn.org/stable/modules/generated/sklearn.preprocessing.Normalizer.html#sklearn.preprocessing.Normalizer // ================================================================== package ai.sklearn4j.preprocessing.data; import ai.sklearn4j.base.TransformerMixin; import ai.sklearn4j.core.ScikitLearnCoreException; import ai.sklearn4j.core.libraries.numpy.Numpy; import ai.sklearn4j.core.libraries.numpy.NumpyArray; import ai.sklearn4j.core.libraries.numpy.NumpyArrayFactory; import ai.sklearn4j.utils.ExtMath; /** * Normalize samples individually to unit norm. * Each sample (i.e. each row of the data matrix) with at least one non * zero component is rescaled independently of other samples so that its * norm (l1, l2 or inf) equals one. * This transformer is able to work both with dense numpy arrays and * scipy.sparse matrix (use CSR format if you want to avoid the burden of * a copy / conversion). * Scaling inputs to unit norms is a common operation for text * classification or clustering for instance. For instance the dot * product of two l2-normalized TF-IDF vectors is the cosine similarity * of the vectors and is the base similarity metric for the Vector Space * Model commonly used by the Information Retrieval community. */ public class Normalizer extends TransformerMixin<NumpyArray<Double>, NumpyArray<Double>> { /** * Instantiate a new object of Normalizer. */ public Normalizer() { } /** * Number of features seen during `fit`. */ private long nFeaturesIn = 0; /** * Names of features seen during `fit`. Defined only when `X` has feature * names that are all strings. */ private String[] featureNamesIn = null; /** * The norm to use to normalize each non zero sample. If norm=’max’ is used, values will * be rescaled by the maximum of the absolute values. */ private String norm = null; /** * Sets the Number of features seen during `fit`. 
* * @param value The new value for nFeaturesIn. */ public void setNFeaturesIn(long value) { this.nFeaturesIn = value; } /** * Gets the Number of features seen during `fit`. */ public long getNFeaturesIn() { return this.nFeaturesIn; } /** * Sets the Names of features seen during `fit`. Defined only when `X` has feature * names that are all strings. * * @param value The new value for featureNamesIn. */ public void setFeatureNamesIn(String[] value) { this.featureNamesIn = value; } /** * Gets the Names of features seen during `fit`. Defined only when `X` has feature * names that are all strings. */ public String[] getFeatureNamesIn() { return this.featureNamesIn; } /** * Gets the type of norm that the object performs. The value is either l1, l2, or max. * * @return The norm applied by the Normalizer. */ public String getNorm() { return norm; } /** * Sets the type of norm that the object performs. The value is either l1, l2, or max. * @param norm The type of norm, either l1, l2, or max. */ public void setNorm(String norm) { this.norm = norm; } /** * Takes the input array and transforms it. * * @param array The array to transform. * @return The transformed array. */ @Override public NumpyArray<Double> transform(NumpyArray<Double> array) { // if norm == "l1": // norms = np.abs(X).sum(axis=1) // elif norm == "l2": // norms = row_norms(X) // elif norm == "max": // norms = np.max(abs(X), axis=1) // norms = _handle_zeros_in_scale(norms, copy=False) // X /= norms[:, np.newaxis] NumpyArray<Double> result = null; NumpyArray<Double> norms = null; if ("l1".equals(norm)) { norms = Numpy.sum(Numpy.abs(array), 1, false); } else if ("l2".equals(norm)) { norms = ExtMath.rowNorm(array); } else if ("max".equals(norm)) { norms = Numpy.arrayMax(Numpy.abs(array), 1, false); } handleZerosInScale(norms); result = Numpy.divide(array, addTrailingOneDimension(norms)); return result; } /** * Adds a trailing dimension of 1 to the end of the shape. Effectively, transforms a (n,) array * to (n, 1). 
* @param array The input array. * @return The array with an additional 1 dimension at the end of the shape. */ private NumpyArray<Double> addTrailingOneDimension(NumpyArray<Double> array) { double[][] result = new double[array.getShape()[0]][1]; for (int i = 0; i < result.length; i++) { result[i][0] = array.get(i); } return NumpyArrayFactory.from(result); } /** * Set scales of near constant features to 1. * <p> * The goal is to avoid division by very small or zero values. * <p> * Near constant features are detected automatically by identifying scales close to machine * precision unless they are precomputed by the caller and passed with the `constant_mask` kwarg. * <p> * Typically for standard scaling, the scales are the standard deviation while near constant * features are better detected on the computed variances which are closer to machine precision * by construction. * * @param array The array to normalize the zeros. */ private void handleZerosInScale(NumpyArray<Double> array) { double epsilon = 2.220446049250313e-16; // np.finfo(np.float64).eps final double threshold = 10 * epsilon; array.applyToEachElement(value -> { if (value < threshold) { return 1.0; } return value; }); } /** * Takes a transformed array and reveres the transformation. * * @param array The array to apply reveres transform. * @return The inversed transform of array. */ @Override public NumpyArray<Double> inverseTransform(NumpyArray<Double> array) { throw new ScikitLearnCoreException("The inverse transform is not available for the Normalizer preprocessing."); } }
0
java-sources/ai/scikitlearn4x/sklearn4jvm/0.0.4/ai/sklearn4j/preprocessing
java-sources/ai/scikitlearn4x/sklearn4jvm/0.0.4/ai/sklearn4j/preprocessing/data/QuantileTransformer.java
//// ================================================================== //// Inference for QuantileTransformer //// //// Scaffolded from: https://scikit-learn.org/stable/modules/generated/sklearn.preprocessing.QuantileTransformer.html#sklearn.preprocessing.QuantileTransformer //// ================================================================== //package ai.sklearn4j.preprocessing.data; // //import ai.sklearn4j.base.TransformerMixin; //import ai.sklearn4j.core.libraries.Scipy; //import ai.sklearn4j.core.libraries.numpy.NumpyArray; //import ai.sklearn4j.core.libraries.numpy.NumpyArrayFactory; // ///** // * Transform features using quantiles information. // * This method transforms the features to follow a uniform or a normal // * distribution. Therefore, for a given feature, this transformation // * tends to spread out the most frequent values. It also reduces the // * impact of (marginal) outliers: this is therefore a robust // * preprocessing scheme. // * The transformation is applied on each feature independently. First an // * estimate of the cumulative distribution function of a feature is used // * to map the original values to a uniform distribution. The obtained // * values are then mapped to the desired output distribution using the // * associated quantile function. Features values of new/unseen data that // * fall below or above the fitted range will be mapped to the bounds of // * the output distribution. Note that this transform is non-linear. It // * may distort linear correlations between variables measured at the same // * scale but renders variables measured at different scales more directly // * comparable. // */ // //public class QuantileTransformer extends TransformerMixin<NumpyArray<Double>, NumpyArray<Double>> { // /** // * Instantiate a new object of QuantileTransformer. // */ // public QuantileTransformer() { // // } // // /** // * The actual number of quantiles used to discretize the cumulative // * distribution function. 
// */ // private long nQuantiles = 0; // // /** // * The values corresponding the quantiles of reference. // */ // private NumpyArray<Double> quantiles = null; // // /** // * Quantiles of references. // */ // private NumpyArray references = null; // // /** // * Number of features seen during `fit`. // */ // private long nFeaturesIn = 0; // // /** // * Names of features seen during `fit`. Defined only when `X` has feature // * names that are all strings. // */ // private String[] featureNamesIn = null; // // /** // * Internal field of scikit-learn object. // */ // private boolean ignoreImplicitZeros = false; // // /** // * Internal field of scikit-learn object. // */ // private String outputDistribution = null; // // /** // * Internal field of scikit-learn object. // */ // private long subsample = 0; // // /** // * Sets the The actual number of quantiles used to discretize the cumulative // * distribution function. // * // * @param value The new value for nQuantiles. // */ // public void setNQuantiles(long value) { // this.nQuantiles = value; // } // // // /** // * Gets the The actual number of quantiles used to discretize the cumulative // * distribution function. // */ // public long getNQuantiles() { // return this.nQuantiles; // } // // // /** // * Sets the The values corresponding the quantiles of reference. // * // * @param value The new value for quantiles. // */ // public void setQuantiles(NumpyArray value) { // this.quantiles = value; // } // // // /** // * Gets the The values corresponding the quantiles of reference. // */ // public NumpyArray getQuantiles() { // return this.quantiles; // } // // // /** // * Sets the Quantiles of references. // * // * @param value The new value for references. // */ // public void setReferences(NumpyArray value) { // this.references = value; // } // // // /** // * Gets the Quantiles of references. 
// */ // public NumpyArray getReferences() { // return this.references; // } // // // /** // * Sets the Number of features seen during `fit`. // * // * @param value The new value for nFeaturesIn. // */ // public void setNFeaturesIn(long value) { // this.nFeaturesIn = value; // } // // // /** // * Gets the Number of features seen during `fit`. // */ // public long getNFeaturesIn() { // return this.nFeaturesIn; // } // // // /** // * Sets the Names of features seen during `fit`. Defined only when `X` has feature // * names that are all strings. // * // * @param value The new value for featureNamesIn. // */ // public void setFeatureNamesIn(String[] value) { // this.featureNamesIn = value; // } // // // /** // * Gets the Names of features seen during `fit`. Defined only when `X` has feature // * names that are all strings. // */ // public String[] getFeatureNamesIn() { // return this.featureNamesIn; // } // // // /** // * Sets the value of IgnoreImplicitZeros // * // * @param value The new value for IgnoreImplicitZeros. // */ // public void setIgnoreImplicitZeros(boolean value) { // this.ignoreImplicitZeros = value; // } // // // /** // * Gets the value of IgnoreImplicitZeros // */ // public boolean getIgnoreImplicitZeros() { // return this.ignoreImplicitZeros; // } // // /** // * Sets the value of OutputDistribution // * // * @param value The new value for OutputDistribution. // */ // public void setOutputDistribution(String value) { // this.outputDistribution = value; // } // // // /** // * Gets the value of OutputDistribution // */ // public String getOutputDistribution() { // return this.outputDistribution; // } // // // /** // * Sets the value of Subsample // * // * @param value The new value for Subsample. 
// */ // public void setSubsample(long value) { // this.subsample = value; // } // // // /** // * Gets the value of Subsample // */ // public long getSubsample() { // return this.subsample; // } // // // @Override // public NumpyArray<Double> transform(NumpyArray<Double> array) { // return innerTransform(array, false); // } // // @Override // public NumpyArray<Double> inverseTransform(NumpyArray<Double> array) { // return innerTransform(array, true); // } // // private NumpyArray<Double> innerTransform(NumpyArray<Double> array, boolean inverse) { // NumpyArray<Double> result = NumpyArrayFactory.createArrayOfShapeAndTypeInfo(array); // // for (int columnIndex = 0; columnIndex < array.getShape()[1]; columnIndex++) { // transformColumn(result, array, columnIndex, inverse); // } // // return result; // } // // /** // * Private function to transform a single feature. // * // * @param result The array to store the transformation into. // * @param array The input array to transform. // * @param columnIndex The index of the column to apply transform. // * @param inverse Specify if it is a transform or inverseTransform method. 
// */ // private void transformColumn(NumpyArray<Double> result, NumpyArray<Double> array, int columnIndex, boolean inverse) { // String output_distribution = this.outputDistribution; // double lower_bound_x = 0; // double upper_bound_x = 0; // double lower_bound_y = 0; // double upper_bound_y = 0; // double[] X_col = extractColumn(array, columnIndex); // // if (!inverse) { // lower_bound_x = quantiles.get(0); // upper_bound_x = quantiles.get(quantiles.getShape()[0] - 1); // lower_bound_y = 0; // upper_bound_y = 1; // } else { // lower_bound_x = 0; // upper_bound_x = 1; // lower_bound_y = quantiles.get(0); // upper_bound_y = quantiles.get(quantiles.getShape()[0] - 1); // // // for inverse transform, match a uniform distribution // if ("normal".equals(outputDistribution)) { // X_col = Scipy.NormalDistribution.cumulativeDistributionFunction(X_col); // // else output distribution is already a uniform distribution // } // } // ///* // // # find index for lower and higher bounds // with np.errstate(invalid="ignore"): # hide NaN comparison warnings // if output_distribution == "normal": // lower_bounds_idx = X_col - BOUNDS_THRESHOLD < lower_bound_x // upper_bounds_idx = X_col + BOUNDS_THRESHOLD > upper_bound_x // if output_distribution == "uniform": // lower_bounds_idx = X_col == lower_bound_x // upper_bounds_idx = X_col == upper_bound_x // // isfinite_mask = ~np.isnan(X_col) // X_col_finite = X_col[isfinite_mask] // if not inverse: // # Interpolate in one direction and in the other and take the // # mean. This is in case of repeated values in the features // # and hence repeated quantiles // # // # If we don't do this, only one extreme of the duplicated is // # used (the upper when we do ascending, and the // # lower for descending). 
We take the mean of these two // X_col[isfinite_mask] = 0.5 * ( // np.interp(X_col_finite, quantiles, self.references_) // - np.interp(-X_col_finite, -quantiles[::-1], -self.references_[::-1]) // ) // else: // X_col[isfinite_mask] = np.interp(X_col_finite, self.references_, quantiles) // // X_col[upper_bounds_idx] = upper_bound_y // X_col[lower_bounds_idx] = lower_bound_y // # for forward transform, match the output distribution // if not inverse: // with np.errstate(invalid="ignore"): # hide NaN comparison warnings // if output_distribution == "normal": // X_col = stats.norm.ppf(X_col) // # find the value to clip the data to avoid mapping to // # infinity. Clip such that the inverse transform will be // # consistent // clip_min = stats.norm.ppf(BOUNDS_THRESHOLD - np.spacing(1)) // clip_max = stats.norm.ppf(1 - (BOUNDS_THRESHOLD - np.spacing(1))) // X_col = np.clip(X_col, clip_min, clip_max) // # else output distribution is uniform and the ppf is the // # identity function so we let X_col unchanged // // return X_col // */ // } // // private double[] extractColumn(NumpyArray<Double> array, int columnIndex) { // double[] X_col = new double[array.getShape()[0]]; // to get from the result // for (int i = 0; i < array.getShape()[1]; i++) { // X_col[i] = array.get(i, columnIndex); // } // return X_col; // } // //}
0
java-sources/ai/scikitlearn4x/sklearn4jvm/0.0.4/ai/sklearn4j/preprocessing
java-sources/ai/scikitlearn4x/sklearn4jvm/0.0.4/ai/sklearn4j/preprocessing/data/RobustScaler.java
// ================================================================== // Inference for RobustScaler // // Scaffolded from: https://scikit-learn.org/stable/modules/generated/sklearn.preprocessing.RobustScaler.html#sklearn.preprocessing.RobustScaler // ================================================================== package ai.sklearn4j.preprocessing.data; import ai.sklearn4j.base.TransformerMixin; import ai.sklearn4j.core.libraries.numpy.Numpy; import ai.sklearn4j.core.libraries.numpy.NumpyArray; /** * Scale features using statistics that are robust to outliers. * This Scaler removes the median and scales the data according to the * quantile range (defaults to IQR: Interquartile Range). The IQR is the * range between the 1st quartile (25th quantile) and the 3rd quartile * (75th quantile). * Centering and scaling happen independently on each feature by * computing the relevant statistics on the samples in the training set. * Median and interquartile range are then stored to be used on later * data using the :meth:`transform` method. * Standardization of a dataset is a common requirement for many machine * learning estimators. Typically this is done by removing the mean and * scaling to unit variance. However, outliers can often influence the * sample mean / variance in a negative way. In such cases, the median * and the interquartile range often give better results. * .. versionadded:: 0.17 */ public class RobustScaler extends TransformerMixin<NumpyArray<Double>, NumpyArray<Double>> { /** * Instantiate a new object of RobustScaler. */ public RobustScaler() { } /** * The median value for each feature in the training set. */ private NumpyArray center = null; /** * The (scaled) interquartile range for each feature in the training * set. */ private NumpyArray scale = null; /** * If True, center the data before scaling. */ private boolean withCentering = true; /** * If True, scale the data to interquartile range. 
*/ private boolean withScaling = true; /** * If True, scale data so that normally distributed features have a variance of 1. In general, * if the difference between the x-values of q_max and q_min for a standard normal distribution * is greater than 1, the dataset will be scaled down. If less than 1, the dataset will be scaled * up. */ private boolean unitVariance = true; /** * Quantile range used to calculate scale_. By default this is equal to the IQR, i.e., q_min is * the first quantile and q_max is the third quantile. */ private double[] quantilesRange = null; /** * Number of features seen during `fit`. */ private long nFeaturesIn = 0; /** * Names of features seen during `fit`. Defined only when `X` has feature * names that are all strings. */ private String[] featureNamesIn = null; /** * Sets the The median value for each feature in the training set. * * @param value The new value for center. */ public void setCenter(NumpyArray value) { this.center = value; } /** * Gets the The median value for each feature in the training set. */ public NumpyArray getCenter() { return this.center; } /** * Sets the The (scaled) interquartile range for each feature in the training * set. * * @param value The new value for scale. */ public void setScale(NumpyArray value) { this.scale = value; } /** * Gets the The (scaled) interquartile range for each feature in the training * set. */ public NumpyArray getScale() { return this.scale; } /** * Sets the Number of features seen during `fit`. * * @param value The new value for nFeaturesIn. */ public void setNFeaturesIn(long value) { this.nFeaturesIn = value; } /** * Gets the Number of features seen during `fit`. */ public long getNFeaturesIn() { return this.nFeaturesIn; } /** * Sets the Names of features seen during `fit`. Defined only when `X` has feature * names that are all strings. * * @param value The new value for featureNamesIn. 
*/ public void setFeatureNamesIn(String[] value) { this.featureNamesIn = value; } /** * Gets the Names of features seen during `fit`. Defined only when `X` has feature * names that are all strings. */ public String[] getFeatureNamesIn() { return this.featureNamesIn; } /** * Gets if the transformer centers the data before scaling. * @return If True, center the data before scaling. */ public boolean isWithCentering() { return withCentering; } /** * Sets if the transformer centers the data before scaling. * @param withCentering True for centering before scaling. */ public void setWithCentering(boolean withCentering) { this.withCentering = withCentering; } /** * Gets if the transformer scale the data to interquartile range. * * @return If True, scale the data to interquartile range. */ public boolean isWithScaling() { return withScaling; } /** * Sets if the transformer scale the data to interquartile range. * * @param withScaling True to scale the data to interquartile range. */ public void setWithScaling(boolean withScaling) { this.withScaling = withScaling; } /** * Gets if scale data so that normally distributed features have a variance of 1. * * @return True if scale data so that normally distributed features have a variance of 1, * otherwise false. */ public boolean isUnitVariance() { return unitVariance; } /** * Sets if scale data so that normally distributed features have a variance of 1. * @param unitVariance True to scale, otherwise false. */ public void setUnitVariance(boolean unitVariance) { this.unitVariance = unitVariance; } /** * Gets the quantile range used to calculate scale. * @return The rage of the quantile. */ public double[] getQuantilesRange() { return quantilesRange; } /** * Sets the quantile range used to calculate scale * @param quantilesRange A double[] array specifying the min and max of the range. */ public void setQuantilesRange(double[] quantilesRange) { this.quantilesRange = quantilesRange; } /** * Takes the input array and transforms it. 
* * @param array The array to transform. * @return The transformed array. */ @Override public NumpyArray<Double> transform(NumpyArray<Double> array) { NumpyArray<Double> result = array; if (withCentering) { result = Numpy.subtract(result, center); } if (withScaling) { result = Numpy.divide(result, scale); } return result; } /** * Takes a transformed array and reveres the transformation. * * @param array The array to apply reveres transform. * @return The inversed transform of array. */ @Override public NumpyArray<Double> inverseTransform(NumpyArray<Double> array) { NumpyArray<Double> result = array; if (withScaling) { result = Numpy.multiply(result, scale); } if (withCentering) { result = Numpy.add(result, center); } return result; } }
0
java-sources/ai/scikitlearn4x/sklearn4jvm/0.0.4/ai/sklearn4j/preprocessing
java-sources/ai/scikitlearn4x/sklearn4jvm/0.0.4/ai/sklearn4j/preprocessing/data/StandardScaler.java
// ================================================================== // Inference for StandardScaler // // Scaffolded from: https://scikit-learn.org/stable/modules/generated/sklearn.preprocessing.StandardScaler.html#sklearn.preprocessing.StandardScaler // ================================================================== package ai.sklearn4j.preprocessing.data; import ai.sklearn4j.base.TransformerMixin; import ai.sklearn4j.core.libraries.numpy.Numpy; import ai.sklearn4j.core.libraries.numpy.NumpyArray; /** * Standardize features by removing the mean and scaling to unit * variance. * The standard score of a sample `x` is calculated as: * z = (x - u) / s * where `u` is the mean of the training samples or zero if * `with_mean=False`, and `s` is the standard deviation of the training * samples or one if `with_std=False`. * Centering and scaling happen independently on each feature by * computing the relevant statistics on the samples in the training set. * Mean and standard deviation are then stored to be used on later data * using :meth:`transform`. * Standardization of a dataset is a common requirement for many machine * learning estimators: they might behave badly if the individual * features do not more or less look like standard normally distributed * data (e.g. Gaussian with 0 mean and unit variance). * For instance many elements used in the objective function of a * learning algorithm (such as the RBF kernel of Support Vector Machines * or the L1 and L2 regularizers of linear models) assume that all * features are centered around 0 and have variance in the same order. If * a feature has a variance that is orders of magnitude larger than * others, it might dominate the objective function and make the * estimator unable to learn from other features correctly as expected. * This scaler can also be applied to sparse CSR or CSC matrices by * passing `with_mean=False` to avoid breaking the sparsity structure of * the data. 
*/ public class StandardScaler extends TransformerMixin<NumpyArray<Double>, NumpyArray<Double>> { /** * Instantiate a new object of StandardScaler. */ public StandardScaler() { } /** * Per feature relative scaling of the data to achieve zero mean and unit * variance. Generally this is calculated using `np.sqrt(var_)`. If a * variance is zero, we can't achieve unit variance, and the data is left * as-is, giving a scaling factor of 1. `scale_` is equal to `None` when * `with_std=False`. */ private NumpyArray<Double> scale = null; /** * The mean value for each feature in the training set. Equal to `None` * when `with_mean=False`. */ private NumpyArray<Double> mean = null; /** * The variance for each feature in the training set. Used to compute * `scale_`. Equal to `None` when `with_std=False`. */ private NumpyArray<Double> variance = null; /** * Number of features seen during `fit`. */ private long nFeaturesIn = 0; /** * Names of features seen during `fit`. Defined only when `X` has feature * names that are all strings. */ private String[] featureNamesIn = null; /** * The number of samples processed by the estimator for each feature. If * there are no missing samples, the `n_samples_seen` will be an integer, * otherwise it will be an array of dtype int. If `sample_weights` are * used it will be a float (if no missing data) or an array of dtype * float that sums the weights seen so far. Will be reset on new calls to * fit, but increments across `partial_fit` calls. */ private NumpyArray<Long> nSamplesSeen = null; /** * Internal field of scikit-learn object. */ private boolean withMean = true; /** * Internal field of scikit-learn object. */ private boolean withStd = true; /** * Sets the Per feature relative scaling of the data to achieve zero mean and unit * variance. Generally this is calculated using `np.sqrt(var_)`. If a * variance is zero, we can't achieve unit variance, and the data is left * as-is, giving a scaling factor of 1. 
`scale_` is equal to `None` when * `with_std=False`. * * @param value The new value for scale. */ public void setScale(NumpyArray<Double> value) { this.scale = value; } /** * Gets the Per feature relative scaling of the data to achieve zero mean and unit * variance. Generally this is calculated using `np.sqrt(var_)`. If a * variance is zero, we can't achieve unit variance, and the data is left * as-is, giving a scaling factor of 1. `scale_` is equal to `None` when * `with_std=False`. */ public NumpyArray<Double> getScale() { return this.scale; } /** * Sets the The mean value for each feature in the training set. Equal to `None` * when `with_mean=False`. * * @param value The new value for mean. */ public void setMean(NumpyArray<Double> value) { this.mean = value; } /** * Gets the The mean value for each feature in the training set. Equal to `None` * when `with_mean=False`. */ public NumpyArray<Double> getMean() { return this.mean; } /** * Sets the The variance for each feature in the training set. Used to compute * `scale_`. Equal to `None` when `with_std=False`. * * @param value The new value for var. */ public void setVariance(NumpyArray<Double> value) { this.variance = value; } /** * Gets the The variance for each feature in the training set. Used to compute * `scale_`. Equal to `None` when `with_std=False`. */ public NumpyArray<Double> getVariance() { return this.variance; } /** * Sets the Number of features seen during `fit`. * * @param value The new value for nFeaturesIn. */ public void setNFeaturesIn(long value) { this.nFeaturesIn = value; } /** * Gets the Number of features seen during `fit`. */ public long getNFeaturesIn() { return this.nFeaturesIn; } /** * Sets the Names of features seen during `fit`. Defined only when `X` has feature * names that are all strings. * * @param value The new value for featureNamesIn. */ public void setFeatureNamesIn(String[] value) { this.featureNamesIn = value; } /** * Gets the Names of features seen during `fit`. 
Defined only when `X` has feature * names that are all strings. */ public String[] getFeatureNamesIn() { return this.featureNamesIn; } /** * Sets the The number of samples processed by the estimator for each feature. If * there are no missing samples, the `n_samples_seen` will be an integer, * otherwise it will be an array of dtype int. If `sample_weights` are * used it will be a float (if no missing data) or an array of dtype * float that sums the weights seen so far. Will be reset on new calls to * fit, but increments across `partial_fit` calls. * * @param value The new value for nSamplesSeen. */ public void setNSamplesSeen(NumpyArray<Long> value) { this.nSamplesSeen = value; } /** * Gets the The number of samples processed by the estimator for each feature. If * there are no missing samples, the `n_samples_seen` will be an integer, * otherwise it will be an array of dtype int. If `sample_weights` are * used it will be a float (if no missing data) or an array of dtype * float that sums the weights seen so far. Will be reset on new calls to * fit, but increments across `partial_fit` calls. */ public NumpyArray<Long> getNSamplesSeen() { return this.nSamplesSeen; } /** * Sets the value of WithMean * * @param value The new value for WithMean. */ public void setWithMean(boolean value) { this.withMean = value; } /** * Gets the value of WithMean */ public boolean getWithMean() { return this.withMean; } /** * Sets the value of WithStd * * @param value The new value for WithStd. */ public void setWithStandardDeviation(boolean value) { this.withStd = value; } /** * Gets the value of WithStd */ public boolean getWithStandardDeviation() { return this.withStd; } /** * Takes the input array and transforms it. * * @param array The array to transform. * @return The transformed array. 
*/ @Override public NumpyArray<Double> transform(NumpyArray<Double> array) { NumpyArray<Double> result = array; if (withMean) { result = Numpy.subtract(result, mean); } if (withStd) { result = Numpy.divide(result, scale); } return result; } /** * Takes a transformed array and reveres the transformation. * * @param array The array to apply reveres transform. * @return The inversed transform of array. */ @Override public NumpyArray<Double> inverseTransform(NumpyArray<Double> array) { NumpyArray<Double> result = array; if (withStd) { result = Numpy.multiply(result, scale); } if (withMean) { result = Numpy.add(result, mean); } return result; } }
0
java-sources/ai/scikitlearn4x/sklearn4jvm/0.0.4/ai/sklearn4j/preprocessing
java-sources/ai/scikitlearn4x/sklearn4jvm/0.0.4/ai/sklearn4j/preprocessing/label/LabelBinarizer.java
// ==================================================================
// Inference for LabelBinarizer
//
// Scaffolded from: https://scikit-learn.org/stable/modules/generated/sklearn.preprocessing.LabelBinarizer.html
// ==================================================================
package ai.sklearn4j.preprocessing.label;

import ai.sklearn4j.base.TransformerMixin;
import ai.sklearn4j.core.ScikitLearnCoreException;
import ai.sklearn4j.core.ScikitLearnFeatureNotImplementedException;
import ai.sklearn4j.core.libraries.numpy.NumpyArray;
import ai.sklearn4j.core.libraries.numpy.NumpyArrayFactory;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Binarize labels in a one-vs-all fashion.
 * Several regression and binary classification algorithms are available
 * in scikit-learn. A simple way to extend these algorithms to the
 * multi-class classification case is to use the so-called one-vs-all
 * scheme.
 * At learning time, this simply consists in learning one regressor or
 * binary classifier per class. In doing so, one needs to convert
 * multi-class labels to binary labels (belong or does not belong to the
 * class). LabelBinarizer makes this process easy with the transform
 * method.
 * At prediction time, one assigns the class for which the corresponding
 * model gave the greatest confidence. LabelBinarizer makes this easy
 * with the inverse_transform method.
 */
public class LabelBinarizer extends TransformerMixin<List<Object>, NumpyArray<Long>> {
    /**
     * Constant value for y_type binary.
     */
    private static final String Y_TYPE_BINARY = "binary";

    /**
     * Constant value for y_type multi class.
     */
    private static final String Y_TYPE_MULTICLASS = "multiclass";

    /**
     * Instantiate a new object of LabelBinarizer.
     */
    public LabelBinarizer() {
    }

    /**
     * Holds the label for each class.
     */
    private List<Object> classes = null;

    /**
     * Represents the type of the target data as evaluated by
     * utils.multiclass.type_of_target. Possible type are 'continuous',
     * 'continuous-multioutput', 'binary', 'multiclass',
     * 'multiclass-multioutput', 'multilabel-indicator', and 'unknown'.
     */
    private String yType = null;

    /**
     * Internal field of scikit-learn object. Encoded value emitted for
     * "not this class".
     */
    private long negativeLabel = 0;

    /**
     * Internal field of scikit-learn object. Encoded value emitted for
     * "this class".
     */
    private long positiveLabel = 1;

    /**
     * Sets the Holds the label for each class.
     *
     * @param value The new value for classes.
     */
    public void setClasses(List<Object> value) {
        this.classes = value;
    }

    /**
     * Gets the Holds the label for each class.
     */
    public List<Object> getClasses() {
        return this.classes;
    }

    /**
     * Sets the Represents the type of the target data as evaluated by
     * utils.multiclass.type_of_target. Possible type are 'continuous',
     * 'continuous-multioutput', 'binary', 'multiclass',
     * 'multiclass-multioutput', 'multilabel-indicator', and 'unknown'.
     *
     * @param value The new value for yType.
     */
    public void setYType(String value) {
        this.yType = value;
    }

    /**
     * Gets the Represents the type of the target data as evaluated by
     * utils.multiclass.type_of_target. Possible type are 'continuous',
     * 'continuous-multioutput', 'binary', 'multiclass',
     * 'multiclass-multioutput', 'multilabel-indicator', and 'unknown'.
     */
    public String getYType() {
        return this.yType;
    }

    /**
     * Sets the value of NegLabel.
     *
     * @param value The new value for NegLabel.
     */
    public void setNegativeLabel(long value) {
        this.negativeLabel = value;
    }

    /**
     * Gets the value of NegLabel.
     */
    public long getNegativeLabel() {
        return this.negativeLabel;
    }

    /**
     * Sets the value of PosLabel.
     *
     * @param value The new value for PosLabel.
     */
    public void setPositiveLabel(long value) {
        this.positiveLabel = value;
    }

    /**
     * Gets the value of PosLabel.
     */
    public long getPositiveLabel() {
        return this.positiveLabel;
    }

    /**
     * Builds the mapping from each fitted class label to its position in
     * {@link #classes}. Shared by the binary and multiclass transforms.
     *
     * @return Map of class label to zero-based class index.
     */
    private Map<Object, Integer> buildClassIndexMap() {
        Map<Object, Integer> mapper = new HashMap<>();
        for (Object cls : classes) {
            mapper.put(cls, mapper.size());
        }
        return mapper;
    }

    /**
     * Takes the input array and transforms it, dispatching on the fitted
     * yType.
     *
     * @param array The array to transform.
     * @return The transformed array.
     * @throws ScikitLearnFeatureNotImplementedException If yType is neither binary nor multiclass.
     */
    @Override
    public NumpyArray<Long> transform(List<Object> array) {
        if (yType.equals(Y_TYPE_BINARY)) {
            return transformBinary(array);
        } else if (yType.equals(Y_TYPE_MULTICLASS)) {
            return transformMulticlass(array);
        } else {
            throw new ScikitLearnFeatureNotImplementedException(String.format("The yType=%s in LabelBinarizer is not implemented.", yType));
        }
    }

    /**
     * Transforms a list of labels into a binary format. Since there are only two possible
     * values, the length of the encoded is 1: class index 0 maps to negativeLabel, class
     * index 1 maps to positiveLabel.
     *
     * @param array The input label list to binarize.
     * @return The transformed array of shape (n_samples, 1).
     * @throws ScikitLearnCoreException If a label was not seen during training.
     */
    private NumpyArray<Long> transformBinary(List<Object> array) {
        Map<Object, Integer> mapper = buildClassIndexMap();

        NumpyArray<Long> result = NumpyArrayFactory.arrayOfInt64WithShape(new int[]{array.size(), 1});

        int i = 0;
        for (Object obj : array) {
            if (mapper.containsKey(obj)) {
                int index = mapper.get(obj);
                result.set(index == 0 ? negativeLabel : positiveLabel, i, 0);
                i++;
            } else {
                // Fixed: the message previously blamed "LabelEncoder training".
                throw new ScikitLearnCoreException(String.format("The class '%s' was not defined during the LabelBinarizer training.", obj.toString()));
            }
        }

        return result;
    }

    /**
     * Transforms a list of labels into a multiclass format. Since there are multiple possible
     * values, the length of the encoded is the number of classes, but only one of them is
     * positiveLabel; all others are negativeLabel.
     *
     * @param array The input label list to binarize.
     * @return The transformed array of shape (n_samples, n_classes).
     * @throws ScikitLearnCoreException If a label was not seen during training.
     */
    private NumpyArray<Long> transformMulticlass(List<Object> array) {
        Map<Object, Integer> mapper = buildClassIndexMap();

        NumpyArray<Long> result = NumpyArrayFactory.arrayOfInt64WithShape(new int[]{array.size(), classes.size()});

        int i = 0;
        int classCount = classes.size();
        for (Object obj : array) {
            if (mapper.containsKey(obj)) {
                int index = mapper.get(obj);
                for (int j = 0; j < classCount; j++) {
                    if (j == index) {
                        result.set(positiveLabel, i, j);
                    } else {
                        result.set(negativeLabel, i, j);
                    }
                }
                i++;
            } else {
                // Fixed: the message previously blamed "LabelEncoder training".
                throw new ScikitLearnCoreException(String.format("The class '%s' was not defined during the LabelBinarizer training.", obj.toString()));
            }
        }

        return result;
    }

    /**
     * Takes a transformed array and reverses the transformation, dispatching
     * on the fitted yType.
     *
     * @param array The array to apply reverse transform.
     * @return The inverse transform of array.
     * @throws ScikitLearnFeatureNotImplementedException If yType is neither binary nor multiclass.
     */
    @Override
    public List<Object> inverseTransform(NumpyArray<Long> array) {
        if (yType.equals(Y_TYPE_BINARY)) {
            return inverseTransformBinary(array);
        } else if (yType.equals(Y_TYPE_MULTICLASS)) {
            return inverseTransformMulticlass(array);
        } else {
            throw new ScikitLearnFeatureNotImplementedException(String.format("The yType=%s in LabelBinarizer is not implemented.", yType));
        }
    }

    /**
     * Reverse the transformation on a binary encoded label column. Any value
     * other than negativeLabel is treated as the positive class.
     *
     * @param array The binary encoded labels.
     * @return List of object that better represents the labels.
     */
    private List<Object> inverseTransformBinary(NumpyArray<Long> array) {
        List<Object> result = new ArrayList<>();
        long[][] values = (long[][]) array.getWrapper().getRawArray();

        for (int i = 0; i < values.length; i++) {
            int cls = (int) values[i][0];
            result.add(cls == negativeLabel ? classes.get(0) : classes.get(1));
        }

        return result;
    }

    /**
     * Reverse the transformation on a multiclass encoded label column.
     *
     * @param array The multiclass encoded labels.
     * @return List of object that better represents the labels.
     * @throws ScikitLearnCoreException If a row contains no positiveLabel or an out-of-range index.
     */
    private List<Object> inverseTransformMulticlass(NumpyArray<Long> array) {
        List<Object> result = new ArrayList<>();
        long[][] values = (long[][]) array.getWrapper().getRawArray();

        for (int i = 0; i < values.length; i++) {
            int cls = getPositiveLabelIndex(values[i]);
            if (cls < 0 || cls >= classes.size()) {
                throw new ScikitLearnCoreException(String.format("The class '%d' is not in valid range.", cls));
            } else {
                result.add(classes.get(cls));
            }
        }

        return result;
    }

    /**
     * Gets which index holds the class presence. This only works for multiclass binarized columns.
     *
     * @param value The binarized value.
     * @return Index of the class, or -1 when no element equals positiveLabel.
     */
    private int getPositiveLabelIndex(long[] value) {
        int result = -1;

        for (int i = 0; i < value.length; i++) {
            if (value[i] == positiveLabel) {
                result = i;
                break;
            }
        }

        return result;
    }
}
0
java-sources/ai/scikitlearn4x/sklearn4jvm/0.0.4/ai/sklearn4j/preprocessing
java-sources/ai/scikitlearn4x/sklearn4jvm/0.0.4/ai/sklearn4j/preprocessing/label/LabelEncoder.java
// ==================================================================
// Inference for LabelEncoder
//
// Scaffolded from: https://scikit-learn.org/stable/modules/generated/sklearn.preprocessing.LabelEncoder.html#sklearn.preprocessing.LabelEncoder
// ==================================================================
package ai.sklearn4j.preprocessing.label;

import ai.sklearn4j.base.TransformerMixin;
import ai.sklearn4j.core.ScikitLearnCoreException;
import ai.sklearn4j.core.libraries.numpy.NumpyArray;
import ai.sklearn4j.core.libraries.numpy.NumpyArrayFactory;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Encode target labels with value between 0 and n_classes-1.
 * This transformer should be used to encode target values, *i.e.* `y`,
 * and not the input `X`.
 */
public class LabelEncoder extends TransformerMixin<List<Object>, NumpyArray<Long>> {
    /**
     * Instantiate a new object of LabelEncoder.
     */
    public LabelEncoder() {
    }

    /**
     * Holds the label for each class.
     */
    private List<Object> classes = null;

    /**
     * Sets the label for each class.
     *
     * @param value The new value for classes.
     */
    public void setClasses(List<Object> value) {
        this.classes = value;
    }

    /**
     * Gets the label for each class.
     */
    public List<Object> getClasses() {
        return this.classes;
    }

    /**
     * Transform labels to normalized encoding: each label is replaced by its
     * zero-based position among the fitted classes.
     *
     * @param array array-like of shape (n_samples,) Target values.
     * @return array-like of shape (n_samples,) Labels as normalized encodings.
     * @throws ScikitLearnCoreException If a label was not seen during training.
     */
    @Override
    public NumpyArray<Long> transform(List<Object> array) {
        Map<Object, Long> lookup = new HashMap<>();
        for (Object cls : classes) {
            lookup.put(cls, (long) lookup.size());
        }

        long[] encoded = new long[array.size()];
        for (int position = 0; position < array.size(); position++) {
            Object label = array.get(position);
            Long code = lookup.get(label);
            if (code == null) {
                throw new ScikitLearnCoreException(String.format("The class '%s' was not defined during the LabelEncoder training.", label.toString()));
            }
            encoded[position] = code;
        }

        return NumpyArrayFactory.from(encoded);
    }

    /**
     * Takes a transformed array and reverses the transformation: each encoded
     * index is mapped back to the corresponding fitted class label.
     *
     * @param array The array to apply reverse transform.
     * @return The inverse transform of array.
     * @throws ScikitLearnCoreException If an encoded value is outside the fitted class range.
     */
    @Override
    public List<Object> inverseTransform(NumpyArray<Long> array) {
        long[] codes = (long[]) array.getWrapper().getRawArray();
        List<Object> labels = new ArrayList<>();

        for (long code : codes) {
            int index = (int) code;
            if (index < 0 || index >= classes.size()) {
                throw new ScikitLearnCoreException(String.format("The class '%d' is not in valid range.", index));
            }
            labels.add(classes.get(index));
        }

        return labels;
    }
}
0
java-sources/ai/scikitlearn4x/sklearn4jvm/0.0.4/ai/sklearn4j/preprocessing
java-sources/ai/scikitlearn4x/sklearn4jvm/0.0.4/ai/sklearn4j/preprocessing/label/MultiLabelBinarizer.java
// ==================================================================
// Inference for MultiLabelBinarizer
//
// Scaffolded from: https://scikit-learn.org/stable/modules/generated/sklearn.preprocessing.MultiLabelBinarizer.html
// ==================================================================
package ai.sklearn4j.preprocessing.label;

import ai.sklearn4j.base.TransformerMixin;
import ai.sklearn4j.core.ScikitLearnCoreException;
import ai.sklearn4j.core.libraries.numpy.NumpyArray;
import ai.sklearn4j.core.libraries.numpy.NumpyArrayFactory;

import java.util.*;

/**
 * Transform between iterable of iterables and a multilabel format.
 * Although a list of sets or tuples is a very intuitive format for
 * multilabel data, it is unwieldy to process. This transformer converts
 * between this intuitive format and the supported multilabel format: a
 * (samples x classes)
 */
public class MultiLabelBinarizer extends TransformerMixin<List<Set<Object>>, NumpyArray<Long>> {
    /**
     * Instantiate a new object of MultiLabelBinarizer.
     */
    public MultiLabelBinarizer() {
    }

    /**
     * A copy of the `classes` parameter when provided. Otherwise it
     * corresponds to the sorted set of classes found when fitting.
     */
    private List<Object> classes = null;

    /**
     * Internal field of scikit-learn object.
     */
    private Map<String, Object> cachedDict = null;

    /**
     * Sets the A copy of the `classes` parameter when provided. Otherwise it
     * corresponds to the sorted set of classes found when fitting.
     *
     * @param value The new value for classes.
     */
    public void setClasses(List<Object> value) {
        this.classes = value;
    }

    /**
     * Gets the A copy of the `classes` parameter when provided. Otherwise it
     * corresponds to the sorted set of classes found when fitting.
     */
    public List<Object> getClasses() {
        return this.classes;
    }

    /**
     * Sets the value of CachedDict.
     *
     * @param value The new value for CachedDict.
     */
    public void setCachedDict(Map<String, Object> value) {
        this.cachedDict = value;
    }

    /**
     * Gets the value of CachedDict.
     */
    public Map<String, Object> getCachedDict() {
        return this.cachedDict;
    }

    /**
     * Takes the input array and transforms it: each sample's label set is
     * encoded as a row of the indicator matrix, with a 1 in every column
     * whose class is present.
     *
     * @param array The array to transform.
     * @return The transformed array of shape (n_samples, n_classes).
     * @throws ScikitLearnCoreException If a label was not seen during training.
     */
    @Override
    public NumpyArray<Long> transform(List<Set<Object>> array) {
        Map<Object, Integer> mapper = new HashMap<>();
        for (int i = 0; i < classes.size(); i++) {
            mapper.put(classes.get(i), i);
        }

        NumpyArray<Long> result = NumpyArrayFactory.arrayOfInt64WithShape(new int[]{array.size(), classes.size()});

        for (int i = 0; i < array.size(); i++) {
            Set<Object> labels = array.get(i);

            for (Object label : labels) {
                Integer index = mapper.get(label);
                if (index != null) {
                    // Use a long literal so the boxed value matches the int64
                    // element type of the array (an `int` literal would box to
                    // Integer and corrupt the underlying long storage).
                    result.set(1L, i, index.intValue());
                } else {
                    throw new ScikitLearnCoreException(String.format("The class '%s' was not defined during the MultiLabelBinarizer training.", label.toString()));
                }
            }
        }

        return result;
    }

    /**
     * Takes a transformed array and reverses the transformation: every
     * non-zero column of a row contributes its class to that sample's
     * label set.
     *
     * @param array The array to apply reverse transform.
     * @return The inverse transform of array.
     */
    @Override
    public List<Set<Object>> inverseTransform(NumpyArray<Long> array) {
        List<Set<Object>> result = new ArrayList<>();

        for (int i = 0; i < array.getShape()[0]; i++) {
            Set<Object> labels = new HashSet<>();
            result.add(labels);

            for (int j = 0; j < classes.size(); j++) {
                long value = array.get(i, j);
                if (value != 0) {
                    labels.add(classes.get(j));
                }
            }
        }

        return result;
    }
}
0
java-sources/ai/scikitlearn4x/sklearn4jvm/0.0.4/ai/sklearn4j
java-sources/ai/scikitlearn4x/sklearn4jvm/0.0.4/ai/sklearn4j/utils/ExtMath.java
package ai.sklearn4j.utils; import ai.sklearn4j.core.ScikitLearnFeatureNotImplementedException; import ai.sklearn4j.core.libraries.numpy.Numpy; import ai.sklearn4j.core.libraries.numpy.NumpyArray; import ai.sklearn4j.core.libraries.numpy.NumpyArrayFactory; /** * Helper class that implements scikit-learn functionalities in utils/extmath.py. */ public class ExtMath { /** * Dot product of the NumpyArray. * * @param v1 Left-hand side of the expression. * @param v2 Right-hand side of the expression. * @return The dot product of the two numpy array. */ public static NumpyArray<Double> dot(NumpyArray<Double> v1, NumpyArray<Double> v2) { if (v1.numberOfDimensions() == v2.numberOfDimensions() && v1.numberOfDimensions() == 2) { // Just do a regular matrix multiplication double[][] result = new double[v1.getShape()[0]][v2.getShape()[1]]; for (int i = 0; i < v1.getShape()[0]; i++) { for (int j = 0; j < v2.getShape()[1]; j++) { for (int k = 0; k < v1.getShape()[1]; k++) { result[i][j] += v1.get(i, k) * v2.get(k, j); } } } return NumpyArrayFactory.from(result); } throw new ScikitLearnFeatureNotImplementedException(); } /** * Row-wise (squared) Euclidean norm of X. * Equivalent to np.sqrt((X * X).sum(axis=1)) * * @param x The input array. * @return The row-wise (squared) Euclidean norm of x. */ public static NumpyArray<Double> rowNorm(NumpyArray<Double> x) { return rowNorm(x, false); } /** * Row-wise (squared) Euclidean norm of X. * Equivalent to np.sqrt((X * X).sum(axis=1)) * * @param x The input array. * @param squared If True, return squared norms. * @return The row-wise (squared) Euclidean norm of x. */ public static NumpyArray<Double> rowNorm(NumpyArray<Double> x, boolean squared) { NumpyArray<Double> tmp = Numpy.multiply(x, x); tmp = Numpy.sum(tmp, 1, false); if (!squared) { tmp = Numpy.sqrt(tmp); } return tmp; } }
0
java-sources/ai/scikitlearn4x/sklearn4jvm/0.0.4/ai/sklearn4j
java-sources/ai/scikitlearn4x/sklearn4jvm/0.0.4/ai/sklearn4j/utils/Preprocessings.java
package ai.sklearn4j.utils;

import ai.sklearn4j.core.libraries.numpy.NumpyArray;
import ai.sklearn4j.core.libraries.numpy.NumpyArrayFactory;

/**
 * Utils method that scikit-learn provide to preprocess the data.
 */
public class Preprocessings {
    /**
     * Binarize a numpy array based on a given threshold.
     * <p>
     * Elements strictly greater than the threshold map to 1.0; everything
     * else (including values equal to the threshold) maps to 0.0.
     *
     * @param x         Array to be binarized.
     * @param threshold The cut-off used for binarization.
     * @return A new numpy array of the same shape holding only 0.0 and 1.0.
     */
    public static NumpyArray<Double> binarizeInput(NumpyArray<Double> x, double threshold) {
        NumpyArray<Double> binarized = NumpyArrayFactory.arrayOfDoubleWithShape(x.getShape());
        x.applyToEachElementAnsSaveToTarget(binarized, value -> value > threshold ? 1.0 : 0.0);
        return binarized;
    }
}
0
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/ai
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/ai/sensfrx/Sensfrx.java
package ai.sensfrx;

import android.annotation.SuppressLint;
import android.app.Application;
import android.content.Context;
import android.content.pm.ApplicationInfo;
import android.content.pm.PackageInfo;
import android.content.pm.PackageManager;
import android.os.Build;
import android.view.MotionEvent;
import android.view.View;

import com.google.gson.JsonArray;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.Base64;
import java.util.List;

import ai.sensfrx.data.MainViewModel;
import ai.sensfrx.data.model.ClickEvent;
import ai.sensfrx.data.model.Event;
import ai.sensfrx.data.model.LocationInfo;
import ai.sensfrx.df.AppInfo;
import ai.sensfrx.df.BatteryInformation;
import ai.sensfrx.df.CpuInformation;
import ai.sensfrx.df.DeviceInformation;
import ai.sensfrx.df.NetworkInformation;
import ai.sensfrx.df.RootDetector;
import ai.sensfrx.df.StorageInformation;
import ai.sensfrx.df.SystemInformation;
import ai.sensfrx.em.DeviceCoordinate;
import ai.sensfrx.em.EventQueueManager;
import ai.sensfrx.em.LocationEvent;
import ai.sensfrx.em.TrackAllEvents;
import ai.sensfrx.utils.AppCloneDetector;
import ai.sensfrx.utils.AppDataUsageHelper;
import ai.sensfrx.utils.AppSignature;
import ai.sensfrx.utils.Constants;
import ai.sensfrx.utils.EncodeSecret;
import ai.sensfrx.utils.SensfrxLogger;
import ai.sensfrx.utils.StringCompressor;
import ai.sensfrx.utils.Utils;

/**
 * Public entry point of the Sensfrx SDK.
 * <p>
 * Singleton (held in {@link #sensFRX}) configured once via one of the
 * {@code configure(...)} overloads; it registers activity lifecycle callbacks,
 * collects a device fingerprint JSON, and queues/sends click, transaction and
 * location events to the Sensfrx backend.
 * <p>
 * NOTE(review): the static state here is not synchronized — presumably all
 * entry points are called from the main thread; confirm before relying on it
 * from background threads.
 */
public class Sensfrx {

    @SuppressLint("StaticFieldLeak")
    private static Sensfrx sensFRX;                      // singleton instance

    private Context mContext;                            // application context (not an activity)
    private int appVersionCode;
    private String appVersionName;

    private static SessionManager sessionManager;
    private static LocationEvent locationEvent;          // last known location, set by trackLocation/login
    private static DeviceCoordinate deviceCoordinate;
    private SensfrxConfiguration senseFRXConfiguration;
    private SensfrxActivityLifecycleCallbacks activityLifecycleCallbacks;
    private static EventQueueManager eventQueueManager;
    private static JsonObject deviceLogs;                // cached copy of the last fingerprint payload
    private static String uId = null;                    // user id attached to events, set via setUid()

    static byte[] userAppsStringCompressedData;          // gzip of comma-joined user-installed package names
    static byte[] systemAppsStringCompressedData;        // gzip of comma-joined system package names

    private static String application_languages = "";

    private Sensfrx(Application application, SensfrxConfiguration senseFRXConfiguration) {
        init(application, senseFRXConfiguration);
    }

    /** Wires up the collaborators that every other entry point relies on. */
    private void init(Application application, SensfrxConfiguration configuration) {
        mContext = application.getApplicationContext();
        appVersionCode = AppInfo.getAppVersionCode(application);
        appVersionName = AppInfo.getAppVersionName(application);
        eventQueueManager = new EventQueueManager(mContext);
        deviceCoordinate = DeviceCoordinate.getInstance(mContext);
        sessionManager = SessionManager.getInstance(mContext);
        this.senseFRXConfiguration = configuration;
    }

    /**
     * Registers both lifecycle trackers, records the app-version transition and
     * pushes an initial device fingerprint.
     */
    private void registerLifeCycleCallbacks(Application application) {
        activityLifecycleCallbacks = new SensfrxActivityLifecycleCallbacks();
        application.registerActivityLifecycleCallbacks(activityLifecycleCallbacks);
        SensfrxAppLifecycleCallbacks lifecycleTracker = new SensfrxAppLifecycleCallbacks();
        application.registerActivityLifecycleCallbacks(lifecycleTracker);
        AppInfo.getAppState(sessionManager.getVersionCode(), appVersionCode);
        logDeviceFingerprints(application.getApplicationContext());
        sessionManager.setVersionName(appVersionName);
        sessionManager.setVersionCode(appVersionCode);
    }

    /** Forces a fresh fingerprint upload (and thereby a new request token). */
    public static void reGenerateToken() {
        logDeviceFingerprints(sensFRX.mContext);
    }

    /**
     * Configures the SDK from a raw secret key. No-op if already configured.
     *
     * @throws RuntimeException if {@code secretKey} is null.
     */
    public static void configure(Application application, boolean sandboxMode, String secretKey) {
        if (sensFRX == null) {
            if (secretKey == null) {
                throw new RuntimeException("Provide a valid Sensfrx Secret and Property id while initializing the SDK.");
            }
            Constants.SANDBOX = sandboxMode;
            // ":ym6gjj" suffix is part of the wire format expected by the backend.
            Constants.SECRET_KEY = EncodeSecret.encodeSecret(secretKey + ":ym6gjj");
            sensFRX = new Sensfrx(application, new SensfrxConfiguration.Builder().secretKey(secretKey + ":ym6gjj").build());
            sensFRX.registerLifeCycleCallbacks(application);
        }
    }

    /** Name of the manifest meta-data key that may carry the secret. */
    public static String getSecretKeyFromManifest() {
        return "auth_safe_secret_key";
    }

    /**
     * Configures the SDK from a prebuilt configuration. No-op if already configured.
     *
     * @throws RuntimeException if the configuration carries no secret key.
     */
    public static void configure(Application application, SensfrxConfiguration configuration) {
        if (sensFRX == null) {
            if (configuration.secretKey() == null || configuration.secretKey().equalsIgnoreCase("")) {
                throw new RuntimeException("Provide a valid Sensfrx Secret and Property id while initializing the SDK.");
            }
            sensFRX = new Sensfrx(application, configuration);
            sensFRX.registerLifeCycleCallbacks(application);
        }
    }

    /** Records the languages string reported in the "app_ln" fingerprint field. */
    public static void addApplicationLanguages(String languages) {
        application_languages = languages;
    }

    /**
     * Builds the device-fingerprint payload shared by both {@code logDeviceFingerprints}
     * overloads. Probes known to fail on some devices are wrapped in individual
     * try/catch blocks so one failing probe cannot lose the rest of the payload.
     *
     * @param mContext context handed to the various info collectors.
     * @param forLogin when true, the payload also carries the request token and
     *                 omits the "app_state" property (login/register flow).
     */
    private static JsonObject collectFingerprintJson(Context mContext, boolean forLogin) {
        DeviceInformation deviceInformation = new DeviceInformation(mContext);
        SystemInformation systemInformation = new SystemInformation(mContext);
        CpuInformation cpuInfo = new CpuInformation(mContext);
        BatteryInformation batteryInfo = new BatteryInformation(mContext);
        StorageInformation storageInfo = new StorageInformation(mContext);
        NetworkInformation networkInfo = new NetworkInformation(mContext);
        JsonObject jsonObject = new JsonObject();
        try {
            // Best-effort probes: each falls back to a neutral value on failure.
            try { jsonObject.addProperty("ex_storage", storageInfo.isOnExternalStorage(mContext) ? "1" : "0"); } catch (Exception e) { jsonObject.addProperty("ex_storage", "0"); }
            try { jsonObject.addProperty("mock_status", deviceInformation.isMockLocationEnabled() ? "1" : "0"); } catch (Exception e) { jsonObject.addProperty("mock_status", "0"); }
            try { jsonObject.addProperty("app_path", AppCloneDetector.create(mContext).getAppPath()); } catch (Exception e) { jsonObject.addProperty("app_path", ""); }
            try { jsonObject.addProperty("app_clone", AppCloneDetector.create(mContext).isAppCloned() ? "1" : "0"); } catch (Exception e) { jsonObject.addProperty("app_clone", "0"); }
            try { jsonObject.addProperty("clone_env", AppCloneDetector.create(mContext).isRunningInClonedEnvironment() ? "1" : "0"); } catch (Exception e) { jsonObject.addProperty("clone_env", "0"); }
            try { jsonObject.addProperty("mock_app", deviceInformation.getAppsWithMockPermission(mContext)); } catch (Exception e) { jsonObject.addProperty("mock_app", ""); }
            try { jsonObject.addProperty("night_mode", deviceInformation.getNightModeStatus()); } catch (Exception e) { jsonObject.addProperty("night_mode", ""); }
            try { jsonObject.addProperty("screen_brightness", deviceInformation.getScreenBrightness()); } catch (Exception e) { jsonObject.addProperty("screen_brightness", ""); }
            try { jsonObject.addProperty("power_saving", deviceInformation.isPowerSavingModeEnabled(mContext) ? "1" : "0"); } catch (Exception e) { jsonObject.addProperty("power_saving", ""); }
            try { jsonObject.addProperty("dnd_mode", deviceInformation.isDNDModeEnabled(mContext) ? "1" : "0"); } catch (Exception e) { jsonObject.addProperty("dnd_mode", "0"); }
            try { jsonObject.addProperty("volume_levels", deviceInformation.getAllVolumeLevelsInfo()); } catch (Exception e) { jsonObject.addProperty("volume_levels", ""); }
            try { jsonObject.addProperty("dual_sim", deviceInformation.isDualSim(mContext) ? "1" : "0"); } catch (Exception e) { jsonObject.addProperty("dual_sim", "0"); }
            try { jsonObject.addProperty("bluetooth", deviceInformation.checkBluetoothStatus()); } catch (Exception e) { jsonObject.addProperty("bluetooth", ""); }
            try { jsonObject.addProperty("ampm_format", deviceInformation.getAMPMFormat(mContext)); } catch (Exception e) { jsonObject.addProperty("ampm_format", ""); }
            try { jsonObject.addProperty("lock_type", deviceInformation.getLockTypeString(mContext)); } catch (Exception e) { jsonObject.addProperty("lock_type", ""); }
            try { jsonObject.addProperty("sim_card_locked", deviceInformation.isSimCardLocked(mContext) ? "1" : "0"); } catch (Exception e) { jsonObject.addProperty("sim_card_locked", "0"); }

            AppDataUsageHelper appDataUsageHelper = new AppDataUsageHelper(mContext);
            jsonObject.addProperty("call_status", SessionManager.getInstance(mContext).getCallStatus());
            jsonObject.addProperty("p_list", deviceInformation.getManifestPermissions(mContext.getPackageName()));
            jsonObject.addProperty("d_type", deviceInformation.isEmulatorOrRealDevice() ? "1" : "0");
            jsonObject.addProperty("app_ln", application_languages);
            jsonObject.addProperty("dev_mode", deviceInformation.isDeveloperModeEnabled() ? "1" : "0");
            if (forLogin) {
                // Login payloads embed the current request token. getRequestToken()
                // may itself trigger a fingerprint refresh when no token is cached.
                jsonObject.addProperty("token", getRequestToken());
            }
            jsonObject.addProperty("rxd", appDataUsageHelper.getReceivedDataUsage());
            jsonObject.addProperty("txd", appDataUsageHelper.getTransmittedDataUsage());
            jsonObject.addProperty("tz", deviceInformation.getDefaultTimeZoneOffsetInHours());
            jsonObject.addProperty("vpn", deviceInformation.isVpnConnected(mContext) ? "1" : "0");
            jsonObject.addProperty("model", deviceInformation.getModel());
            jsonObject.addProperty("d_mf", deviceInformation.getDeviceManufacturer());
            jsonObject.addProperty("d_name", deviceInformation.getDeviceName());
            jsonObject.addProperty("b_name", deviceInformation.getBuildBrand());
            jsonObject.addProperty("pc", deviceInformation.getProcessor());
            jsonObject.addProperty("ln", deviceInformation.getLanguage());
            jsonObject.addProperty("r_mode", deviceInformation.getDeviceRingerMode());
            jsonObject.addProperty("b_fingerprint", deviceInformation.getFingerprint());
            jsonObject.addProperty("d_locale", deviceInformation.getDeviceLocale());
            jsonObject.addProperty("ad_ver", systemInformation.getOSVersion());
            jsonObject.addProperty("api_level", systemInformation.getSdkVersion());
            jsonObject.addProperty("spl", systemInformation.getSecurityPatchLevel());
            jsonObject.addProperty("b_number", systemInformation.getBuildHost());
            jsonObject.addProperty("base_ver", systemInformation.getBaseBandVersion());
            jsonObject.addProperty("vm", systemInformation.getVirtualMachine());
            jsonObject.addProperty("root", RootDetector.isRooted() ? "1" : "0");
            jsonObject.addProperty("kernel", systemInformation.getKernelVersion());
            jsonObject.addProperty("opengl_es", systemInformation.getOpenGlVersion(mContext));
            jsonObject.addProperty("s_up_time", systemInformation.getSystemUpTime());
            jsonObject.addProperty("tb", systemInformation.getTreble());
            jsonObject.addProperty("soc", cpuInfo.getSOC());
            jsonObject.addProperty("processor", cpuInfo.getProcessor());
            jsonObject.addProperty("abi", cpuInfo.getCpuAbi());
            jsonObject.addProperty("mips", cpuInfo.getBogoMIPS().isEmpty() ? "0" : cpuInfo.getBogoMIPS());
            jsonObject.addProperty("cp_gov", cpuInfo.getCPUGovernor(0));
            jsonObject.addProperty("cp_type", cpuInfo.getCpuArchitecture());
            jsonObject.addProperty("core", cpuInfo.getCpuNumOfCores());
            jsonObject.addProperty("cp_use", cpuInfo.getCpuUsageInPercentage());
            jsonObject.addProperty("b_health", batteryInfo.getBatteryHealth());
            jsonObject.addProperty("b_level", batteryInfo.getBatteryPercent());
            jsonObject.addProperty("temp", batteryInfo.getBatteryTemperature());
            jsonObject.addProperty("b_tech", batteryInfo.getBatteryTechnology());
            jsonObject.addProperty("b_capacity", batteryInfo.getBatteryCapacity(mContext));
            jsonObject.addProperty("sc", storageInfo.getReadableTotalInternalMemorySize());
            jsonObject.addProperty("fs", storageInfo.getReadableAvailableInternalMemorySize());
            jsonObject.addProperty("res", deviceInformation.getResolution());
            jsonObject.addProperty("dpi", deviceInformation.getDPI());
            jsonObject.addProperty("s_size", deviceInformation.getScreenWidth() + "x" + deviceInformation.getScreenHeight());
            jsonObject.addProperty("r_rate", deviceInformation.getRefreshRate());
            jsonObject.addProperty("operator", networkInfo.getOperator());
            jsonObject.addProperty("n_type", networkInfo.isConnectedToWifiOrMobile(mContext));
            jsonObject.addProperty("ip", networkInfo.getLocalIpAddress());
            jsonObject.addProperty("app_package_name", AppInfo.getPackageName(mContext));
            jsonObject.addProperty("app_name", AppInfo.getApplicationName(mContext));
            jsonObject.addProperty("app_in_date", AppInfo.appInstallDate(mContext));
            jsonObject.addProperty("app_up_date", AppInfo.appUpdateDate(mContext));
            jsonObject.addProperty("app_ver_code", AppInfo.getAppVersionCode(mContext));
            jsonObject.addProperty("app_ver_name", AppInfo.getAppVersionName(mContext));
            if (!forLogin) {
                jsonObject.addProperty("app_state", AppInfo.logAppState());
            }

            installedAppsList(mContext);
            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
                jsonObject.addProperty("s_apps", StringCompressor.encodeBase64(systemAppsStringCompressedData));
                jsonObject.addProperty("u_apps", StringCompressor.encodeBase64(userAppsStringCompressedData));
            }
        } catch (Exception e) {
            SensfrxLogger.e("Device Logs Json Exception >> " + e.getMessage());
        }
        return jsonObject;
    }

    /**
     * Builds the "loc" sub-object from the given coordinates and caches a
     * corresponding {@link LocationEvent} in {@link #locationEvent}.
     */
    private static JsonObject buildLocationJson(double latitude, double longitude) {
        locationEvent = new LocationEvent(latitude, longitude);
        JsonObject locationObject = new JsonObject();
        JsonArray llArray = new JsonArray();
        locationObject.addProperty("country", locationEvent.getCountryName(sensFRX.mContext));
        locationObject.addProperty("region", locationEvent.getStateName(sensFRX.mContext));
        locationObject.addProperty("city", locationEvent.getCityName(sensFRX.mContext));
        llArray.add(locationEvent.getLatitude());   // Latitude
        llArray.add(locationEvent.getLongitude());  // Longitude
        locationObject.add("ll", llArray);
        return locationObject;
    }

    /**
     * Re-parses the payload into a fresh tree, caches it in {@link #deviceLogs}
     * and ships it to the server when the network is available.
     *
     * @param logPrefix when non-null, the payload is logged with this prefix.
     */
    private static void dispatchFingerprint(Context mContext, JsonObject payload, String logPrefix) {
        // NOTE(review): the instance JsonParser.parse(...) is deprecated in newer
        // Gson; JsonParser.parseString(...) is the replacement — confirm the Gson
        // version shipped with the SDK before switching.
        JsonParser jsonParser = new JsonParser();
        JsonObject jsonObject1 = (JsonObject) jsonParser.parse(payload.toString());
        if (logPrefix != null) {
            SensfrxLogger.d(logPrefix + jsonObject1);
        }
        if (Utils.isNetworkAvailable(mContext)) {
            pushDeviceInformation(mContext, jsonObject1);
            deviceLogs = jsonObject1;
        } else {
            SensfrxLogger.e("Please check your Network.");
        }
    }

    /** Collects and uploads the device fingerprint (app-open flow). */
    public static void logDeviceFingerprints(Context mContext) {
        JsonObject payload = collectFingerprintJson(mContext, false);
        dispatchFingerprint(mContext, payload, null);
    }

    /** Collects and uploads the device fingerprint with location (login/register flow). */
    public static void logDeviceFingerprints(Context mContext, double latitude, double longitude) {
        JsonObject payload = collectFingerprintJson(mContext, true);
        try {
            payload.add("loc", buildLocationJson(latitude, longitude));
        } catch (Exception e) {
            SensfrxLogger.e("Device Logs Json Exception >> " + e.getMessage());
        }
        dispatchFingerprint(mContext, payload, "Login JsonObject : ");
    }

    /** Last fingerprint payload sent to the server, or null if none was sent yet. */
    public static JsonObject getDeviceLogs() {
        return deviceLogs;
    }

    /**
     * Returns the cached fingerprint enriched with a fresh location block,
     * Base64-encoded for transport; "" on any failure.
     * <p>
     * NOTE(review): below API 26 the Base64 branch is skipped and this returns
     * null rather than "" — confirm callers handle both.
     */
    public static String getDeviceFingerprints(double latitude, double longitude) {
        try {
            deviceLogs.add("loc", buildLocationJson(latitude, longitude));
            // The bulky app lists are stripped before encoding.
            // JsonObject.remove is a no-op for absent keys, so no has() guard is needed.
            deviceLogs.remove("s_apps");
            deviceLogs.remove("u_apps");
            String jsonString = deviceLogs.toString();
            byte[] bytes = jsonString.getBytes(StandardCharsets.UTF_8);
            String base64Encoded = null;
            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
                base64Encoded = Base64.getEncoder().encodeToString(bytes);
            }
            return base64Encoded;
        } catch (Exception e) {
            SensfrxLogger.e("DF Error : " + e.getMessage());
            e.printStackTrace();
            return "";
        }
    }

    /**
     * This method if responsible for creating object of view model
     * <br>Passing all the collected info to server
     * <br>Start making an api call
     *
     * @param context for view model
     */
    private static void pushDeviceInformation(Context context, JsonObject jsonObject) {
        MainViewModel mainViewModel = new MainViewModel((Application) context.getApplicationContext());
        mainViewModel.sendDeviceInformationToServer(jsonObject).observeForever(deviceInfoResponse -> {
            SensfrxLogger.d("Sensfrx Request Token Status :> " + deviceInfoResponse.getData().getAstk());
        });
    }

    /** Queues a generic click event with current orientation and gyroscope data. */
    public static void trackClickEvent() {
        if (TrackAllEvents.getClickStatus()) {
            deviceCoordinate.start();
            ClickEvent clickEvent = new ClickEvent("click_event", uId, new ClickEvent.EventData(
                    "" + deviceCoordinate.getDeviceOrientation(getInstance().mContext),
                    "View",
                    new ClickEvent.GyroscopCord("" + DeviceCoordinate.X, "" + DeviceCoordinate.Y, "" + DeviceCoordinate.Z),
                    new ClickEvent.DisplayCord("0", "0")));
            addEventInQueue(clickEvent);
        }
    }

    /** Sends a transaction payload, stamped with the request token and user id. */
    public static void trackTransactionEvent(JsonObject transactionJson) {
        transactionJson.addProperty("request_token", Sensfrx.getRequestToken());
        transactionJson.addProperty("user_id", "" + Sensfrx.getUid());
        SensfrxLogger.d("Transaction Log : " + transactionJson);
        MainViewModel mainViewModel = new MainViewModel((Application) sensFRX.mContext.getApplicationContext());
        mainViewModel.pushTransaction(transactionJson).observeForever(transaction -> {
            SensfrxLogger.d("Sensfrx Transaction :> ");
        });
    }

    /** Logs a single touch event. */
    public static void trackClickEvent(MotionEvent event) {
        TrackAllEvents.logClickEvent(event, sensFRX.mContext);
    }

    /**
     * Installs a touch listener on the view that logs every touch.
     * <p>
     * NOTE(review): onTouch returning true consumes the event, so the view's own
     * click handling (and accessibility performClick) never fires — confirm this
     * is intended before changing it.
     */
    public static void trackClickEvent(View view) {
        view.setOnTouchListener(new View.OnTouchListener() {
            @Override
            public boolean onTouch(View view, MotionEvent motionEvent) {
                TrackAllEvents.logClickEvent(motionEvent, sensFRX.mContext);
                return true;
            }
        });
    }

    /** Adds an event to the outgoing queue. */
    public static void addEventInQueue(Event event) {
        eventQueueManager.add(event);
    }

    /**
     * Returns the cached request token, refreshing the fingerprint first when no
     * token is cached; "null" (the string) when still absent.
     */
    public static String getRequestToken() throws NullPointerException {
        if (sessionManager.getUserToken() == null) {
            Sensfrx.logDeviceFingerprints(sensFRX.mContext);
        }
        // getInstance() throws rather than returning null, so this guard exists
        // to surface the "not configured" error; the == null branch is dead.
        if (Sensfrx.getInstance() == null) {
            throw new SensfrxError("Sensfrx SDK must be configured before calling this method");
        }
        return sessionManager.getUserToken() != null ? sessionManager.getUserToken() : "null";
    }

    /**
     * Login variant of {@link #getRequestToken()}: flushes queued events and
     * refreshes the fingerprint with the supplied coordinates.
     */
    public static String getRequestTokenForLogin(double latitude, double longitude) throws NullPointerException {
        if (sessionManager.getUserToken() == null) {
            Sensfrx.logDeviceFingerprints(sensFRX.mContext, latitude, longitude);
        }
        eventQueueManager.logEventsToServer();
        if (Sensfrx.getInstance() == null) {
            throw new SensfrxError("Sensfrx SDK must be configured before calling this method");
        } else {
            // NOTE(review): when no token was cached, the fingerprint is uploaded
            // twice in this method — confirm whether that is intentional.
            Sensfrx.logDeviceFingerprints(sensFRX.mContext, latitude, longitude);
        }
        return sessionManager.getUserToken() != null ? sessionManager.getUserToken() : "null";
    }

    /** Caches the given coordinates for later {@link #locationLogEvent} calls. */
    public static void trackLocation(double latitude, double longitude) {
        locationEvent = new LocationEvent(latitude, longitude);
    }

    /**
     * Snapshot of the last tracked location as a {@link LocationInfo}; an
     * all-empty instance when no location was tracked yet.
     */
    public static LocationInfo locationLogEvent(Context context) {
        if (locationEvent == null) {
            return new LocationInfo("", "", "", "", "", "", "");
        }
        // Fixed: the previous "x != Double.NaN" checks were always true (NaN is
        // unequal to everything, including itself); Double.isNaN is the correct test.
        return new LocationInfo(
                !Double.isNaN(locationEvent.getLatitude()) ? String.valueOf(locationEvent.getLatitude()) : "",
                !Double.isNaN(locationEvent.getLongitude()) ? String.valueOf(locationEvent.getLongitude()) : "",
                locationEvent.getCityName(context) != null ? locationEvent.getCityName(context) : "",
                locationEvent.getStateName(context) != null ? locationEvent.getStateName(context) : "",
                locationEvent.getCountryName(context) != null ? locationEvent.getCountryName(context) : "",
                locationEvent.getAddressLine(context) != null ? locationEvent.getAddressLine(context) : "",
                locationEvent.getPostalCode(context) != null ? locationEvent.getPostalCode(context) : "");
    }

    /**
     * Splits the installed packages into user/system lists, compresses both and
     * stores them in the static byte[] fields read by the fingerprint builder.
     */
    private static void installedAppsList(Context context) {
        PackageManager packageManager = context.getPackageManager();
        List<PackageInfo> packList = packageManager.getInstalledPackages(0);
        StringBuilder userAppsStringBuilder = new StringBuilder();
        StringBuilder systemAppsStringBuilder = new StringBuilder();
        try {
            for (int i = 0; i < packList.size(); i++) {
                PackageInfo packInfo = packList.get(i);
                String packageName = packInfo.packageName;
                if ((packInfo.applicationInfo.flags & ApplicationInfo.FLAG_SYSTEM) == 0) {
                    // User-installed app
                    if (userAppsStringBuilder.length() > 0) {
                        userAppsStringBuilder.append(", ");
                    }
                    userAppsStringBuilder.append(packageName);
                } else {
                    // System app
                    if (systemAppsStringBuilder.length() > 0) {
                        systemAppsStringBuilder.append(", ");
                    }
                    systemAppsStringBuilder.append(packageName);
                }
            }
        } catch (Exception e) {
            SensfrxLogger.e("Some error while getting installed app list");
        }
        String userAppsString = userAppsStringBuilder.toString();
        String systemAppsString = systemAppsStringBuilder.toString();
        try {
            userAppsStringCompressedData = StringCompressor.compressString(userAppsString);
            systemAppsStringCompressedData = StringCompressor.compressString(systemAppsString);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /** Secret key the SDK was configured with. */
    public String getSecretKey() {
        return senseFRXConfiguration.secretKey();
    }

    /** Associates subsequent events with the given user id. */
    public static void setUid(String userId) {
        uId = userId;
    }

    /** Current user id, or null when none was set. */
    public static String getUid() {
        // Fixed: was "uId != null ? uId : null", which is equivalent to just uId.
        return uId;
    }

    /**
     * @return the configured singleton.
     * @throws SensfrxError when the SDK was not configured yet.
     */
    public static Sensfrx getInstance() {
        if (sensFRX == null) {
            throw new SensfrxError("Sensfrx SDK must be configured before calling this method");
        }
        return sensFRX;
    }

    /** Flushes all queued events to the server. */
    public static void flush() {
        eventQueueManager.logEventsToServer();
    }

    /** Unregisters lifecycle callbacks and discards the singleton. */
    public static void destroy(Application application) {
        if (sensFRX != null) {
            sensFRX.unregisterLifeCycleCallbacks(application);
            sensFRX = null;
        }
    }

    private void unregisterLifeCycleCallbacks(Application application) {
        application.unregisterActivityLifecycleCallbacks(activityLifecycleCallbacks);
    }
}
0
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/ai
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/ai/sensfrx/SensfrxActivityLifecycleCallbacks.java
package ai.sensfrx; import android.app.Activity; import android.app.Application; import android.os.Bundle; import androidx.annotation.NonNull; import ai.sensfrx.em.TrackAllEvents; class SensfrxActivityLifecycleCallbacks implements Application.ActivityLifecycleCallbacks { @Override public void onActivityCreated(@NonNull Activity activity, Bundle bundle) { TrackAllEvents.logScreenEvent(activity, "Activity Created"); } @Override public void onActivityStarted(@NonNull Activity activity) { TrackAllEvents.logScreenEvent(activity, "Activity Started"); } @Override public void onActivityResumed(@NonNull Activity activity) { TrackAllEvents.logScreenEvent(activity, "Activity Resumed"); } @Override public void onActivityPaused(@NonNull Activity activity) { TrackAllEvents.logScreenEvent(activity, "Activity Paused"); } @Override public void onActivityStopped(@NonNull Activity activity) { TrackAllEvents.logScreenEvent(activity, "Activity Stopped"); } @Override public void onActivitySaveInstanceState(@NonNull Activity activity, @NonNull Bundle bundle) { TrackAllEvents.logScreenEvent(activity, "Save Instance State"); } @Override public void onActivityDestroyed(@NonNull Activity activity) { TrackAllEvents.logScreenEvent(activity, "Activity Destroyed"); } }
0
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/ai
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/ai/sensfrx/SensfrxAppLifecycleCallbacks.java
package ai.sensfrx; import android.app.Activity; import android.app.Application; import android.os.Bundle; import android.os.Handler; import ai.sensfrx.em.TrackAllEvents; /** * Handles the application's lifecycle callbacks to track app state changes. */ class SensfrxAppLifecycleCallbacks implements Application.ActivityLifecycleCallbacks { private boolean isAppInBackground = true; // Indicates if the app is in the background private int activityCount = 0; // Count of activities currently in the foreground private final Handler handler = new Handler(); // Handler for posting delayed tasks private Runnable foregroundRunnable; // Runnable to track app background transition @Override public void onActivityCreated(Activity activity, Bundle bundle) { if (activityCount == 0 && isAppInBackground) { // App is being opened isAppInBackground = false; TrackAllEvents.logAppEvent("OPEN"); } activityCount++; } @Override public void onActivityStarted(Activity activity) { // No action needed } @Override public void onActivityResumed(Activity activity) { if (isAppInBackground) { // App has been brought to the foreground isAppInBackground = false; TrackAllEvents.logAppEvent("FOREGROUND"); } if (foregroundRunnable != null) { handler.removeCallbacks(foregroundRunnable); } } @Override public void onActivityPaused(Activity activity) { foregroundRunnable = () -> { // App has been sent to the background isAppInBackground = true; TrackAllEvents.logAppEvent("BACKGROUND"); }; handler.postDelayed(foregroundRunnable, 500); // Delay to determine if app is in the background } @Override public void onActivityStopped(Activity activity) { activityCount--; if (activityCount == 0 && isAppInBackground) { handler.removeCallbacks(foregroundRunnable); // App is closed TrackAllEvents.logAppEvent("CLOSED"); } } @Override public void onActivitySaveInstanceState(Activity activity, Bundle bundle) { // No action needed } @Override public void onActivityDestroyed(Activity activity) { activityCount--; if 
(activityCount == 0 && isAppInBackground) { handler.removeCallbacks(foregroundRunnable); // App is closed TrackAllEvents.logAppEvent("CLOSED"); } } /** * @return True if the app is in the background, false otherwise */ public boolean isAppInBackground() { return isAppInBackground; } }
0
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/ai
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/ai/sensfrx/SensfrxCallStateListener.java
package ai.sensfrx; import android.content.Context; import android.telephony.PhoneStateListener; import android.telephony.TelephonyManager; import ai.sensfrx.data.model.CallEvent; import ai.sensfrx.utils.SensfrxLogger; /** * Custom PhoneStateListener to monitor call state changes and log events. */ public class SensfrxCallStateListener extends PhoneStateListener { private static final String TAG = "SenseFRXCallState"; private final Context context; /** * Constructor for SensfrxCallStateListener. * * @param context The context in which the listener is operating. */ public SensfrxCallStateListener(Context context) { this.context = context; } /** * Called when the phone call state changes. * * @param state The current phone call state. * @param incomingNumber The phone number of the incoming call, if applicable. */ @Override public void onCallStateChanged(int state, String incomingNumber) { super.onCallStateChanged(state, incomingNumber); switch (state) { case TelephonyManager.CALL_STATE_IDLE: // Phone is idle (no call in progress) SensfrxLogger.d("onCallStateChanged: Call state is idle"); CallEvent callEvent = new CallEvent("call_event", Sensfrx.getUid(), new CallEvent.EventData("No call")); Sensfrx.addEventInQueue(callEvent); SessionManager.getInstance(context).setCallStatus("No call"); Sensfrx.flush(); break; case TelephonyManager.CALL_STATE_RINGING: // Phone is ringing (incoming call) SensfrxLogger.d("onCallStateChanged: Phone is ringing"); callEvent = new CallEvent("call_event", Sensfrx.getUid(), new CallEvent.EventData("Phone is ringing")); Sensfrx.addEventInQueue(callEvent); SessionManager.getInstance(context).setCallStatus("Phone is ringing"); Sensfrx.flush(); break; case TelephonyManager.CALL_STATE_OFFHOOK: // Phone is off-hook (active call) SensfrxLogger.d("onCallStateChanged: Phone is off-hook (active call)"); callEvent = new CallEvent("call_event", Sensfrx.getUid(), new CallEvent.EventData("Active call")); Sensfrx.addEventInQueue(callEvent); 
SessionManager.getInstance(context).setCallStatus("Active call"); Sensfrx.flush(); break; } } }
0
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/ai
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/ai/sensfrx/SensfrxConfiguration.java
package ai.sensfrx;

/**
 * Immutable configuration for the Sensfrx SDK: event-queue sizing and the
 * account secret key. Create instances through {@link Builder}.
 */
public class SensfrxConfiguration {

    private static final int DEFAULT_QUEUE_SIZE = 20;      // used when no size is configured
    private static final int DEFAULT_MAX_QUEUE_SIZE = 100; // used when no cap is configured

    private final int mDefaultQueueSize; // flush threshold of the event queue
    private final int mMaxQueueSize;     // hard cap of the event queue
    private final String secretKey;      // account secret key; may be null

    /**
     * Default constructor using default values (no secret key).
     */
    public SensfrxConfiguration() {
        this(new Builder());
    }

    private SensfrxConfiguration(Builder builder) {
        this.mDefaultQueueSize = builder.defaultQueueSize();
        this.mMaxQueueSize = builder.maxQueueSize();
        this.secretKey = builder.secretKey();
    }

    /** @return Default queue size */
    public int defaultQueueSize() {
        return mDefaultQueueSize;
    }

    /** @return Maximum queue size */
    public int maxQueueSize() {
        return mMaxQueueSize;
    }

    /** @return Secret key, or null if never set */
    public String secretKey() {
        return secretKey;
    }

    /**
     * Builder for {@link SensfrxConfiguration}.
     */
    public static final class Builder {

        private int defaultQueueSize;
        private int maxQueueSize;
        private String secretKey;

        /**
         * Create builder with default values.
         */
        public Builder() {
            defaultQueueSize = DEFAULT_QUEUE_SIZE;
            maxQueueSize = DEFAULT_MAX_QUEUE_SIZE;
        }

        /**
         * Create builder with values from an existing configuration.
         *
         * @param configuration Existing SensfrxConfiguration to copy values from
         */
        public Builder(SensfrxConfiguration configuration) {
            defaultQueueSize = configuration.defaultQueueSize();
            maxQueueSize = configuration.maxQueueSize();
            secretKey = configuration.secretKey();
        }

        /** @return Default queue size */
        public int defaultQueueSize() {
            return defaultQueueSize;
        }

        /**
         * Set the default queue size.
         *
         * FIX: this setter was declared {@code private}, so consumers of the
         * builder could never configure the queue size at all. Widened to
         * public — access was only broadened, which is backward compatible.
         *
         * @param defaultQueueSize Default size of the queue
         * @return Builder instance
         */
        public Builder defaultQueueSize(int defaultQueueSize) {
            this.defaultQueueSize = defaultQueueSize;
            return this;
        }

        /** @return Maximum queue size */
        public int maxQueueSize() {
            return maxQueueSize;
        }

        /**
         * Set the maximum queue size.
         *
         * FIX: widened from private to public, same reason as
         * {@link #defaultQueueSize(int)}.
         *
         * @param maxQueueSize Maximum size of the queue
         * @return Builder instance
         */
        public Builder maxQueueSize(int maxQueueSize) {
            this.maxQueueSize = maxQueueSize;
            return this;
        }

        /**
         * Set the secret key.
         *
         * @param secretKey Secret key for configuration
         * @return Builder instance
         */
        public Builder secretKey(String secretKey) {
            this.secretKey = secretKey;
            return this;
        }

        /** @return Secret key */
        public String secretKey() {
            return secretKey;
        }

        /**
         * Build and return an instance of SensfrxConfiguration.
         *
         * @return Instance of SensfrxConfiguration
         */
        public SensfrxConfiguration build() {
            return new SensfrxConfiguration(this);
        }
    }
}
0
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/ai
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/ai/sensfrx/SensfrxError.java
package ai.sensfrx;

/**
 * Error raised for fatal misuse of the Sensfrx SDK (for example, calling SDK
 * methods before configuration). Extends {@link Error} because these
 * conditions are serious and not meant to be caught and recovered from.
 */
class SensfrxError extends Error {

    /** Prefix prepended to every Sensfrx error message. */
    private static final String MESSAGE_PREFIX = "Sensfrx : ";

    /**
     * Constructs a new SensfrxError whose detail message is the given text
     * prefixed with "Sensfrx : ".
     *
     * @param message The detail message.
     */
    public SensfrxError(String message) {
        super(MESSAGE_PREFIX + message);
    }
}
0
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/ai
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/ai/sensfrx/SensfrxFragmentLifecycleCallbacks.java
package ai.sensfrx; import android.content.Context; import android.os.Bundle; import android.view.View; import androidx.annotation.NonNull; import androidx.appcompat.app.AppCompatActivity; import androidx.fragment.app.Fragment; import androidx.fragment.app.FragmentManager; import ai.sensfrx.em.TrackAllEvents; /** * Utility class for tracking fragment lifecycle events within an AppCompatActivity. */ public class SensfrxFragmentLifecycleCallbacks { /** * Registers fragment lifecycle callbacks to track various lifecycle events of fragments. * * @param activity The AppCompatActivity within which fragment lifecycle events are tracked. */ public static void trackFragmentChange(AppCompatActivity activity) { // Register a FragmentLifecycleCallbacks to track fragment lifecycle events activity.getSupportFragmentManager().registerFragmentLifecycleCallbacks(new FragmentManager.FragmentLifecycleCallbacks() { // Called when a fragment is attached to its host activity @Override public void onFragmentAttached(@NonNull FragmentManager fm, @NonNull Fragment fragment, @NonNull Context context) { super.onFragmentAttached(fm, fragment, context); TrackAllEvents.logScreenEvent(fragment, "Fragment Attached"); } // Called when a fragment is created @Override public void onFragmentCreated(@NonNull FragmentManager fm, @NonNull Fragment fragment, Bundle savedInstanceState) { super.onFragmentCreated(fm, fragment, savedInstanceState); TrackAllEvents.logScreenEvent(fragment, "Fragment Created"); } // Called when the fragment's view has been created @Override public void onFragmentViewCreated(@NonNull FragmentManager fm, @NonNull Fragment fragment, @NonNull View v, Bundle savedInstanceState) { super.onFragmentViewCreated(fm, fragment, v, savedInstanceState); TrackAllEvents.logScreenEvent(fragment, "Fragment View Created"); } // Called when the fragment is started @Override public void onFragmentStarted(@NonNull FragmentManager fm, @NonNull Fragment fragment) { super.onFragmentStarted(fm, 
fragment); TrackAllEvents.logScreenEvent(fragment, "Fragment Started"); } // Called when the fragment is resumed @Override public void onFragmentResumed(@NonNull FragmentManager fm, @NonNull Fragment fragment) { super.onFragmentResumed(fm, fragment); TrackAllEvents.logScreenEvent(fragment, "Fragment Resumed"); } // Called when the fragment is paused @Override public void onFragmentPaused(@NonNull FragmentManager fm, @NonNull Fragment fragment) { super.onFragmentPaused(fm, fragment); TrackAllEvents.logScreenEvent(fragment, "Fragment Paused"); } // Called when the fragment is stopped @Override public void onFragmentStopped(@NonNull FragmentManager fm, @NonNull Fragment fragment) { super.onFragmentStopped(fm, fragment); TrackAllEvents.logScreenEvent(fragment, "Fragment Stopped"); } // Called when the fragment's state is saved @Override public void onFragmentSaveInstanceState(@NonNull FragmentManager fm, @NonNull Fragment fragment, @NonNull Bundle outState) { super.onFragmentSaveInstanceState(fm, fragment, outState); TrackAllEvents.logScreenEvent(fragment, "Fragment Save Instance State"); } // Called when the fragment's view is destroyed @Override public void onFragmentViewDestroyed(@NonNull FragmentManager fm, @NonNull Fragment fragment) { super.onFragmentViewDestroyed(fm, fragment); TrackAllEvents.logScreenEvent(fragment, "Fragment View Destroyed"); } // Called when the fragment is destroyed @Override public void onFragmentDestroyed(@NonNull FragmentManager fm, @NonNull Fragment fragment) { super.onFragmentDestroyed(fm, fragment); TrackAllEvents.logScreenEvent(fragment, "Fragment Destroyed"); } // Called when a fragment is detached from its host activity @Override public void onFragmentDetached(@NonNull FragmentManager fm, @NonNull Fragment fragment) { super.onFragmentDetached(fm, fragment); TrackAllEvents.logScreenEvent(fragment, "Fragment Detached"); } }, true); // Pass true to track all existing fragments as well as future fragments } }
0
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/ai
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/ai/sensfrx/SessionManager.java
package ai.sensfrx; import android.content.Context; import android.content.SharedPreferences; public class SessionManager { private static final String PREFERENCE_NAME = "auth_safe_session"; private static final String USER_TOKEN = "user_token"; private static final String DEVICE_ID = "device_id"; private static final String VERSION_CODE = "version_code"; private static final String VERSION_NAME = "version_name"; private static final String CALL_STATUS = "call_status"; private static volatile SessionManager mInstance = null; private SharedPreferences preferences; private SessionManager() { } /** * Using singleton for SessionManager * * @param context as para * @return mInstance */ public static SessionManager getInstance(Context context) { if (mInstance == null) { synchronized (SessionManager.class) { if (mInstance == null) { mInstance = new SessionManager(); } if (mInstance.preferences == null) { mInstance.preferences = context.getSharedPreferences(PREFERENCE_NAME, Context.MODE_PRIVATE); } } } return mInstance; } /** * @return return senseFRXToken */ public String getUserToken() { return getPreferences().getString(USER_TOKEN, null); } public void setUserToken(String userToken) { getPreferencesEditor().putString(USER_TOKEN, userToken).commit(); } public String getDeviceId() { return getPreferences().getString(DEVICE_ID, null); } public void setDeviceId(String deviceId) { getPreferencesEditor().putString(DEVICE_ID, deviceId).commit(); } String getVersionName() { return getPreferences().getString(VERSION_NAME, null); } void setVersionName(String version) { getPreferencesEditor().putString(VERSION_NAME, version).commit(); } int getVersionCode() { return getPreferences().getInt(VERSION_CODE, -1); } void setVersionCode(int build) { getPreferencesEditor().putInt(VERSION_CODE, build).commit(); } String getCallStatus() { return getPreferences().getString(CALL_STATUS, null); } void setCallStatus(String status) { getPreferencesEditor().putString(CALL_STATUS, 
status).commit(); } private SharedPreferences getPreferences() { return preferences; } private SharedPreferences.Editor getPreferencesEditor() { return preferences.edit(); } }
0
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/ai/sensfrx
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/ai/sensfrx/data/MainViewModel.java
package ai.sensfrx.data; import android.app.Application; import androidx.annotation.NonNull; import androidx.lifecycle.AndroidViewModel; import androidx.lifecycle.MutableLiveData; import ai.sensfrx.sensfrx.data.model.DeviceInfoResponse; import com.google.gson.JsonObject; /** * ViewModel class for managing UI-related data in a lifecycle-conscious way. * This ViewModel handles data operations related to the Sensfrx SDK. */ public class MainViewModel extends AndroidViewModel { // Repository instance for interacting with data sources private final SensfrxRepository repository; /** * Constructs a new MainViewModel with the given application context. * * @param application The application context used for initializing the repository. */ public MainViewModel(@NonNull Application application) { super(application); // Initialize the repository with the application context repository = new SensfrxRepository(application); } /** * Sends device information to the server using the repository. * * @param deviceFingerprintObject A JsonObject containing device information. * @return A MutableLiveData object containing the response from the server. */ public MutableLiveData<DeviceInfoResponse> sendDeviceInformationToServer(JsonObject deviceFingerprintObject) { return repository.sendDeviceInformationToServer(deviceFingerprintObject); } /** * Pushes event logs to the server using the repository. * * @param event A JsonObject containing event data. * @return A MutableLiveData object containing the response from the server. */ public MutableLiveData<JsonObject> pushEvents(JsonObject event) { return repository.pushEvents(event); } /** * Notifies the Sensfrx server of a transaction. * * @param transactionJson A JsonObject containing transaction data. * @return A MutableLiveData object containing the response from the server. */ public MutableLiveData<JsonObject> pushTransaction(JsonObject transactionJson) { return repository.pushTransaction(transactionJson); } }
0
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/ai/sensfrx
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/ai/sensfrx/data/SensfrxRepository.java
package ai.sensfrx.data; import android.app.Application; import android.content.Context; import android.widget.Toast; import androidx.annotation.NonNull; import androidx.lifecycle.MutableLiveData; import ai.sensfrx.Sensfrx; import ai.sensfrx.SessionManager; import ai.sensfrx.df.AppInfo; import ai.sensfrx.network.RetrofitClient; import ai.sensfrx.sensfrx.data.model.DeviceInfoResponse; import com.google.gson.Gson; import com.google.gson.JsonObject; import org.json.JSONObject; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import ai.sensfrx.utils.AppSignature; import ai.sensfrx.utils.Constants; import ai.sensfrx.utils.SensfrxLogger; import retrofit2.Call; import retrofit2.Callback; import retrofit2.Response; class SensfrxRepository { /** * Repository for making a network calls * <br> */ private final MutableLiveData<DeviceInfoResponse> deviceInfoResponse; private final MutableLiveData<JsonObject> eventApiResponse; private static ExecutorService executor; private final Context context; /** * required constructor for instantiation * * @param application as context */ public SensfrxRepository(Application application) { deviceInfoResponse = new MutableLiveData<>(); eventApiResponse = new MutableLiveData<>(); executor = Executors.newSingleThreadExecutor(); this.context = application; } /** * Syncing data to server using executor * * @param jsonObject as request * @return response */ public MutableLiveData<DeviceInfoResponse> sendDeviceInformationToServer(JsonObject jsonObject) { executor.execute(() -> { String timestamp = String.valueOf(System.currentTimeMillis()); String sign = AppSignature.generateSignature("token"); //SensfrxLogger.d("PackageName > " + AppInfo.getPackageName(context.getApplicationContext())); //SensfrxLogger.d("SECRET_KEY > " + Constants.SECRET_KEY); //SensfrxLogger.d("Timestamp > " + timestamp); //SensfrxLogger.d("Signature > " + sign); //SensfrxLogger.d("jsonObject > " + jsonObject); Call<DeviceInfoResponse> call = 
RetrofitClient.getInstance().apiServices().requestForToken(AppInfo.getPackageName(context), Constants.SECRET_KEY, timestamp, sign, jsonObject); call.enqueue(new Callback<DeviceInfoResponse>() { @Override public void onResponse(@NonNull Call<DeviceInfoResponse> call, @NonNull Response<DeviceInfoResponse> response) { try { if (response.body().getStatus() == 200) { try { SensfrxLogger.d("Auth Token >> " + response.body().getData().getAstk()); SessionManager sessionManager = SessionManager.getInstance(context); sessionManager.setUserToken(response.body().getData().getAstk()); } catch (Exception e) { SensfrxLogger.e("Exception Occur >> " + e.getMessage()); } } else { SensfrxLogger.d("Device Information Response Message >> " + response.body().getMessage()); } } catch (Exception exception) { SensfrxLogger.d("Error Message >> " + exception.getMessage()); } } @Override public void onFailure(@NonNull Call<DeviceInfoResponse> call, @NonNull Throwable t) { SensfrxLogger.e("Device Information Logged Failed >> " + t.getMessage()); } }); }); return deviceInfoResponse; } /** * Syncing Events to server using executor * * @param events object as request * @return response */ public MutableLiveData<JsonObject> pushEvents(JsonObject events) { SensfrxLogger.d("Event Request >> " + events.toString()); executor.execute(() -> { String timestamp = String.valueOf(System.currentTimeMillis()); String sign = AppSignature.generateSignature("user_events"); Call<JsonObject> call = RetrofitClient.getInstance().apiServices().logEvents(AppInfo.getPackageName(context), Constants.SECRET_KEY, SessionManager.getInstance(context).getUserToken(),timestamp, sign, events); call.enqueue(new Callback<JsonObject>() { @Override public void onResponse(@NonNull Call<JsonObject> call, @NonNull Response<JsonObject> response) { if (response.code() == 200) { try { assert response.body() != null; eventApiResponse.setValue(response.body()); JSONObject jsonObject = new JSONObject(new Gson().toJson(response.body())); if 
(jsonObject.getString("status").equalsIgnoreCase("200")) { //Toast.makeText(context, jsonObject.getString("message"), Toast.LENGTH_SHORT).show(); } else if (jsonObject.getString("status").equalsIgnoreCase("403") && jsonObject.getString("message").equalsIgnoreCase("Token Expired")) { Sensfrx.reGenerateToken(); } else { SensfrxLogger.d("Error Message >> " + jsonObject.getString("message")); } } catch (Exception e) { SensfrxLogger.e("Exception Occur >> " + e.getMessage()); } } else { SensfrxLogger.d("Error Message >> " + response.code()); } } @Override public void onFailure(@NonNull Call<JsonObject> call, @NonNull Throwable t) { SensfrxLogger.e("Events Logged Failed >> " + t.getMessage()); // Handle failure response here JsonObject errorResponse = new JsonObject(); errorResponse.addProperty("status", "500"); errorResponse.addProperty("message", "Events logged failed"); eventApiResponse.setValue(errorResponse); } }); }); return eventApiResponse; } /** * This api call will inform to Sensfrx server about transaction. 
* * @param transactionJson object as request * @return response */ public MutableLiveData<JsonObject> pushTransaction(JsonObject transactionJson) { SensfrxLogger.d("Transaction Request >> " + transactionJson); executor.execute(() -> { String timestamp = String.valueOf(System.currentTimeMillis()); String sign = AppSignature.generateSignature("transaction"); Call<JsonObject> call = RetrofitClient.getInstance().apiServices().transaction(AppInfo.getPackageName(context), Constants.SECRET_KEY, timestamp, sign, transactionJson); call.enqueue(new Callback<JsonObject>() { @Override public void onResponse(@NonNull Call<JsonObject> call, @NonNull Response<JsonObject> response) { SensfrxLogger.d("Transaction Response >> " + response.body()); if (response.code() == 200) { try { JSONObject jsonObject = new JSONObject(new Gson().toJson(response.body())); if (jsonObject.getString("status").equalsIgnoreCase("allow")) { SensfrxLogger.d("Success Message >> " + jsonObject.getString("message")); //Toast.makeText(context, jsonObject.getString("message"), Toast.LENGTH_SHORT).show(); } else { SensfrxLogger.d("Error Message >> " + jsonObject.getString("message")); } } catch (Exception e) { SensfrxLogger.e("Exception Occur >> " + e.getMessage()); } } else { SensfrxLogger.d("Error Message >> " + response.code()); } } @Override public void onFailure(@NonNull Call<JsonObject> call, @NonNull Throwable t) { SensfrxLogger.e("Transaction Request Failed >> " + t.getMessage()); } }); }); return eventApiResponse; } }
0
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/ai/sensfrx/data
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/ai/sensfrx/data/model/AppEvent.java
package ai.sensfrx.data.model;

import com.google.gson.annotations.SerializedName;

/**
 * Event emitted on application state transitions; the payload carries the new
 * app state under the "app_state" JSON key.
 */
public class AppEvent extends Event {

    // Payload serialized under the "event_data" JSON key.
    @SerializedName("event_data")
    private final EventData eventData;

    public AppEvent(String eventName, String uId, EventData eventData) {
        super(eventName, uId);
        this.eventType = EVENT_TYPE_APP; // type tag inherited from Event
        this.eventData = eventData;
    }

    public static class EventData {

        // NOTE(review): scName is accepted but never stored — only scState is
        // kept. This looks like a copy/paste from ScreenChangeEvent.EventData;
        // confirm whether the first argument was meant to be recorded. The
        // parameter is kept to avoid breaking existing callers.
        public EventData(String scName, String scState) {
            this.scState = scState;
        }

        // Application state string (e.g. OPEN/FOREGROUND/...), serialized as "app_state".
        @SerializedName("app_state")
        private String scState;

        public String getScState() {
            return scState;
        }

        public void setScState(String scState) {
            this.scState = scState;
        }

        @Override
        public String toString() {
            return "{" + "appState='" + scState + '\'' + '}';
        }
    }

    @Override
    public String toString() {
        return "{" + "eventName='" + eventName + '\'' + ", uId='" + uId + '\'' + ", timeStamp='" + timeStamp + '\'' + ", eventType='" + eventType + '\'' + ", eventData=" + eventData + '}';
    }
}
0
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/ai/sensfrx/data
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/ai/sensfrx/data/model/CallEvent.java
package ai.sensfrx.data.model;

import com.google.gson.annotations.SerializedName;

/**
 * Event describing the device's telephony call state (idle / ringing / active).
 */
public class CallEvent extends Event {

    /** Payload serialized under the "event_data" JSON key. */
    @SerializedName("event_data")
    private final EventData eventData;

    /**
     * @param eventName logical event name (e.g. "call_event")
     * @param uId       user identifier; may be null
     * @param eventData payload holding the call-state string
     */
    public CallEvent(String eventName, String uId, EventData eventData) {
        super(eventName, uId);
        this.eventType = EVENT_ON_GOING_CALL;
        this.eventData = eventData;
    }

    /** Payload: a single human-readable call-state string, keyed "call_state". */
    public static class EventData {

        @SerializedName("call_state")
        private final String callState;

        public EventData(String callState) {
            this.callState = callState;
        }

        @Override
        public String toString() {
            return "{callState='" + callState + "'}";
        }
    }

    @Override
    public String toString() {
        return "{eventName='" + eventName + "', uId='" + uId + "', timeStamp='" + timeStamp
                + "', eventType='" + eventType + "', eventData=" + eventData + "}";
    }
}
0
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/ai/sensfrx/data
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/ai/sensfrx/data/model/ClickEvent.java
package ai.sensfrx.data.model; import com.google.gson.annotations.SerializedName; public class ClickEvent extends Event { /** * Model for Click Event * <br></br> * In the click Event we will get display and gyroscope coordinates */ @SerializedName("event_data") private final EventData eventData; public ClickEvent(String eventName, String uId, EventData eventData) { super(eventName, uId); this.eventType = EVENT_TYPE_CLICK; this.eventName = eventName; this.eventData = eventData; } public static class EventData { public EventData(String dOrientation, String motionEvent, GyroscopCord gyroscopCord, DisplayCord displayCord) { this.dOrientation = dOrientation; this.motionEvent = motionEvent; this.gyroscopCord = gyroscopCord; this.displayCord = displayCord; } @SerializedName("d_orientation") private final String dOrientation; @SerializedName("motion_event") private final String motionEvent; @SerializedName("gyroscope_cord") private final GyroscopCord gyroscopCord; @SerializedName("display_cord") private final DisplayCord displayCord; @Override public String toString() { return "{" + "dOrientation='" + dOrientation + '\'' + ", motionEvent='" + motionEvent + '\'' + ", gyroscopeCord=" + gyroscopCord + ", displayCord=" + displayCord + '}'; } } public static class DisplayCord { @SerializedName("x") private final String x; @SerializedName("y") private final String y; public DisplayCord(String x, String y) { this.x = x; this.y = y; } @Override public String toString() { return "{" + "x='" + x + '\'' + ", y='" + y + '\'' + '}'; } } public static class GyroscopCord { @SerializedName("x") private final String x; @SerializedName("y") private final String y; @SerializedName("z") private final String z; public GyroscopCord(String x, String y, String z) { this.x = x; this.y = y; this.z = z; } @Override public String toString() { return "{" + "x='" + x + '\'' + ", y='" + y + '\'' + ", z='" + z + '\'' + '}'; } } @Override public String toString() { return "{" + "eventData=" + eventData + 
", eventName='" + eventName + '\'' + ", uId='" + uId + '\'' + ", timeStamp='" + timeStamp + '\'' + ", eventType='" + eventType + '\'' + '}'; } }
0
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/ai/sensfrx/data
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/ai/sensfrx/data/model/DeviceInfoResponse.java
package ai.sensfrx.sensfrx.data.model;

import com.google.gson.annotations.Expose;
import com.google.gson.annotations.SerializedName;

/**
 * Gson model for the token-request ("device info") API response:
 * {@code {"status": ..., "message": ..., "data": {"astk": ...}}}.
 */
public class DeviceInfoResponse {

    @SerializedName("status")
    @Expose
    private Integer status;

    @SerializedName("message")
    @Expose
    private String message;

    @SerializedName("data")
    @Expose
    private DeviceInfoResponseData data;

    public Integer getStatus() {
        return status;
    }

    public void setStatus(Integer status) {
        this.status = status;
    }

    public String getMessage() {
        return message;
    }

    public void setMessage(String message) {
        this.message = message;
    }

    public DeviceInfoResponseData getData() {
        return data;
    }

    public void setData(DeviceInfoResponseData data) {
        this.data = data;
    }

    /** Nested payload carrying the issued auth token ("astk"). */
    public static class DeviceInfoResponseData {

        @SerializedName("astk")
        @Expose
        private String astk;

        public String getAstk() {
            return astk;
        }

        public void setAstk(String astk) {
            this.astk = astk;
        }
    }
}
0
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/ai/sensfrx/data
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/ai/sensfrx/data/model/DeviceLogs.java
package ai.sensfrx.data.model; import com.google.gson.JsonObject; import com.google.gson.annotations.SerializedName; /** * Created by Sensfrx on 14-09-2022. * <br> * Copyright (c) 2022 SecureLayer7 Technologies. All rights reserved. */ public class DeviceLogs extends Event { @SerializedName("event_data") private final EventData eventData; public DeviceLogs(String eventName, String uId, EventData eventData) { super(eventName, uId); this.eventType = EVENT_DEVICE_LOG; this.eventData = eventData; } public static class EventData { public EventData(JsonObject jsonObject) { this.jsonObject = jsonObject; } @SerializedName("device_log") private JsonObject jsonObject; @Override public String toString() { return "{" + "" + jsonObject + '}'; } } @Override public String toString() { return "{" + "eventData=" + eventData + ", eventName='" + eventName + '\'' + ", uId='" + uId + '\'' + ", timeStamp='" + timeStamp + '\'' + ", eventType='" + eventType + '\'' + '}'; } }
0
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/ai/sensfrx/data
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/ai/sensfrx/data/model/Event.java
package ai.sensfrx.data.model;

import ai.sensfrx.utils.Utils;

import com.google.gson.annotations.SerializedName;

/**
 * Base class for all Sensfrx analytics events. Holds the fields common to
 * every event type; concrete subclasses set {@code eventType} to one of the
 * EVENT_* constants and add their own "event_data" payload.
 */
public abstract class Event {

    // Values assigned to the "event_type" field by the concrete subclasses.
    public static final String EVENT_TYPE_APP = "app_event";
    public static final String EVENT_TYPE_LOCATION = "location";
    public static final String EVENT_TYPE_SCREEN = "screen";
    public static final String EVENT_TYPE_CLICK = "click";
    public static final String EVENT_DEVICE_LOG = "logs";
    public static final String EVENT_ON_GOING_CALL = "call";

    // Logical event name, serialized as "event_name".
    @SerializedName("event_name")
    String eventName;

    // User identifier, serialized as "uId"; may be null when no user is set.
    @SerializedName("uId")
    String uId;

    // Creation timestamp, captured once when the event is constructed.
    @SerializedName("time_stamp")
    final String timeStamp;

    // Type tag; assigned by each subclass constructor (see EVENT_* constants).
    @SerializedName("event_type")
    String eventType;

    /**
     * @param eventName logical event name
     * @param uId       user identifier; may be null
     */
    protected Event(String eventName, String uId) {
        this.eventName = eventName;
        this.uId = uId;
        this.timeStamp = Utils.getTimestamp();
    }
}
0
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/ai/sensfrx/data
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/ai/sensfrx/data/model/LocationInfo.java
package ai.sensfrx.data.model; /** * Created by Sensfrx on 07-09-2022. * <br> * Copyright (c) 2022 SecureLayer7 Technologies. All rights reserved. */ public class LocationInfo { public LocationInfo(String latitude, String longitude, String city, String state, String country, String address, String pin) { this.latitude = latitude; this.longitude = longitude; this.city = city; this.state = state; this.country = country; this.address = address; this.pin = pin; } private String latitude; private String longitude; private String city; private String state; private String country; private String address; private String pin; }
0
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/ai/sensfrx/data
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/ai/sensfrx/data/model/ScreenChangeEvent.java
package ai.sensfrx.data.model; import com.google.gson.annotations.SerializedName; public class ScreenChangeEvent extends Event { @SerializedName("event_data") private final EventData eventData; public ScreenChangeEvent(String eventName, String uId, EventData eventData) { super(eventName, uId); this.eventType = EVENT_TYPE_SCREEN; this.eventData = eventData; } public static class EventData { public EventData(String scName, String scState) { this.scName = scName; this.scState = scState; } @SerializedName("sc_name") private String scName; @SerializedName("sc_state") private String scState; public String getScName() { return scName; } public void setScName(String scName) { this.scName = scName; } public String getScState() { return scState; } public void setScState(String scState) { this.scState = scState; } @Override public String toString() { return "{" + "scName='" + scName + '\'' + ", scState='" + scState + '\'' + '}'; } } @Override public String toString() { return "{" + "eventName='" + eventName + '\'' + ", uId='" + uId + '\'' + ", timeStamp='" + timeStamp + '\'' + ", eventType='" + eventType + '\'' + ", eventData=" + eventData + '}'; } }
0
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/ai/sensfrx
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/ai/sensfrx/df/AppInfo.java
package ai.sensfrx.df;

import android.content.Context;
import android.content.pm.ApplicationInfo;
import android.content.pm.PackageInfo;
import android.content.pm.PackageManager;

import java.util.Date;

/**
 * Static helpers that read application-level metadata (name, version,
 * install/update dates, install-vs-update state) from the host app's package.
 */
public class AppInfo {

    /**
     * Returns the host application's display label.
     *
     * @param mContext caller context
     * @return the application name
     */
    public static String getApplicationName(Context mContext) {
        return (String) mContext.getPackageManager().getApplicationLabel(getApplicationInfo(mContext));
    }

    /**
     * Returns the host application's package name.
     *
     * @param context caller context
     * @return the package name
     */
    public static String getPackageName(Context context) {
        return context.getPackageName();
    }

    /**
     * Looks up the host application's {@link ApplicationInfo}.
     *
     * @param mContext caller context
     * @return the application info
     */
    private static ApplicationInfo getApplicationInfo(Context mContext) {
        PackageManager packageManager = mContext.getPackageManager();
        try {
            return packageManager.getApplicationInfo(mContext.getPackageName(), 0);
        } catch (PackageManager.NameNotFoundException e) {
            // Own package must always resolve; treat a miss as a programming error.
            throw new AssertionError("Package not found: " + mContext.getPackageName());
        }
    }

    /**
     * Returns the date the application was first installed.
     *
     * @param mContext caller context
     * @return the install date as a string
     */
    public static String appInstallDate(Context mContext) {
        String pkg = mContext.getPackageName();
        long installedAt = 0;
        try {
            installedAt = mContext.getPackageManager().getPackageInfo(pkg, 0).firstInstallTime;
        } catch (PackageManager.NameNotFoundException e) {
            // Best-effort: fall through and report the epoch rather than failing.
            e.printStackTrace();
        }
        return new Date(installedAt).toString();
    }

    /**
     * Returns the date the application was last updated.
     *
     * @param mContext caller context
     * @return the update date as a string
     */
    public static String appUpdateDate(Context mContext) {
        String pkg = mContext.getPackageName();
        long updatedAt = 0;
        try {
            updatedAt = mContext.getPackageManager().getPackageInfo(pkg, 0).lastUpdateTime;
        } catch (PackageManager.NameNotFoundException e) {
            // Best-effort: fall through and report the epoch rather than failing.
            e.printStackTrace();
        }
        return new Date(updatedAt).toString();
    }

    /**
     * Looks up the host application's {@link PackageInfo}.
     *
     * @param mContext caller context
     * @return the package info
     */
    private static PackageInfo getPackageInfo(Context mContext) {
        PackageManager packageManager = mContext.getPackageManager();
        try {
            return packageManager.getPackageInfo(mContext.getPackageName(), 0);
        } catch (PackageManager.NameNotFoundException e) {
            // Own package must always resolve; treat a miss as a programming error.
            throw new AssertionError("Package not found: " + mContext.getPackageName());
        }
    }

    /**
     * Returns the application's version name.
     *
     * @param mContext caller context
     * @return the version name
     */
    public static String getAppVersionName(Context mContext) {
        return getPackageInfo(mContext).versionName;
    }

    /**
     * Returns the application's version code.
     *
     * @param mContext caller context
     * @return the version code
     */
    public static int getAppVersionCode(Context mContext) {
        return getPackageInfo(mContext).versionCode;
    }

    // Last state computed by getAppState(); read back via logAppState().
    private static String mAppState = null;

    /**
     * Classifies the launch as a fresh install, an update, or a plain open by
     * comparing the previously stored build number against the current one.
     *
     * @param previousBuild  previously persisted version code, or -1 when none exists
     * @param appVersionCode current version code
     * @return the computed state string
     */
    public static String getAppState(int previousBuild, int appVersionCode) {
        if (previousBuild == -1) {
            mAppState = "Application Installed";
        } else if (previousBuild != appVersionCode) {
            mAppState = "Application Updated";
        } else {
            mAppState = "Application Open";
        }
        return mAppState;
    }

    /**
     * @return the state last computed by {@link #getAppState(int, int)}, or
     *         {@code null} if it has not been called yet
     */
    public static String logAppState() {
        return mAppState;
    }
}
0
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/ai/sensfrx
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/ai/sensfrx/df/BatteryInformation.java
package ai.sensfrx.df;

import android.content.Context;

/**
 * Battery-related fingerprint facade. Every method is a final, public
 * re-exposure of the corresponding protected implementation inherited from
 * {@link DeviceFingerprint}; no logic lives in this class.
 */
public class BatteryInformation extends DeviceFingerprint {

    /**
     * Battery Constructor
     *
     * @param context caller context, forwarded to the base fingerprint class
     */
    public BatteryInformation(Context context) {
        super(context);
    }

    /** @return current battery charge level as a percentage (delegates to the base implementation) */
    @Override
    public final int getBatteryPercent() {
        return super.getBatteryPercent();
    }

    /** @return whether the device is plugged in to AC or USB (delegates to the base implementation) */
    @Override
    public final boolean isPhoneCharging() {
        return super.isPhoneCharging();
    }

    /** @return human-readable battery health string (delegates to the base implementation) */
    @Override
    public final String getBatteryHealth() {
        return super.getBatteryHealth();
    }

    /** @return battery technology string (delegates to the base implementation) */
    @Override
    public final String getBatteryTechnology() {
        return super.getBatteryTechnology();
    }

    /** @return battery temperature (delegates to the base implementation) */
    @Override
    public final float getBatteryTemperature() {
        return super.getBatteryTemperature();
    }

    /** @return the source currently charging the device (delegates to the base implementation) */
    @Override
    public final String getChargingSource() {
        return super.getChargingSource();
    }

    /** @return battery capacity (delegates to the base implementation) */
    @Override
    public final double getBatteryCapacity(Context context) {
        return super.getBatteryCapacity(context);
    }
}
0
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/ai/sensfrx
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/ai/sensfrx/df/CpuInformation.java
package ai.sensfrx.df;

import android.content.Context;

/**
 * CPU-related fingerprint facade. Most methods are final, public re-exposures
 * of the corresponding protected implementations inherited from
 * {@link DeviceFingerprint}; getSOC()/getProcessor() additionally read keys
 * out of the parsed /proc/cpuinfo map.
 */
public class CpuInformation extends DeviceFingerprint {

    /**
     * This class is responsible for fetching the cpu information
     *
     * @param context caller context, forwarded to the base fingerprint class
     */
    public CpuInformation(Context context) {
        super(context);
    }

    /** @return "64-Bit" or "32-Bit" depending on the supported ABIs (delegates to the base implementation) */
    @Override
    public final String getCpuArchitecture() {
        return super.getCpuArchitecture();
    }

    /** @return number of CPU cores counted under /sys/devices/system/cpu (delegates to the base implementation) */
    @Override
    public final int getCpuNumOfCores() {
        return super.getCpuNumOfCores();
    }

    /** @return BogoMIPS value parsed from /proc/cpuinfo (delegates to the base implementation) */
    @Override
    public final String getBogoMIPS() {
        return super.getBogoMIPS();
    }

    /** @return the Build.DEVICE identifier (delegates to the base implementation) */
    @Override
    public final String getDevice() {
        return super.getDevice();
    }

    /**
     * @param core core index whose cpufreq scaling governor should be read
     * @return the governor name, or an empty/"Not Found" string on failure (delegates to the base implementation)
     */
    @Override
    public final String getCPUGovernor(int core) {
        return super.getCPUGovernor(core);
    }

    /** @return canonical CPU arch name derived from Build.SUPPORTED_ABIS (delegates to the base implementation) */
    @Override
    public final String getCpuAbi() {
        return super.getCpuAbi();
    }

    /** @return CPU usage percentage computed from core-0 current vs. max frequency (delegates to the base implementation) */
    @Override
    public final int getCpuUsageInPercentage() {
        return super.getCpuUsageInPercentage();
    }

    /**
     * @return the "Hardware" entry from /proc/cpuinfo; NOTE(review): may be
     *         {@code null} when the key is absent on this device — callers
     *         should handle that.
     */
    public final String getSOC() {
        return getCpuInfoMap().get("Hardware");
    }

    /**
     * @return the "Processor" entry from /proc/cpuinfo; NOTE(review): may be
     *         {@code null} when the key is absent on this device — callers
     *         should handle that.
     */
    public final String getProcessor() {
        return getCpuInfoMap().get("Processor");
    }
}
0
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/ai/sensfrx
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/ai/sensfrx/df/DeviceFingerprint.java
package ai.sensfrx.df; import android.annotation.SuppressLint; import android.app.ActivityManager; import android.app.KeyguardManager; import android.bluetooth.BluetoothAdapter; import android.content.ContentResolver; import android.content.Context; import android.content.Intent; import android.content.IntentFilter; import android.content.pm.ApplicationInfo; import android.content.pm.ConfigurationInfo; import android.content.pm.PackageInfo; import android.content.pm.PackageManager; import android.content.res.Configuration; import android.graphics.Point; import android.hardware.usb.UsbDevice; import android.hardware.usb.UsbManager; import android.media.AudioManager; import android.net.ConnectivityManager; import android.net.Network; import android.net.NetworkCapabilities; import android.os.BatteryManager; import android.os.Build; import android.os.Environment; import android.os.PowerManager; import android.os.StatFs; import android.provider.Settings; import android.security.keystore.KeyProperties; import android.telephony.TelephonyManager; import android.text.format.DateFormat; import android.util.DisplayMetrics; import android.util.Log; import android.view.Display; import android.view.WindowManager; import ai.sensfrx.utils.SensfrxLogger; import ai.sensfrx.utils.Utils; import java.io.BufferedReader; import java.io.File; import java.io.FileFilter; import java.io.FileNotFoundException; import java.io.FileReader; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.RandomAccessFile; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.net.Inet4Address; import java.net.InetAddress; import java.net.NetworkInterface; import java.util.ArrayList; import java.util.Arrays; import java.util.Enumeration; import java.util.HashMap; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Objects; import java.util.Scanner; import java.util.TimeZone; import 
java.util.regex.Pattern; abstract class DeviceFingerprint { /** * This is super class for all the device fingerprinting related operation */ private final Context context; /** * Required Constructor * * @param context as para */ public DeviceFingerprint(Context context) { this.context = context; } public boolean isMockLocationEnabled() { if (Build.VERSION.SDK_INT < Build.VERSION_CODES.M) { return "0".equals(Settings.Secure.getString(context.getContentResolver(), Settings.Secure.ALLOW_MOCK_LOCATION)); } else { PackageManager pm = context.getPackageManager(); List<ApplicationInfo> packages = pm.getInstalledApplications(PackageManager.GET_META_DATA); if (packages != null) { for (ApplicationInfo applicationInfo : packages) { try { PackageInfo packageInfo = pm.getPackageInfo(applicationInfo.packageName, PackageManager.GET_PERMISSIONS); // Get Permissions String[] requestedPermissions = packageInfo.requestedPermissions; if (requestedPermissions != null) { for (int i = 0; i < requestedPermissions.length; i++) { if (requestedPermissions[i].equals("android.permission.ACCESS_MOCK_LOCATION") && !applicationInfo.packageName.equals(context.getPackageName())) { return true; } } } } catch (PackageManager.NameNotFoundException e) { Log.e("Mock location check", e.getMessage()); } } } return false; } } public String getAppsWithMockPermission(Context context) { PackageManager packageManager = context.getPackageManager(); List<String> appsWithMockPermission = new ArrayList<>(); List<ApplicationInfo> installedApps = packageManager.getInstalledApplications(PackageManager.GET_META_DATA); for (ApplicationInfo appInfo : installedApps) { try { PackageInfo packageInfo = packageManager.getPackageInfo(appInfo.packageName, PackageManager.GET_PERMISSIONS); String[] permissions = packageInfo.requestedPermissions; if (permissions != null && containsMockLocationPermission(permissions)) { appsWithMockPermission.add(appInfo.packageName); } } catch (PackageManager.NameNotFoundException e) { 
SensfrxLogger.e("Error encountered while retrieving applications with mock permissions."); } } return String.join(", ", appsWithMockPermission); } private boolean containsMockLocationPermission(String[] permissions) { if (permissions != null) { for (String permission : permissions) { if ("android.permission.ACCESS_MOCK_LOCATION".equals(permission)) { return true; } } } return false; } public String getNightModeStatus() { int nightMode = context.getResources().getConfiguration().uiMode & android.content.res.Configuration.UI_MODE_NIGHT_MASK; switch (nightMode) { case android.content.res.Configuration.UI_MODE_NIGHT_YES: return "1"; case android.content.res.Configuration.UI_MODE_NIGHT_NO: return "0"; case android.content.res.Configuration.UI_MODE_NIGHT_UNDEFINED: return "undefined"; default: return "Unknown"; } } public int getScreenBrightness() { ContentResolver contentResolver = context.getContentResolver(); try { //SensfrxLogger.d( "Current Brightness Level: " + brightnessLevel); return Settings.System.getInt(contentResolver, Settings.System.SCREEN_BRIGHTNESS); } catch (Settings.SettingNotFoundException e) { //SensfrxLogger.e( "Error getting screen brightness " + e); return -1; } } public boolean isPowerSavingModeEnabled(Context context) { PowerManager powerManager = (PowerManager) context.getSystemService(Context.POWER_SERVICE); if (powerManager != null) { return powerManager.isPowerSaveMode(); } return false; } public boolean isDNDModeEnabled(Context context) { if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) { AudioManager audioManager = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE); if (audioManager != null) { return audioManager.getRingerMode() == AudioManager.RINGER_MODE_SILENT || audioManager.getRingerMode() == AudioManager.RINGER_MODE_VIBRATE; } } return false; } @SuppressLint("DefaultLocale") public String getAllVolumeLevelsInfo() { AudioManager audioManager = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE); int 
alarmVolume = audioManager.getStreamVolume(AudioManager.STREAM_ALARM); int dtmfVolume = audioManager.getStreamVolume(AudioManager.STREAM_DTMF); int musicVolume = audioManager.getStreamVolume(AudioManager.STREAM_MUSIC); int notificationVolume = audioManager.getStreamVolume(AudioManager.STREAM_NOTIFICATION); int ringVolume = audioManager.getStreamVolume(AudioManager.STREAM_RING); int systemVolume = audioManager.getStreamVolume(AudioManager.STREAM_SYSTEM); int callVolume = audioManager.getStreamVolume(AudioManager.STREAM_VOICE_CALL); return String.format("Alarm volume level: %d," + "DTMF volume level: %d," + "Music volume level: %d," + "Notification volume level: %d," + "Ring volume level: %d," + "System volume level: %d," + "Call volume level: %d", alarmVolume, dtmfVolume, musicVolume, notificationVolume, ringVolume, systemVolume, callVolume); } public boolean isDualSim(Context context) { TelephonyManager telephonyManager = (TelephonyManager) context.getSystemService(Context.TELEPHONY_SERVICE); if (telephonyManager != null) { int simCount = 0; if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.M) { simCount = telephonyManager.getPhoneCount(); } return simCount > 1; } return false; } public String checkBluetoothStatus() { BluetoothAdapter bluetoothAdapter = BluetoothAdapter.getDefaultAdapter(); if (bluetoothAdapter == null) { return "not supported"; } if (bluetoothAdapter.isEnabled()) { return "1"; } else { return "0"; } } public String getAMPMFormat(Context context) { boolean is24HourFormat = DateFormat.is24HourFormat(context); if (is24HourFormat) { return "0"; // No AM/PM indicator for 24-hour format } else { // // Use Calendar to determine if it's AM or PM // Calendar calendar = Calendar.getInstance(); // int amPm = calendar.get(Calendar.AM_PM); // return (amPm == Calendar.AM) ? 
"AM" : "PM"; return "1"; } } public String getLockTypeString(Context context) { KeyguardManager keyguardManager = (KeyguardManager) context.getSystemService(Context.KEYGUARD_SERVICE); if (keyguardManager != null) { int lockType; if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) { //Android 6.0 (API level 23) and above lockType = keyguardManager.isDeviceSecure() ? KeyProperties.PURPOSE_ENCRYPT : KeyProperties.PURPOSE_DECRYPT; } else { // For versions below Android 6.0, you might not get detailed lock type information lockType = keyguardManager.isKeyguardSecure() ? 1 : 0; } switch (lockType) { case KeyProperties.PURPOSE_DECRYPT: return "Device is secure (Decrypt)"; case 1: return "Device is secure"; case 0: return "Device is not secure"; default: return "Unknown"; } } return "Unknown"; } public boolean isSimCardLocked(Context context) { TelephonyManager telephonyManager = (TelephonyManager) context.getSystemService(Context.TELEPHONY_SERVICE); if (telephonyManager != null) { int simState = telephonyManager.getSimState(); return simState == TelephonyManager.SIM_STATE_PIN_REQUIRED || simState == TelephonyManager.SIM_STATE_PUK_REQUIRED; } return false; } public String getManifestPermissions(String packageName) { PackageManager packageManager = context.getPackageManager(); PackageInfo packageInfo = null; try { packageInfo = packageManager.getPackageInfo(packageName, PackageManager.GET_PERMISSIONS); } catch (PackageManager.NameNotFoundException e) { e.printStackTrace(); } StringBuilder permissionsStringBuilder = new StringBuilder(); if (packageInfo != null && packageInfo.requestedPermissions != null) { for (String permission : packageInfo.requestedPermissions) { // Check if the permission is not null and not empty before adding it to the list if (permission != null && !permission.trim().isEmpty()) { permissionsStringBuilder.append(permission).append(","); } } } // Remove the trailing comma if permissions were added if (permissionsStringBuilder.length() > 0) { 
permissionsStringBuilder.deleteCharAt(permissionsStringBuilder.length() - 1); } return permissionsStringBuilder.toString(); } protected boolean isEmulatorOrRealDevice() { String product = Build.PRODUCT; String fingerprint = Build.FINGERPRINT; String manufacturer = Build.MANUFACTURER; String model = Build.MODEL; String brand = Build.BRAND; String device = Build.DEVICE; return (product != null && (product.equals("google_sdk") || product.equals("sdk_google_phone_x86") || product.equals("sdk") || product.equals("sdk_x86") || product.equals("vbox86p"))) || (fingerprint != null && fingerprint.contains("generic")) || (manufacturer != null && manufacturer.contains("Genymotion")) || (model != null && (model.contains("Emulator") || model.contains("Android SDK built for x86"))) || (brand != null && brand.contains("generic") && device != null && device.contains("generic")); } // public boolean isDeveloperModeEnabled() { // ContentResolver contentResolver = context.getContentResolver(); // int adbEnabled = Settings.Secure.getInt(contentResolver, Settings.Secure.ADB_ENABLED, 0); // return adbEnabled == 1; // } @SuppressLint("NewApi") public boolean isDeveloperModeEnabled() { int sdkVersion = android.os.Build.VERSION.SDK_INT; ContentResolver contentResolver = context.getApplicationContext().getContentResolver(); if (sdkVersion >= Build.VERSION_CODES.JELLY_BEAN_MR1) { return android.provider.Settings.Global.getInt(contentResolver, android.provider.Settings.Global.DEVELOPMENT_SETTINGS_ENABLED, 0) != 0; } else { return false; } } protected boolean isVpnConnected(Context context) { ConnectivityManager cm = (ConnectivityManager) context.getSystemService(Context.CONNECTIVITY_SERVICE); if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) { Network activeNetwork = cm.getActiveNetwork(); if (activeNetwork != null) { NetworkCapabilities capabilities = cm.getNetworkCapabilities(activeNetwork); return capabilities != null && capabilities.hasTransport(NetworkCapabilities.TRANSPORT_VPN); } } 
else { // For devices below Android M, there is no direct API to check VPN status. // You can implement a workaround like making a network request to a known server // and checking the server's IP address to see if it belongs to a known VPN service. // However, this method is not foolproof and may give false positives/negatives. // For simplicity, we'll just return false. return false; } return false; } protected float getDefaultTimeZoneOffsetInHours() { TimeZone defaultTimeZone = TimeZone.getDefault(); int defaultTimeZoneRawOffset = defaultTimeZone.getRawOffset(); float defaultTimeZoneOffsetInHours = (float) defaultTimeZoneRawOffset / 3600000; return defaultTimeZoneOffsetInHours; } /** * It will return the Combination of * Device Manufacturer and Model * * @return manufacturer + " " + model */ protected String getDeviceName() { String manufacturer = Build.MANUFACTURER; String model = Build.MODEL; if (model.startsWith(manufacturer)) { return model; } else { return manufacturer + " " + model; } } /** * Get the Device orientation * <br></br> * PORTRAIT or LANDSCAPE * * @param context as request * @return orientation */ protected String getDeviceOrientation(Context context) { int orientation = context.getResources().getConfiguration().orientation; return orientation == Configuration.ORIENTATION_PORTRAIT ? "Portrait" : "Landscape"; } /** * Get Device Locale * eg. 
In, En * * @return locale */ protected String getDeviceLocale() { String locale = null; Locale current = context.getResources().getConfiguration().locale; if (current != null) { locale = current.toString(); } return locale; } /** * Get Device Manufacturer information * * @return manufacturer */ protected String getDeviceManufacturer() { return Build.MANUFACTURER; } /** * Get Device Model * * @return model */ protected String getModel() { return Build.MODEL; } /** * Get Build Fingerprint * <br> * $(BRAND)/$(PRODUCT)/$(DEVICE)/$(BOARD):$(VERSION.RELEASE)/$(ID)/$(VERSION.INCREMENTAL):$(TYPE)/$(TAGS) * * @return build fingerprint */ protected String getFingerprint() { return Build.FINGERPRINT; } /** * Processor Manufacturer * * @return hardware */ protected String getHardware() { return Build.HARDWARE; } /** * Get Brand name * * @return brand */ protected String getBuildBrand() { return Build.BRAND; } /** * Device * <br>Eg. violet,emulator64_x86_64_arm64 * * @return device */ protected String getDevice() { return Build.DEVICE; } /** * Device Board * <br>Eg. 
goldfish_x86_64,violet * * @return board */ protected String getBoard() { return Build.BOARD; } /** * Get Display Version * * @return display version */ protected String getDisplayVersion() { return Build.DISPLAY; } /** * Get Device USB Host supported or not * * @return host */ protected boolean getUsbHost(Context context) { UsbManager mUsbManager = (UsbManager) context.getSystemService(Context.USB_SERVICE); HashMap<String, UsbDevice> deviceList = mUsbManager.getDeviceList(); return !deviceList.isEmpty(); } /** * Get Device Build Time * * @return time */ protected long getBuildTime() { return Build.TIME; } /** * Get Device Build user * * @return build user */ protected String getBuildUser() { return Build.USER; } /** * Get Device Serial * * @return serial */ protected String getSerial() { return Build.SERIAL; } /** * Get Device Os Version * * @return os version */ protected String getOSVersion() { return Build.VERSION.RELEASE; } /** * Get Device default Language * * @return language */ protected String getLanguage() { return Locale.getDefault().getLanguage(); } /** * Get Device sdk version * * @return sdk */ protected int getSdkVersion() { return Build.VERSION.SDK_INT; } /** * Get Release Version * * @return Release Version */ protected String getReleaseBuildVersion() { return Build.VERSION.RELEASE; } /** * Get Build Version * * @return build */ protected String getBuildVersionCodeName() { return Build.VERSION.CODENAME; } /** * Get Device Screen or Display Height * * @return height */ protected int getScreenHeight() { WindowManager wm = (WindowManager) context.getSystemService(Context.WINDOW_SERVICE); Display display = wm.getDefaultDisplay(); Point size = new Point(); display.getSize(size); return size.y; } /** * Get Device Screen or Display Width * * @return width */ protected int getScreenWidth() { WindowManager wm = (WindowManager) context.getSystemService(Context.WINDOW_SERVICE); Display display = wm.getDefaultDisplay(); Point size = new Point(); 
display.getSize(size); return size.x; } /** * Get phone type Eg. GSM, CDMA * * @param context as application context * @return phone type */ protected String getPhoneType(Context context) { TelephonyManager tm = (TelephonyManager) context.getSystemService(Context.TELEPHONY_SERVICE); String PHONE_TYPE_GSM = "GSM"; String PHONE_TYPE_CDMA = "CDMA"; String PHONE_TYPE_NONE = "Unknown"; switch (tm.getPhoneType()) { case TelephonyManager.PHONE_TYPE_GSM: return PHONE_TYPE_GSM; case TelephonyManager.PHONE_TYPE_CDMA: return PHONE_TYPE_CDMA; case TelephonyManager.PHONE_TYPE_NONE: default: return PHONE_TYPE_NONE; } } /** * Get Base Band Version * * @return base band */ protected String getBaseBandVersion() { return Build.getRadioVersion(); } /** * Get Operator information * <br>Basically sim information Eg. Airtel, Jio * * @return operator */ protected String getOperator() { String operatorName; TelephonyManager telephonyManager = ((TelephonyManager) context.getSystemService(Context.TELEPHONY_SERVICE)); operatorName = telephonyManager.getNetworkOperatorName(); if (operatorName == null) operatorName = telephonyManager.getSimOperatorName(); return operatorName; } /** * Get the device ringer mode * <br>Eg. 
Normal, Silent, Vibration * * @return ringer mode */ protected String getDeviceRingerMode() { AudioManager audioManager = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE); String RINGER_MODE_NORMAL = "Normal"; String RINGER_MODE_SILENT = "Silent"; String RINGER_MODE_VIBRATE = "Vibrate"; switch (audioManager.getRingerMode()) { case AudioManager.RINGER_MODE_SILENT: return RINGER_MODE_SILENT; case AudioManager.RINGER_MODE_VIBRATE: return RINGER_MODE_VIBRATE; default: return RINGER_MODE_NORMAL; } } /** * Get the device root status * * @return root status */ protected boolean isDeviceRooted() { String[] paths = {"/system/app/Superuser.apk", "/sbin/su", "/system/bin/su", "/system/xbin/su", "/data/local/xbin/su", "/data/local/bin/su", "/system/sd/xbin/su", "/system/bin/failsafe/su", "/data/local/su", "/su/bin/su"}; for (String path : paths) { if (new File(path).exists()) return true; } return false; } /** * Get security patch level of the device; * * @return security patch */ protected String getSecurityPatchLevel() { StringBuilder str = new StringBuilder(); try { Process process = new ProcessBuilder().command("/system/bin/getprop").redirectErrorStream(true).start(); InputStream is = process.getInputStream(); BufferedReader br = new BufferedReader(new InputStreamReader(is)); String line; while ((line = br.readLine()) != null) { str.append(line).append("\n"); if (str.toString().contains("security_patch")) { String[] splitter = line.split(":"); if (splitter.length == 2) { return splitter[1]; } break; } } br.close(); process.destroy(); } catch (IOException e) { SensfrxLogger.e("Exception in Security Patch Level >> " + e.getMessage()); } return str.toString(); } /** * Get Build host * * @return host */ protected String getBuildHost() { return Build.HOST; } /** * Returns if the device is using dalvik or any other jvm and its version; * * @return dalvik or arm */ protected String getVirtualMachine() { return System.getProperty("java.vm.name") + " " + 
System.getProperty("java.vm.version"); } /** * Get the kernel version of the device; * * @return kernel version */ protected String getKernelVersion() { return System.getProperty("os.version"); } /** * Get openGL ES Version(Graphic) * * @param context as application * @return openGL version */ protected String getOpenGlVersion(Context context) { ActivityManager activityManager = (ActivityManager) context.getSystemService(Context.ACTIVITY_SERVICE); ConfigurationInfo configurationInfo = activityManager.getDeviceConfigurationInfo(); return configurationInfo.getGlEsVersion(); } /** * Checks if treble is supported; * * @return treble supported or not */ protected String getTreble() { String output = getSystemProperty("ro.treble.enabled"); if (output.equals("true")) { return "Supported"; } else { return "Not Supported"; } } /** * Get Values from android properties * * @param key as parameter * @return value as per system property */ protected String getSystemProperty(String key) { String value = null; try { value = (String) Class.forName("android.os.SystemProperties").getMethod("get", String.class).invoke(null, key); } catch (Exception e) { SensfrxLogger.e("Exception in System Property >> " + e.getMessage()); } return value; } /** * Get the Cpu Architecture 64 bit or 32 * * @return number of cores; */ protected String getCpuArchitecture() { boolean is64Bit = false; for (String abi : Build.SUPPORTED_ABIS) { if (abi.contains("64")) { is64Bit = true; break; } } return is64Bit ? 
"64-Bit" : "32-Bit";
    }

    /**
     * Counts CPU cores by listing the per-core entries under /sys/devices/system/cpu/.
     * Fix: the original pattern "cpu[0-9]" matched only cpu0..cpu9 and undercounted
     * devices with more than 10 cores; "cpu[0-9]+" matches cpu10, cpu11, ...
     *
     * @return number of cores
     */
    protected int getCpuNumOfCores() {
        return Objects.requireNonNull(new File("/sys/devices/system/cpu/").listFiles(new FileFilter() {
            public boolean accept(File params) {
                return Pattern.matches("cpu[0-9]+", params.getName());
            }
        })).length;
    }

    /**
     * Reads the BogoMIPS value from /proc/cpuinfo. The key casing varies by
     * kernel ("bogomips" vs "BogoMIPS"), so both spellings are tried.
     * Parses /proc/cpuinfo once (the original parsed it twice).
     *
     * @return bogomips value, or the string "null" if absent
     */
    protected String getBogoMIPS() {
        Map<String, String> cpuInfo = getCpuInfoMap();
        String mips = cpuInfo.get("bogomips");
        if (mips == null) {
            mips = cpuInfo.get("BogoMIPS");
        }
        return String.valueOf(mips);
    }

    /**
     * Reads the cpufreq scaling governor for the given core.
     *
     * @param core index of the processor core
     * @return governor name, "Not Found" if the file vanished, or "" if absent
     */
    protected String getCPUGovernor(int core) {
        String governor = "";
        String file = "/sys/devices/system/cpu/cpu" + core + "/cpufreq/scaling_governor";
        if (new File(file).exists()) {
            // try-with-resources: the original leaked the reader when readLine() threw
            try (BufferedReader bufferedReader = new BufferedReader(new FileReader(file))) {
                governor = bufferedReader.readLine();
            } catch (FileNotFoundException e) {
                governor = "Not Found";
            } catch (IOException e) {
                SensfrxLogger.e("Exception in CPU Governor >> " + e.getMessage());
            }
        }
        return governor;
    }

    /**
     * Parses /proc/cpuinfo into a key/value map. Look up specifics via
     * e.g. getCpuInfoMap().get("Hardware").
     *
     * @return map of cpuinfo fields (possibly empty on error)
     */
    protected Map<String, String> getCpuInfoMap() {
        Map<String, String> cpuMap = new HashMap<>();
        // try-with-resources: the original never closed the Scanner's file handle
        try (Scanner scanner = new Scanner(new File("/proc/cpuinfo"))) {
            while (scanner.hasNextLine()) {
                String[] split = scanner.nextLine().split(": ");
                if (split.length > 1) cpuMap.put(split[0].trim(), split[1].trim());
            }
        } catch (Exception e) {
            SensfrxLogger.e("Exception in CPU Map : " + e.getMessage());
        }
        return cpuMap;
    }

    /**
     * Reads the current frequency of the given core from sysfs.
     * The sysfs value is in kHz; dividing by 1000 yields MHz.
     * (Removed a leftover debug System.out.println from the original.)
     *
     * @param coreNo core index
     * @return current frequency in MHz, or 0 on error
     */
    protected int getFrequencyOfCore(int coreNo) {
        int currentFreqMhz = 0;
        try (RandomAccessFile reader = new RandomAccessFile(
                "/sys/devices/system/cpu/cpu" + coreNo + "/cpufreq/scaling_cur_freq", "r")) {
            currentFreqMhz = (int) (Double.parseDouble(reader.readLine()) / 1000);
        } catch (Exception e) {
            SensfrxLogger.e("Exception in Frequency Of Core >> " + e.getMessage());
        }
        return currentFreqMhz;
    }

    /**
     * http://developer.android.com/ndk/guides/abis.html
     * Note that we search for abi:s in preferred order (the ordering of the
     * Build.SUPPORTED_ABIS list) to avoid e.g. installing arm on an x86 system where arm
     * emulation is available.
     *
     * @return canonical architecture name
     * @throws RuntimeException when no known ABI is supported
     */
    protected String getCpuAbi() {
        for (String androidArch : Build.SUPPORTED_ABIS) {
            switch (androidArch) {
                case "arm64-v8a":
                    return "aarch64";
                case "armeabi-v7a":
                    return "arm";
                case "x86_64":
                    return "x86_64";
                case "x86":
                    return "i686";
            }
        }
        throw new RuntimeException("Unable to determine arch from Build.SUPPORTED_ABIS = "
                + Arrays.toString(Build.SUPPORTED_ABIS));
    }

    /**
     * Reads the maximum frequency of the given core from sysfs
     * (used to calculate the percentage for frequency bars).
     *
     * @param core core index
     * @return maximum frequency in MHz, or 0 on error
     */
    protected int getMaxCpuFrequency(int core) {
        int maxFreqMhz = 0;
        try (RandomAccessFile reader = new RandomAccessFile(
                "/sys/devices/system/cpu/cpu" + core + "/cpufreq/cpuinfo_max_freq", "r")) {
            maxFreqMhz = (int) (Double.parseDouble(reader.readLine()) / 1000);
        } catch (Exception e) {
            SensfrxLogger.e("Exception in Max Frequency Of CPU >> " + e.getMessage());
        }
        return maxFreqMhz;
    }

    /** Current-vs-max frequency of core 0, expressed as a percentage. */
    protected int getCpuUsageInPercentage() {
        return Utils.calculateCpuUsagePercentage(getFrequencyOfCore(0), getMaxCpuFrequency(0));
    }

    /**
     * Fetches the sticky ACTION_BATTERY_CHANGED broadcast.
     * May return null when no sticky broadcast is available.
     */
    private Intent getBatteryStatusIntent() {
        IntentFilter batFilter = new IntentFilter(Intent.ACTION_BATTERY_CHANGED);
        return context.registerReceiver(null, batFilter);
    }

    /**
     * Battery level as a percentage.
     *
     * @return 0-100, or -1 when the level is unavailable
     */
    protected int getBatteryPercent() {
        Intent intent = getBatteryStatusIntent();
        if (intent == null) {
            return -1;  // no sticky battery broadcast — original would have NPE'd here
        }
        int rawLevel = intent.getIntExtra(BatteryManager.EXTRA_LEVEL, -1);
        int scale = intent.getIntExtra(BatteryManager.EXTRA_SCALE, -1);
        int level = -1;
        if (rawLevel >= 0 && scale > 0) {
            level = (rawLevel * 100) / scale;
        }
        return level;
    }

    /**
     * Whether the phone is currently charging via AC or USB.
     *
     * @return true when plugged into AC or USB
     */
    protected boolean isPhoneCharging() {
        Intent intent = getBatteryStatusIntent();
        if (intent == null) {
            return false;  // battery state unknown — treat as not charging
        }
        int plugged = intent.getIntExtra(BatteryManager.EXTRA_PLUGGED, 0);
        return plugged == BatteryManager.BATTERY_PLUGGED_AC
                || plugged == BatteryManager.BATTERY_PLUGGED_USB;
    }

    /**
     * Maps BatteryManager.EXTRA_HEALTH to a human-readable label.
     *
     * @return battery health string ("Unknown" when unavailable)
     */
    protected String getBatteryHealth() {
        Intent intent = getBatteryStatusIntent();
        if (intent == null) {
            return "Unknown";
        }
        int status = intent.getIntExtra(BatteryManager.EXTRA_HEALTH, 0);
        switch (status) {
            case BatteryManager.BATTERY_HEALTH_COLD:
                return "cold";
            case BatteryManager.BATTERY_HEALTH_DEAD:
                return "dead";
            case BatteryManager.BATTERY_HEALTH_GOOD:
                return "good";
            case BatteryManager.BATTERY_HEALTH_OVERHEAT:
                return "Over Heat";
            case BatteryManager.BATTERY_HEALTH_OVER_VOLTAGE:
                return "Over Voltage";
            case BatteryManager.BATTERY_HEALTH_UNSPECIFIED_FAILURE:
                return "Unspecified failure";
            case BatteryManager.BATTERY_HEALTH_UNKNOWN:
            default:
                return "Unknown";
        }
    }

    /**
     * Battery technology string, e.g. "Li-ion".
     *
     * @return technology, or "Unknown" when unavailable
     */
    protected String getBatteryTechnology() {
        // Reuses the shared sticky-intent helper instead of registering a second filter.
        Intent batteryStatus = getBatteryStatusIntent();
        if (batteryStatus != null) {
            return batteryStatus.getStringExtra(BatteryManager.EXTRA_TECHNOLOGY);
        }
        return "Unknown";
    }

    /**
     * Battery temperature. EXTRA_TEMPERATURE is in tenths of a degree Celsius.
     *
     * @return temperature in °C, or 0 when unavailable
     */
    protected float getBatteryTemperature() {
        Intent intent = getBatteryStatusIntent();
        if (intent == null) {
            return 0f;
        }
        int temperature = intent.getIntExtra(BatteryManager.EXTRA_TEMPERATURE, 0);
        return (float) (temperature / 10.0);
    }

    /**
     * Describes how the device is currently being charged.
     *
     * @return charging source label
     */
    protected String getChargingSource() {
        Intent intent = getBatteryStatusIntent();
        if (intent == null) {
            return "Unknown Source";
        }
        int plugged = intent.getIntExtra(BatteryManager.EXTRA_PLUGGED, 0);
        switch (plugged) {
            case BatteryManager.BATTERY_PLUGGED_AC:
                return "Charging via AC";
            case BatteryManager.BATTERY_PLUGGED_USB:
                return "Charging via USB";
            case BatteryManager.BATTERY_PLUGGED_WIRELESS:
                return "Wireless";
            default:
                return "Unknown Source";
        }
    }

    /**
     * Gets battery capacity in mAh via the hidden com.android.internal.os.PowerProfile
     * class (reflection — no public API exposes this value).
     *
     * @param context application context
     * @return capacity in mAh, or 0.0 on any reflection failure
     */
    protected double getBatteryCapacity(Context context) {
        double batteryCapacity = 0.0;
        final String POWER_PROFILE_CLASS = "com.android.internal.os.PowerProfile";

        try {
            // Load the PowerProfile class and create an instance
            Class<?> powerProfileClass = Class.forName(POWER_PROFILE_CLASS);
            Object powerProfileInstance = powerProfileClass.getConstructor(Context.class).newInstance(context);

            // Invoke getBatteryCapacity() and narrow the boxed result
            Method getBatteryCapacityMethod = powerProfileClass.getMethod("getBatteryCapacity");
            Object result = getBatteryCapacityMethod.invoke(powerProfileInstance);
            if (result instanceof Double) {
                batteryCapacity = (Double) result;
            } else {
                SensfrxLogger.e("Unexpected result type: " + result.getClass().getName());
            }
        } catch (ClassNotFoundException e) {
            SensfrxLogger.e("PowerProfile class not found: " + e.getMessage());
        } catch (NoSuchMethodException e) {
            SensfrxLogger.e("Method not found in PowerProfile class: " + e.getMessage());
        } catch (InstantiationException e) {
            SensfrxLogger.e("Failed to instantiate PowerProfile: " + e.getMessage());
        } catch (IllegalAccessException e) {
            SensfrxLogger.e("Illegal access to PowerProfile: " + e.getMessage());
        } catch (InvocationTargetException e) {
            SensfrxLogger.e("Error invoking method on PowerProfile: " + e.getMessage());
        }
        return batteryCapacity;
    }

    /**
     * Free bytes on the internal data partition.
     *
     * @return available internal storage in bytes
     */
    protected long getAvailableInternalMemorySize() {
        File path = Environment.getDataDirectory();
        StatFs stat = new StatFs(path.getPath());
        return stat.getAvailableBlocksLong() * stat.getBlockSizeLong();
    }

    /**
     * Total bytes on the internal data partition.
     *
     * @return total internal storage in bytes
     */
    protected long getTotalInternalMemorySize() {
        File path = Environment.getDataDirectory();
        StatFs stat = new StatFs(path.getPath());
        return stat.getBlockCountLong() * stat.getBlockSizeLong();
    }

    /**
     * Calculates the physical screen diagonal in inches from the real pixel
     * dimensions and dpi. getRealSize is invoked reflectively to tolerate
     * older platform levels where it may be absent.
     *
     * @return diagonal size formatted with two decimals
     */
    protected String getResolution() {
        int widthPixels = 0;
        int heightPixels = 0;
        WindowManager wm = (WindowManager) context.getSystemService(Context.WINDOW_SERVICE);
        Display display = wm.getDefaultDisplay();
        DisplayMetrics displayMetrics = new DisplayMetrics();
        display.getMetrics(displayMetrics);
        try {
            Point realSize = new Point();
            Display.class.getMethod("getRealSize", Point.class).invoke(display, realSize);
            widthPixels = realSize.x;
            heightPixels = realSize.y;
        } catch (Exception e) {
            SensfrxLogger.e("Exception in Screen Resolution >> " + e.getMessage());
        }
        double x = Math.pow(widthPixels / displayMetrics.xdpi, 2);
        double y = Math.pow(heightPixels / displayMetrics.ydpi, 2);
        double screenSize = Math.sqrt(x + y);
        return String.format(Locale.ENGLISH, "%.2f", screenSize);
    }

    /**
     * Screen density of the current display.
     *
     * @return density as "<n> DPI"
     */
    protected String getDPI() {
        DisplayMetrics metrics = context.getResources().getDisplayMetrics();
        int densityDPI = (int) (metrics.density * 160f);
        return densityDPI + " DPI";
    }

    /**
     * Display refresh rate.
     *
     * @return refresh rate as "<n.nn>Hz"
     */
    protected String getRefreshRate() {
        Display display = ((WindowManager) context.getSystemService(Context.WINDOW_SERVICE)).getDefaultDisplay();
        float refreshValue = display.getRefreshRate();
        return String.format(Locale.ENGLISH, "%.2f", refreshValue) + "Hz";
    }

    /**
     * Reports the active internet connection type.
     *
     * @param context application context
     * @return "Mobile", "Wifi" or "Unknown"
     */
    protected String isConnectedToWifiOrMobile(Context context) {
        ConnectivityManager connManager = (ConnectivityManager) context.getSystemService(Context.CONNECTIVITY_SERVICE);
        if ((connManager.getNetworkInfo(ConnectivityManager.TYPE_MOBILE) != null
                && connManager.getNetworkInfo(ConnectivityManager.TYPE_MOBILE).isConnected())) {
            return "Mobile";
        } else if ((connManager.getNetworkInfo(ConnectivityManager.TYPE_WIFI) != null
                && connManager.getNetworkInfo(ConnectivityManager.TYPE_WIFI).isConnected())) {
            return "Wifi";
        } else return "Unknown";
    }

    /**
     * Finds the first non-loopback IPv4 address across all network interfaces.
     *
     * @return dotted-quad address, or null when none is found
     */
    protected String getLocalIpAddress() {
        try {
            for (Enumeration<NetworkInterface> enumeration = NetworkInterface.getNetworkInterfaces();
                 enumeration.hasMoreElements(); ) {
                NetworkInterface networkInterface = enumeration.nextElement();
                for (Enumeration<InetAddress> enumeration1 = networkInterface.getInetAddresses();
                     enumeration1.hasMoreElements(); ) {
                    InetAddress inetAddress = enumeration1.nextElement();
                    if (!inetAddress.isLoopbackAddress() && inetAddress instanceof Inet4Address) {
                        return inetAddress.getHostAddress();
                    }
                }
            }
        } catch (Exception e) {
            SensfrxLogger.e("Exception in Local Ip Address >> " + e.getMessage());
        }
        return null;
    }
}
0
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/ai/sensfrx
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/ai/sensfrx/df/DeviceInformation.java
package ai.sensfrx.df; import android.content.Context; /** * This class is responsible for fetching the device information */ public class DeviceInformation extends DeviceFingerprint { public DeviceInformation(Context context) { super(context); } @Override public boolean isMockLocationEnabled() { return super.isMockLocationEnabled(); } @Override public String getAppsWithMockPermission(Context context) { return super.getAppsWithMockPermission(context); } @Override public String getNightModeStatus() { return super.getNightModeStatus(); } @Override public int getScreenBrightness() { return super.getScreenBrightness(); } @Override public boolean isPowerSavingModeEnabled(Context context) { return super.isPowerSavingModeEnabled(context); } @Override public String getAllVolumeLevelsInfo() { return super.getAllVolumeLevelsInfo(); } @Override public boolean isDNDModeEnabled(Context context) { return super.isDNDModeEnabled(context); } @Override public boolean isDualSim(Context context) { return super.isDualSim(context); } @Override public String checkBluetoothStatus() { return super.checkBluetoothStatus(); } @Override public String getAMPMFormat(Context context) { return super.getAMPMFormat(context); } @Override public String getLockTypeString(Context context) { return super.getLockTypeString(context); } @Override public boolean isSimCardLocked(Context context) { return super.isSimCardLocked(context); } @Override public String getManifestPermissions(String packageName) { return super.getManifestPermissions(packageName); } @Override public boolean isEmulatorOrRealDevice() { return super.isEmulatorOrRealDevice(); } @Override public boolean isDeveloperModeEnabled() { return super.isDeveloperModeEnabled(); } @Override public boolean isVpnConnected(Context context) { return super.isVpnConnected(context); } @Override public float getDefaultTimeZoneOffsetInHours() { return super.getDefaultTimeZoneOffsetInHours(); } @Override public final String getDeviceName() { return 
super.getDeviceName(); } @Override public final String getBuildBrand() { return super.getBuildBrand(); } @Override public final String getModel() { return super.getModel(); } @Override public final String getFingerprint() { return super.getFingerprint(); } @Override public final String getDeviceRingerMode() { return super.getDeviceRingerMode(); } @Override public final String getSerial() { return super.getSerial(); } @Override public String getDeviceManufacturer() { return super.getDeviceManufacturer(); } public final String getProcessor() { // return getCpuInfoMap().get("Processor"); return System.getProperty("os.arch"); } @Override public String getDeviceOrientation(Context context) { return super.getDeviceOrientation(context); } @Override public final String getOSVersion() { return super.getOSVersion(); } @Override public final int getSdkVersion() { return super.getSdkVersion(); } @Override public final String getLanguage() { return super.getLanguage(); } @Override public final String getDeviceLocale() { return super.getDeviceLocale(); } @Override public final String getDevice() { return super.getDevice(); } @Override public final String getBoard() { return super.getBoard(); } @Override public final String getDisplayVersion() { return super.getDisplayVersion(); } /** * Get USB Host Technology supported or not * * @param context as para * @return isSupported */ public final String isUsbHostSupported(Context context) { String isSupported = null; if (getUsbHost(context)) { isSupported = "Supported"; } else isSupported = "Not Supported"; return isSupported; } @Override public boolean getUsbHost(Context context) { return super.getUsbHost(context); } /** * Get Phone type * Ex. 
Gsm, CDMA * * @param context as para * @return operator */ @Override public final String getPhoneType(Context context) { return super.getPhoneType(context); } /** * Get operator * * @return operator */ @Override public final String getOperator() { return super.getOperator(); } @Override public final int getScreenHeight() { return super.getScreenHeight(); } @Override public final int getScreenWidth() { return super.getScreenWidth(); } @Override public String getDPI() { return super.getDPI(); } @Override public String getResolution() { return super.getResolution(); } @Override public final String getRefreshRate() { return super.getRefreshRate(); } }
0
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/ai/sensfrx
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/ai/sensfrx/df/HookDetectionCheck.java
package ai.sensfrx.df;

import android.app.ActivityManager;
import android.content.Context;
import android.content.pm.ApplicationInfo;
import android.content.pm.PackageManager;

import java.util.Arrays;
import java.util.List;

/**
 * Heuristics for detecting runtime-hooking frameworks (Xposed, Cydia
 * Substrate, Frida): known installer packages, injected stack frames,
 * and a running frida-server process.
 */
public class HookDetectionCheck {

    // NOTE(review): kept public and non-final for backward compatibility with
    // any external callers; treat as read-only.
    public static String[] dangerousPackages = {"de.robv.android.xposed.installer",
            "com.saurik.substrate", "de.robv.android.xposed"};

    /**
     * Detects if there is any suspicious installed application.
     *
     * @param context application context
     * @return {@code true} if some bad application is installed or a hook is
     *         detected at runtime, {@code false} otherwise
     */
    public static boolean hookDetected(Context context) {
        PackageManager packageManager = context.getPackageManager();
        List<ApplicationInfo> applicationInfoList =
                packageManager.getInstalledApplications(PackageManager.GET_META_DATA);
        if (applicationInfoList != null) {
            // Hoisted: the original rebuilt this list view on every iteration.
            List<String> dangerous = Arrays.asList(dangerousPackages);
            for (ApplicationInfo applicationInfo : applicationInfoList) {
                if (dangerous.contains(applicationInfo.packageName)) {
                    return true;
                }
            }
        }
        return advancedHookDetection(context);
    }

    /**
     * Inspects the current stack trace for frames injected by hooking
     * frameworks: a second ZygoteInit frame, Substrate's MS$2.invoked, or any
     * XposedBridge main/handleHookedMethod frame.
     */
    private static boolean advancedHookDetection(Context context) {
        // Constructing an exception captures the current stack without the
        // original's throw/catch detour.
        for (StackTraceElement frame : new Exception().getStackTrace()) {
            String cls = frame.getClassName();
            String method = frame.getMethodName();
            if (cls.equals("com.android.internal.os.ZygoteInit")) {
                // A second ZygoteInit frame indicates an injected call path.
                // (Counter kept local via break-out: first match continues,
                // second returns true.)
            }
            if (cls.equals("com.saurik.substrate.MS$2") && method.equals("invoked")) {
                return true;
            }
            if (cls.equals("de.robv.android.xposed.XposedBridge")
                    && (method.equals("main") || method.equals("handleHookedMethod"))) {
                return true;
            }
        }
        // Re-walk for the duplicated-ZygoteInit check so the count logic stays
        // explicit and identical to the original's threshold of 2.
        int zygoteInitCallCount = 0;
        for (StackTraceElement frame : new Exception().getStackTrace()) {
            if (frame.getClassName().equals("com.android.internal.os.ZygoteInit")) {
                zygoteInitCallCount++;
                if (zygoteInitCallCount == 2) {
                    return true;
                }
            }
        }
        return checkFrida(context);
    }

    /** Looks for a running service whose process name contains "fridaserver". */
    private static boolean checkFrida(Context context) {
        ActivityManager activityManager =
                (ActivityManager) context.getSystemService(Context.ACTIVITY_SERVICE);
        if (activityManager == null) {
            return false;  // fix: the original dereferenced this without a null check
        }
        List<ActivityManager.RunningServiceInfo> runningServices =
                activityManager.getRunningServices(300);
        if (runningServices != null) {
            for (ActivityManager.RunningServiceInfo info : runningServices) {
                if (info.process.contains("fridaserver")) {
                    return true;
                }
            }
        }
        return false;
    }
}
0
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/ai/sensfrx
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/ai/sensfrx/df/NetworkInformation.java
package ai.sensfrx.df;

import android.content.Context;

/**
 * Thin facade over {@link DeviceFingerprint} exposing only the
 * network-related probes. Every method delegates unchanged to the superclass.
 */
public class NetworkInformation extends DeviceFingerprint {

    public NetworkInformation(Context context) {
        super(context);
    }

    // Active connection type: "Mobile", "Wifi" or "Unknown".
    @Override
    public String isConnectedToWifiOrMobile(Context context) {
        return super.isConnectedToWifiOrMobile(context);
    }

    // First non-loopback IPv4 address across all interfaces, or null if none.
    @Override
    public String getLocalIpAddress() {
        return super.getLocalIpAddress();
    }

    // Network operator (carrier) — delegates to DeviceFingerprint#getOperator().
    @Override
    public final String getOperator() {
        return super.getOperator();
    }
}
0
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/ai/sensfrx
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/ai/sensfrx/df/RootDetector.java
package ai.sensfrx.df;

import java.io.BufferedReader;
import java.io.File;
import java.io.InputStreamReader;

/**
 * Heuristics for detecting a rooted device: well-known su/Superuser paths,
 * a resolvable {@code su} binary, and root-related commands that execute
 * successfully.
 */
public class RootDetector {

    /** @return true when any of the three root heuristics fires. */
    public static boolean isRooted() {
        return checkRootMethod1() || checkRootMethod2() || checkRootMethod3();
    }

    /** Checks well-known filesystem locations for su / Superuser.apk. */
    private static boolean checkRootMethod1() {
        String[] paths = { "/system/app/Superuser.apk", "/sbin/su", "/system/bin/su",
                "/system/xbin/su", "/data/local/xbin/su", "/data/local/bin/su",
                "/system/sd/xbin/su", "/system/bin/failsafe/su", "/data/local/su" };
        for (String path : paths) {
            if (new File(path).exists()) {
                return true;
            }
        }
        return false;
    }

    /** Asks {@code which} whether an {@code su} binary is on the PATH. */
    private static boolean checkRootMethod2() {
        Process process = null;
        try {
            process = Runtime.getRuntime().exec(new String[] { "/system/xbin/which", "su" });
            // try-with-resources: the original never closed this reader
            try (BufferedReader in = new BufferedReader(
                    new InputStreamReader(process.getInputStream()))) {
                return in.readLine() != null;
            }
        } catch (Throwable t) {
            return false;
        } finally {
            if (process != null) process.destroy();
        }
    }

    /** Tries a handful of root-related commands; any exit code 0 means rooted. */
    private static boolean checkRootMethod3() {
        String[] commands = { "/system/xbin/which su", "su", "ls -l /sbin/su",
                "ls -l /system/bin/su" };
        for (String command : commands) {
            if (canExecuteCommand(command)) {
                return true;
            }
        }
        return false;
    }

    /**
     * Runs {@code command} and reports whether it exited with status 0.
     * Splitting on whitespace matches the tokenization the deprecated
     * {@code exec(String)} performed internally.
     */
    private static boolean canExecuteCommand(String command) {
        try {
            Process process = Runtime.getRuntime().exec(command.split(" "));
            return process.waitFor() == 0;
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();  // fix: preserve interrupt status
            return false;
        } catch (Exception e) {
            return false;
        }
    }
}
0
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/ai/sensfrx
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/ai/sensfrx/df/StorageInformation.java
package ai.sensfrx.df;

import android.annotation.SuppressLint;
import android.content.Context;
import android.content.pm.ApplicationInfo;
import android.content.pm.PackageInfo;
import android.content.pm.PackageManager;
import android.os.Build;

import java.text.CharacterIterator;
import java.text.StringCharacterIterator;

/**
 * Internal-storage metrics built on {@link DeviceFingerprint}: raw and
 * human-readable sizes, a used-space percentage, and an external-storage
 * install check.
 */
public class StorageInformation extends DeviceFingerprint {

    public StorageInformation(Context context) {
        super(context);
    }

    // Free bytes on the internal data partition (delegates to superclass).
    @Override
    protected final long getAvailableInternalMemorySize() {
        return super.getAvailableInternalMemorySize();
    }

    // Total bytes on the internal data partition (delegates to superclass).
    @Override
    protected final long getTotalInternalMemorySize() {
        return super.getTotalInternalMemorySize();
    }

    /** Available internal storage as a human-readable binary-unit string. */
    public final String getReadableAvailableInternalMemorySize() {
        return readableByteCountBin(getAvailableInternalMemorySize());
    }

    /** Total internal storage as a human-readable binary-unit string. */
    public final String getReadableTotalInternalMemorySize() {
        return readableByteCountBin(getTotalInternalMemorySize());
    }

    /**
     * Calculate the percentage for storage.
     * NOTE(review): returns a hard-coded 30 when maximum is 0 — looks like an
     * arbitrary fallback to avoid division by zero; confirm intent.
     *
     * @param toCalculate as para
     * @param maximum     as para
     * @return percentage
     */
    private long calculateLongPercentage(long toCalculate, long maximum) {
        if (maximum != 0) {
            return (100 * toCalculate) / maximum;
        } else return 30;
    }

    /**
     * Used internal storage (total minus available) as a percentage of total.
     *
     * @return memory in percentage
     */
    public final long getUsedMemoryInPercentage() {
        return calculateLongPercentage(getTotalInternalMemorySize() - getAvailableInternalMemorySize(),
                getTotalInternalMemorySize());
    }

    /**
     * Formats a byte count using binary (1024-based) units, e.g. "1.5 MiB".
     * The shift loop walks the KMGTPE unit letters while dividing by 1024;
     * the 0xfffccccccccccccL threshold guards rounding at each unit boundary.
     * Long.MIN_VALUE is clamped first because Math.abs cannot represent it.
     */
    @SuppressLint("DefaultLocale")
    private String readableByteCountBin(long bytes) {
        long absB = bytes == Long.MIN_VALUE ? Long.MAX_VALUE : Math.abs(bytes);
        if (absB < 1024) {
            return bytes + " B";
        }
        long value = absB;
        CharacterIterator ci = new StringCharacterIterator("KMGTPE");
        for (int i = 40; i >= 0 && absB > 0xfffccccccccccccL >> i; i -= 10) {
            value >>= 10;
            ci.next();
        }
        value *= Long.signum(bytes);  // restore the sign dropped by Math.abs
        return String.format("%.1f %ciB", value / 1024.0, ci.current());
    }

    /**
     * Reports whether this app is installed on external storage, first via
     * the ApplicationInfo flag (API 8+), then by inspecting the files-dir
     * path as a legacy fallback.
     */
    public boolean isOnExternalStorage(Context context) {
        // check for API level 8 and higher
        if (Build.VERSION.SDK_INT > android.os.Build.VERSION_CODES.ECLAIR_MR1) {
            PackageManager pm = context.getPackageManager();
            try {
                PackageInfo pi = pm.getPackageInfo(context.getPackageName(), 0);
                ApplicationInfo ai = pi.applicationInfo;
                return (ai.flags & ApplicationInfo.FLAG_EXTERNAL_STORAGE) == ApplicationInfo.FLAG_EXTERNAL_STORAGE;
            } catch (PackageManager.NameNotFoundException e) {
                // ignore — fall through to the path-based check below
            }
        }
        // check for API level 7 - check files dir
        try {
            String filesDir = context.getFilesDir().getAbsolutePath();
            if (filesDir.startsWith("/data/")) {
                return false;
            } else if (filesDir.contains("/mnt/") || filesDir.contains("/sdcard/")) {
                return true;
            }
        } catch (Throwable e) {
            // ignore — best-effort heuristic
        }
        return false;
    }
}
0
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/ai/sensfrx
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/ai/sensfrx/df/SystemInformation.java
package ai.sensfrx.df;

import android.content.Context;
import android.os.SystemClock;

import java.util.Locale;
import java.util.concurrent.TimeUnit;

/**
 * This class is responsible for fetching the System information:
 * OS/build details, root status, kernel/VM versions, and system uptime.
 */
public class SystemInformation extends DeviceFingerprint {

    public SystemInformation(Context context) {
        super(context);
    }

    @Override
    public final String getOSVersion() { return super.getOSVersion(); }

    @Override
    public final int getSdkVersion() { return super.getSdkVersion(); }

    @Override
    public final boolean isDeviceRooted() { return super.isDeviceRooted(); }

    @Override
    public String getDeviceRingerMode() { return super.getDeviceRingerMode(); }

    @Override
    public final String getSecurityPatchLevel() { return super.getSecurityPatchLevel(); }

    @Override
    public final String getBuildHost() { return super.getBuildHost(); }

    @Override
    public final String getBaseBandVersion() { return super.getBaseBandVersion(); }

    @Override
    public String getVirtualMachine() { return super.getVirtualMachine(); }

    @Override
    public final String getKernelVersion() { return super.getKernelVersion(); }

    @Override
    public final String getOpenGlVersion(Context context) { return super.getOpenGlVersion(context); }

    /**
     * Formats a millisecond duration as "HH:MM:SS" (or "MM:SS" under an hour).
     * Fix: formats with Locale.ENGLISH so digits stay ASCII on any device
     * locale, consistent with the rest of the SDK's String.format calls.
     *
     * @param millis duration in milliseconds
     * @return formatted time string
     */
    private String formatTime(long millis) {
        long seconds = Math.round((double) millis / 1000);
        long hours = TimeUnit.SECONDS.toHours(seconds);
        if (hours > 0) seconds -= TimeUnit.HOURS.toSeconds(hours);
        long minutes = seconds > 0 ? TimeUnit.SECONDS.toMinutes(seconds) : 0;
        if (minutes > 0) seconds -= TimeUnit.MINUTES.toSeconds(minutes);
        return hours > 0
                ? String.format(Locale.ENGLISH, "%02d:%02d:%02d", hours, minutes, seconds)
                : String.format(Locale.ENGLISH, "%02d:%02d", minutes, seconds);
    }

    /**
     * Time since boot (excluding deep sleep), formatted via {@link #formatTime}.
     *
     * @return systemUpTime
     */
    public final String getSystemUpTime() {
        long millis = SystemClock.uptimeMillis();
        return formatTime(millis);
    }

    @Override
    public final String getTreble() { return super.getTreble(); }
}
0
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/ai/sensfrx
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/ai/sensfrx/em/DeviceCoordinate.java
package ai.sensfrx.em;

import android.content.Context;
import android.content.res.Configuration;
import android.hardware.Sensor;
import android.hardware.SensorEvent;
import android.hardware.SensorEventListener;
import android.hardware.SensorManager;

import ai.sensfrx.utils.SensfrxLogger;

import java.lang.ref.WeakReference;

/**
 * Singleton that samples the accelerometer once and publishes the last
 * X/Y/Z reading through static fields, plus helpers for gyroscope support
 * and device orientation.
 */
public class DeviceCoordinate implements SensorEventListener {

    private static volatile DeviceCoordinate mInstance = null;

    // Last sampled axis values as strings; "0" until the first reading arrives.
    public static String X = "0", Y = "0", Z = "0";

    private final SensorManager mSensorManager;
    // Raw copy of the last event's values (written in onSensorChanged).
    private final float[] values = new float[3];
    private WeakReference<Context> mContextRef;

    /**
     * This class is responsible for fetching the gyroscope coordinates.
     *
     * @param mContext any context; the application context is retained
     */
    private DeviceCoordinate(Context mContext) {
        mContextRef = new WeakReference<>(mContext.getApplicationContext());
        mSensorManager = (SensorManager) mContext.getSystemService(Context.SENSOR_SERVICE);
    }

    /**
     * Double-checked-locking singleton accessor (mInstance is volatile).
     *
     * @param context any context
     * @return the shared instance
     */
    public static DeviceCoordinate getInstance(Context context) {
        if (mInstance == null) {
            synchronized (DeviceCoordinate.class) {
                if (mInstance == null) {
                    mInstance = new DeviceCoordinate(context.getApplicationContext());
                }
            }
        }
        return mInstance;
    }

    /**
     * Reports whether the device has a gyroscope.
     * Fix: uses the SensorManager captured at construction instead of
     * re-resolving it through the WeakReference context, which could NPE
     * if the reference had been cleared.
     *
     * @return true when a default gyroscope sensor exists
     */
    public boolean getGyroscopeSupport() {
        boolean isSupport = mSensorManager.getDefaultSensor(Sensor.TYPE_GYROSCOPE) != null;
        if (isSupport) {
            SensfrxLogger.d("GYROSCOPE supports");
        } else {
            SensfrxLogger.d("no GYROSCOPE supports");
        }
        return isSupport;
    }

    /** Start getting device coordinate logs using the ACCELEROMETER sensor. */
    public void start() {
        mSensorManager.registerListener(this,
                mSensorManager.getDefaultSensor(Sensor.TYPE_ACCELEROMETER),
                SensorManager.SENSOR_DELAY_NORMAL);
    }

    /** Stop getting logs from the ACCELEROMETER sensor. */
    public void stop() {
        mSensorManager.unregisterListener(this);
    }

    /**
     * Get the device orientation.
     *
     * @param context as request
     * @return "Portrait" or "Landscape"
     */
    public static String getDeviceOrientation(Context context) {
        int orientation = context.getResources().getConfiguration().orientation;
        return orientation == Configuration.ORIENTATION_PORTRAIT ? "Portrait" : "Landscape";
    }

    @Override
    public void onSensorChanged(SensorEvent sensorEvent) {
        if (sensorEvent.accuracy == SensorManager.SENSOR_STATUS_UNRELIABLE) {
            return;
        }
        try {
            values[0] = sensorEvent.values[0];
            values[1] = sensorEvent.values[1];
            values[2] = sensorEvent.values[2];
            X = "" + sensorEvent.values[0];
            Y = "" + sensorEvent.values[1];
            Z = "" + sensorEvent.values[2];
            // One-shot sampling: unregister after the first usable reading.
            stop();
        } catch (Exception e) {
            SensfrxLogger.e("Exception in Device Gyroscope : " + e.getMessage());
        }
    }

    @Override
    public void onAccuracyChanged(Sensor sensor, int i) {
    }
}
0
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/ai/sensfrx
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/ai/sensfrx/em/EventAdapter.java
package ai.sensfrx.em;

import ai.sensfrx.data.model.AppEvent;
import ai.sensfrx.data.model.CallEvent;
import ai.sensfrx.data.model.ClickEvent;
import ai.sensfrx.data.model.Event;
import ai.sensfrx.data.model.LocationInfo;
import ai.sensfrx.data.model.ScreenChangeEvent;

import com.google.gson.Gson;
import com.google.gson.JsonDeserializationContext;
import com.google.gson.JsonDeserializer;
import com.google.gson.JsonElement;
import com.google.gson.JsonParseException;

import java.lang.reflect.Type;

/**
 * Gson deserializer that routes a generic {@link Event} payload to the
 * concrete model class named by its {@code event_type} JSON field
 * (e.g. a screen-change payload becomes a {@link ScreenChangeEvent}).
 */
public class EventAdapter implements JsonDeserializer<Event> {

    private static final Gson gson = new Gson();

    /**
     * Deserializes {@code json} into the concrete event subtype selected by
     * its {@code event_type} field; payloads requested as a subtype directly
     * pass through unchanged.
     *
     * @param json    the raw JSON payload
     * @param typeOfT the requested target type
     * @param context Gson deserialization context (unused)
     * @return the deserialized event
     */
    @Override
    public Event deserialize(JsonElement json, Type typeOfT, JsonDeserializationContext context)
            throws JsonParseException {
        // Dispatch only when the caller asked for the abstract Event type;
        // a local holds the resolved target instead of reassigning the parameter.
        Type targetType = typeOfT;
        if (Event.class.equals(typeOfT)) {
            String eventType = json.getAsJsonObject().get("event_type").getAsString();
            switch (eventType) {
                case Event.EVENT_TYPE_APP:
                    targetType = AppEvent.class;
                    break;
                case Event.EVENT_TYPE_LOCATION:
                    targetType = LocationInfo.class;
                    break;
                case Event.EVENT_TYPE_SCREEN:
                    targetType = ScreenChangeEvent.class;
                    break;
                case Event.EVENT_TYPE_CLICK:
                    targetType = ClickEvent.class;
                    break;
                case Event.EVENT_ON_GOING_CALL:
                    targetType = CallEvent.class;
                    break;
            }
        }
        return gson.fromJson(json, targetType);
    }
}
0
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/ai/sensfrx
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/ai/sensfrx/em/EventQueueManager.java
package ai.sensfrx.em; import android.app.Application; import android.content.Context; import ai.sensfrx.Sensfrx; import ai.sensfrx.utils.SensfrxLogger; import ai.sensfrx.data.MainViewModel; import ai.sensfrx.data.model.Event; import ai.sensfrx.data.model.LocationInfo; import ai.sensfrx.network.GsonConverter; import com.google.gson.Gson; import com.google.gson.JsonElement; import com.google.gson.JsonObject; import com.squareup.tape2.ObjectQueue; import com.squareup.tape2.QueueFile; import org.json.JSONObject; import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.Iterator; import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

/**
 * File-backed (tape2) queue of SDK {@link Event}s that batches uploads to the server.
 * Events accumulate until the batch threshold is reached, then are combined with
 * location and device logs and pushed; on push failure the batch threshold grows
 * (up to MAX_QUEUE_SIZE) and is retried up to MAX_RETRY_COUNT times before the
 * queue is dropped.
 *
 * NOTE(review): public methods are synchronized, but isLoggingInProgress is reset
 * inside an asynchronous observeForever callback, so the "logging in progress"
 * window outlives the synchronized logEventsToServer() call — verify that no two
 * pushes can interleave if the callback is delayed.
 */
public class EventQueueManager {
    // Name of a legacy/secondary queue file; deleted on every init.
    private static final String BATCH_QUEUE_FILENAME = "auth-safe-queue";
    // Name of the active persistent queue file under the app's files dir.
    private static final String QUEUE_FILENAME = "auth-safe-monitor-queue";
    // Initial batch size that triggers a push.
    private static final int MAX_SIZE = 20;
    private ObjectQueue<Event> eventObjectQueue;
    private ExecutorService executor; // created in initEventQueue; only used for shutdown — TODO confirm intended use
    private final Context context;
    private int retryCount = 0;
    // Current batch threshold; grows by MAX_SIZE on each failed push.
    private int currentQueueSize = MAX_SIZE;
    private static final int MAX_QUEUE_SIZE = 100;
    private static final int MAX_RETRY_COUNT = 5;
    private boolean isLoggingInProgress = false; // Flag to prevent concurrent logging

    // Constructor to initialize the EventQueueManager with the given context
    public EventQueueManager(Context context) {
        this.context = context.getApplicationContext();
        try {
            // Initialize the event queue
            initEventQueue();
        } catch (IOException e) {
            // Log the error and delete the queue file in case of failure
            SensfrxLogger.e("Failed to create queue", e);
            getFile().delete();
            try {
                // Retry initializing the event queue after deleting the queue file
                initEventQueue();
            } catch (IOException eRetry) {
                // Log the error if the retry fails; eventObjectQueue stays null in this case
                SensfrxLogger.e("Deleted queue file. Retried. Failed.", eRetry);
            }
        }
    }

    // Method to get the queue file with default filename
    private File getFile() {
        return getFile(QUEUE_FILENAME);
    }

    // Method to get the queue file with specified filename
    private File getFile(String filename) {
        return new File(context.getFilesDir().getAbsolutePath(), filename);
    }

    // Method to initialize the event queue
    private void initEventQueue() throws IOException {
        executor = Executors.newSingleThreadExecutor();
        try {
            // Delete the batch queue file if it exists
            getFile(BATCH_QUEUE_FILENAME).delete();
        } catch (Exception e) {
            e.printStackTrace();
        }
        // Create a queue file and initialize the event object queue
        File file = getFile();
        QueueFile queueFile = new QueueFile.Builder(file).build();
        eventObjectQueue = ObjectQueue.create(queueFile, new GsonConverter<>(Event.class));
    }

    /**
     * Appends an event to the persistent queue; when the batch threshold is
     * reached (and no push is already running) triggers a server push.
     */
    public synchronized void add(Event event) {
        try {
            // Log the current queue size
            SensfrxLogger.d("Queue Size :: " + eventObjectQueue.size() + " out of " + currentQueueSize);
            eventObjectQueue.add(event);
            // If the queue size reaches the limit and logging is not in progress, log the events to the server
            if (eventObjectQueue.size() >= currentQueueSize && !isLoggingInProgress) {
                SensfrxLogger.d("Queue Ready :: " + eventObjectQueue.asList());
                logEventsToServer();
            }
        } catch (IOException e) {
            // Log an error if adding to the queue fails
            SensfrxLogger.e("Add to queue failed", e);
        }
    }

    /**
     * Combines the queued events with location and device logs and pushes the
     * batch to the server. On HTTP status "200" the whole queue is cleared and
     * counters reset; otherwise the batch threshold is extended and retried up
     * to MAX_RETRY_COUNT times, after which the queue is dropped.
     */
    public synchronized void logEventsToServer() {
        // Exit if logging is already in progress
        if (isLoggingInProgress) {
            return;
        }
        isLoggingInProgress = true;
        SensfrxLogger.d("EventQueueManager size " + eventObjectQueue.size());
        if (!eventObjectQueue.isEmpty()) {
            // Determine the number of events to log based on the current queue size
            int currentSize = eventObjectQueue.size();
            int end = Math.min(currentQueueSize, currentSize);
            List<Event> subList = new ArrayList<>(end);
            Iterator<Event> iterator = eventObjectQueue.iterator();
            // Create a sublist of events to log
            for (int i = 0; i < end; i++) {
                try {
                    Event event = iterator.next();
                    if (event != null) {
                        subList.add(event);
                    }
                } catch (Exception exception) {
                    // Log an error if unable to read from the queue
                    SensfrxLogger.e("Unable to read from queue", exception);
                } catch (Error error) {
                    // Log an error if unable to read from the queue
                    // NOTE(review): catching Error (e.g. corrupt queue file blowing up tape2)
                    // is deliberate best-effort here — confirm this is intended.
                    SensfrxLogger.e("Unable to read from queue", error);
                }
            }
            List<Event> events = Collections.unmodifiableList(subList);
            JsonObject combinedObj = new JsonObject();
            try {
                Gson gson = new Gson();
                // Add location and device logs to the combined object
                LocationInfo locationObj = Sensfrx.locationLogEvent(context);
                JsonObject deviceLogsObj = Sensfrx.getDeviceLogs();
                combinedObj.add("location", gson.toJsonTree(locationObj));
                combinedObj.add("device_logs", deviceLogsObj);
                JsonElement eventsElement = gson.toJsonTree(events);
                combinedObj.add("events", eventsElement);
                //SensfrxLogger.d("logEventsToServer: " + combinedObj);
            } catch (Exception e) {
                // Log an error if combining the data fails; push proceeds with a partial payload
                SensfrxLogger.e("logEventsToServer: " + e.getMessage());
            }
            // Push the combined data to the server and observe the response.
            // NOTE(review): cast assumes the stored context is the Application — it is
            // getApplicationContext() from the constructor, but verify against callers.
            MainViewModel mainViewModel = new MainViewModel((Application) context);
            mainViewModel.pushEvents(combinedObj).observeForever(response -> {
                SensfrxLogger.d("EventQueueManager Response :> " + response);
                try {
                    // Parse the response to check if events were successfully logged
                    JSONObject responseJson = new JSONObject(new Gson().toJson(response));
                    if (response != null && responseJson.getString("status").equalsIgnoreCase("200")) {
                        // If successful, clear the queue and reset variables
                        SensfrxLogger.d("Events pushed successfully. Clearing the queue.");
                        try {
                            clear();
                            resetVariables();
                        } catch (IOException e) {
                            SensfrxLogger.e("Clearing the queue failed", e);
                        }
                    } else {
                        // If logging fails, increment the retry count and try again with an extended queue size
                        retryCount++;
                        if (retryCount <= MAX_RETRY_COUNT) {
                            SensfrxLogger.d("Events push failed. Retrying with an extended queue size.");
                            extendQueueSize();
                        } else {
                            // If the maximum retry count is reached, clear the queue and reset variables
                            SensfrxLogger.d("Maximum retry count reached. Clearing the queue.");
                            try {
                                clear();
                                resetVariables();
                            } catch (IOException e) {
                                SensfrxLogger.e("Clearing the queue failed", e);
                            }
                        }
                    }
                } catch (Exception exception) {
                    // Log an error if parsing the response fails
                    SensfrxLogger.e("Event Not Logged : " + exception.getMessage());
                } finally {
                    // Reset the logging flag to false
                    isLoggingInProgress = false;
                }
            });
        } else {
            // Reset the logging flag to false if the queue is empty
            isLoggingInProgress = false;
        }
    }

    // Method to get the current size of the event queue
    public synchronized int size() {
        return eventObjectQueue.size();
    }

    // Method to clear the event queue
    public synchronized void clear() throws IOException {
        eventObjectQueue.clear();
    }

    // Method to extend the queue size if necessary (grows by MAX_SIZE, capped at MAX_QUEUE_SIZE)
    private synchronized void extendQueueSize() {
        if (currentQueueSize + MAX_SIZE <= MAX_QUEUE_SIZE) {
            currentQueueSize += MAX_SIZE;
        } else {
            currentQueueSize = MAX_QUEUE_SIZE;
        }
    }

    // Method to reset the retry count and queue size variables
    private synchronized void resetVariables() {
        retryCount = 0;
        currentQueueSize = MAX_SIZE;
    }

    // Method to destroy the event queue and shut down the executor
    public synchronized void destroyQueue() {
        try {
            clear();
        } catch (IOException e) {
            e.printStackTrace();
        }
        executor.shutdown();
    }
}
0
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/ai/sensfrx
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/ai/sensfrx/em/LocationEvent.java
package ai.sensfrx.em;

import android.content.Context;
import android.location.Address;
import android.location.Geocoder;
import ai.sensfrx.utils.SensfrxLogger;
import java.io.IOException;
import java.util.List;
import java.util.Locale;

/**
 * Immutable latitude/longitude pair with reverse-geocoding convenience accessors
 * (address line, locality, postal code, country, city, state).
 *
 * NOTE(review): each accessor performs a fresh Geocoder lookup (a blocking,
 * potentially networked call) — callers reading several fields should consider
 * using {@link #getGeocoderAddress(Context)} once instead.
 */
public class LocationEvent {

    private final double latitude;  // latitude in degrees
    private final double longitude; // longitude in degrees

    /**
     * @param latitude  latitude in degrees
     * @param longitude longitude in degrees
     */
    public LocationEvent(double latitude, double longitude) {
        this.latitude = latitude;
        this.longitude = longitude;
    }

    /** @return the latitude in degrees */
    public double getLatitude() {
        return latitude;
    }

    /** @return the longitude in degrees */
    public double getLongitude() {
        return longitude;
    }

    /**
     * Reverse-geocodes this location.
     *
     * @param context Android context used to create the {@link Geocoder}
     * @return up to one matching {@link Address}, or {@code null} when the
     *         lookup fails or the latitude is exactly 0
     *         (NOTE(review): a latitude of 0 is a valid equator location —
     *         this guard presumably filters "unset" coordinates; confirm)
     */
    public List<Address> getGeocoderAddress(Context context) {
        if (latitude != 0) {
            Geocoder geocoder = new Geocoder(context, Locale.ENGLISH);
            try {
                int geocoderMaxResults = 1;
                return geocoder.getFromLocation(latitude, longitude, geocoderMaxResults);
            } catch (IOException e) {
                SensfrxLogger.e("IOException during geocoding", e);
            }
        }
        return null;
    }

    /** Performs one lookup and returns the first match, or null when unavailable. */
    private Address getFirstAddress(Context context) {
        List<Address> addresses = getGeocoderAddress(context);
        if (addresses != null && addresses.size() > 0) {
            return addresses.get(0);
        }
        return null;
    }

    /** @return the first address line, or {@code null} if geocoding fails */
    public String getAddressLine(Context context) {
        Address address = getFirstAddress(context);
        return address != null ? address.getAddressLine(0) : null;
    }

    /** @return the locality, or {@code null} if geocoding fails */
    public String getLocality(Context context) {
        Address address = getFirstAddress(context);
        return address != null ? address.getLocality() : null;
    }

    /** @return the postal code, or {@code null} if geocoding fails */
    public String getPostalCode(Context context) {
        Address address = getFirstAddress(context);
        return address != null ? address.getPostalCode() : null;
    }

    /** @return the country name, or {@code null} if geocoding fails */
    public String getCountryName(Context context) {
        Address address = getFirstAddress(context);
        return address != null ? address.getCountryName() : null;
    }

    /** @return the city (locality), or {@code null} if geocoding fails */
    public String getCityName(Context context) {
        Address address = getFirstAddress(context);
        return address != null ? address.getLocality() : null;
    }

    /** @return the state (admin area), or {@code null} if geocoding fails */
    public String getStateName(Context context) {
        Address address = getFirstAddress(context);
        return address != null ? address.getAdminArea() : null;
    }
}
0
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/ai/sensfrx
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/ai/sensfrx/em/TrackAllEvents.java
package ai.sensfrx.em; import android.app.Activity; import android.content.Context; import android.view.MotionEvent; import androidx.fragment.app.Fragment; import ai.sensfrx.Sensfrx; import ai.sensfrx.data.model.ClickEvent; import ai.sensfrx.data.model.AppEvent; import ai.sensfrx.data.model.ScreenChangeEvent; import ai.sensfrx.utils.SensfrxLogger; public class TrackAllEvents { private static String mScreenName = null; private static final int MAX_CLICK_DURATION = 1000; private static final int MAX_CLICK_DISTANCE = 15; private static long pressStartTime; private static int pressedX; private static int pressedY; private static boolean stayedWithinClickDistance; private static final boolean isClick = false; /** * @return mScreenName */ public static String getScreenName() { return mScreenName; } /** * This is for activity * Adding the screen change event in the queue when any screen change * <br></br> * eg MainActivity change to another activity * * @param state as para */ public static void logAppEvent( String state) { AppEvent appEvent = new AppEvent( "ApplicationEvent", Sensfrx.getUid(), new AppEvent.EventData( "" + mScreenName, "" + state) ); Sensfrx.addEventInQueue(appEvent); } /** * This is for activity * Adding the screen change event in the queue when any screen change * <br></br> * eg MainActivity change to another activity * * @param activity as para * @param state as para */ public static void logScreenEvent(Activity activity, String state) { if (activity.getLocalClassName() != null) { mScreenName = activity.getLocalClassName(); } else { mScreenName = activity.getClass().getSimpleName(); } ScreenChangeEvent screenChangeEvent = new ScreenChangeEvent( "screen_change", Sensfrx.getUid(), new ScreenChangeEvent.EventData( "" + mScreenName, "" + state) ); Sensfrx.addEventInQueue(screenChangeEvent); } /** * This is for fragment * Adding the screen change event in the queue when any screen change * <br></br> * eg BlankFragment change to another fragment * * @param 
fragment as para * @param state as para */ public static void logScreenEvent(Fragment fragment, String state) { if (fragment.getClass().getSimpleName() != null) { mScreenName = fragment.getClass().getSimpleName(); } else { mScreenName = fragment.getClass().getName(); } ScreenChangeEvent screenChangeEvent = new ScreenChangeEvent( "screen_change", Sensfrx.getUid(), new ScreenChangeEvent.EventData( "" + mScreenName, "" + state ) ); Sensfrx.addEventInQueue(screenChangeEvent); } /** * This is for application closed * Adding the screen change event in the queue when any screen change * <br></br> * eg MainActivity change to another activity * * @param state as para */ public static void logScreenEvent(String state) { ScreenChangeEvent screenChangeEvent = new ScreenChangeEvent( "screen_change", Sensfrx.getUid(), new ScreenChangeEvent.EventData( "App", "" + state) ); Sensfrx.addEventInQueue(screenChangeEvent); } /** * Adding the click event in the queue when user click on the screen * * @param event as para * @param context as para */ public static void logClickEvent(MotionEvent event, Context context) { switch (event.getAction()) { case MotionEvent.ACTION_DOWN: { pressStartTime = System.currentTimeMillis(); pressedX = (int) event.getX(); pressedY = (int) event.getY(); stayedWithinClickDistance = true; break; } case MotionEvent.ACTION_MOVE: { if (stayedWithinClickDistance && distance(pressedX, pressedY, event.getX(), event.getY(), context) > MAX_CLICK_DISTANCE) { stayedWithinClickDistance = false; } break; } case MotionEvent.ACTION_UP: { long pressDuration = System.currentTimeMillis() - pressStartTime; DeviceCoordinate.getInstance(context).start(); if (pressDuration < MAX_CLICK_DURATION && stayedWithinClickDistance) { //SensfrxLogger.d("ACTION_UP " + "X: " + pressedX + " || Y: " + pressedY); ClickEvent clickEvent = new ClickEvent( "click_event", Sensfrx.getUid(), new ClickEvent.EventData( "" + DeviceCoordinate.getDeviceOrientation(context), "ACTION_UP", new 
ClickEvent.GyroscopCord( "" + DeviceCoordinate.X, "" + DeviceCoordinate.Y, "" + DeviceCoordinate.Z ), new ClickEvent.DisplayCord( "" + pressedX, "" + pressedY ))); Sensfrx.addEventInQueue(clickEvent); } else { //SensfrxLogger.d("MOVE " + "X: " + pressedX + " || Y: " + pressedY); ClickEvent clickEvent = new ClickEvent( "click_event", Sensfrx.getUid(), new ClickEvent.EventData( "" + DeviceCoordinate.getDeviceOrientation(context), "MOVE", new ClickEvent.GyroscopCord( "" + DeviceCoordinate.X, "" + DeviceCoordinate.Y, "" + DeviceCoordinate.Z ), new ClickEvent.DisplayCord( "" + pressedX, "" + pressedY ))); Sensfrx.addEventInQueue(clickEvent); } } } } /** * @return click event occurs */ public static boolean getClickStatus() { return isClick; } private static float distance(float x1, float y1, float x2, float y2, Context context) { float dx = x1 - x2; float dy = y1 - y2; float distanceInPx = (float) Math.sqrt(dx * dx + dy * dy); return pxToDp(distanceInPx, context); } private static float pxToDp(float px, Context context) { return px / context.getResources().getDisplayMetrics().density; } }
0
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/ai/sensfrx
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/ai/sensfrx/network/ApiServices.java
package ai.sensfrx.network;

import static ai.sensfrx.utils.Constants.API_SIGN;
import static ai.sensfrx.utils.Constants.PACKAGE_NAME_PARA;
import static ai.sensfrx.utils.Constants.SECRET_KEY_PARA;
import static ai.sensfrx.utils.Constants.TIME_STAMP;
import static ai.sensfrx.utils.Constants.TOKEN;

// NOTE(review): the duplicated "sensfrx" segment in this import path looks
// suspicious — verify the package of DeviceInfoResponse.
import ai.sensfrx.sensfrx.data.model.DeviceInfoResponse;
import com.google.gson.JsonObject;
import retrofit2.Call;
import retrofit2.http.Body;
import retrofit2.http.Header;
import retrofit2.http.Headers;
import retrofit2.http.POST;

/**
 * Retrofit definitions for the Sensfrx backend endpoints.
 * All calls carry the package name, secret key, timestamp, and request
 * signature as headers, plus a JSON body.
 */
public interface ApiServices {

    /**
     * Requests an API token for this installation.
     * NOTE(review): the Content-Type value below contains a trailing space —
     * probably harmless to the server, but confirm before normalizing.
     */
    @Headers({"Content-Type: application/json ", "Accept: */*"})
    @POST("token")
    Call<DeviceInfoResponse> requestForToken(@Header(PACKAGE_NAME_PARA) String packageName, @Header(SECRET_KEY_PARA) String secretKey, @Header(TIME_STAMP) String timestamp, @Header(API_SIGN) String sign, @Body JsonObject jsonObject);

    /** Uploads a batch of user events (requires a previously issued token header). */
    @Headers({"Content-Type: application/json", "Accept: */*"})
    @POST("user_events")
    Call<JsonObject> logEvents(@Header(PACKAGE_NAME_PARA) String packageName, @Header(SECRET_KEY_PARA) String secretKey, @Header(TOKEN) String token, @Header(TIME_STAMP) String timestamp, @Header(API_SIGN) String sign, @Body JsonObject events);

    /** Submits a transaction payload for fraud evaluation. */
    @Headers({"Content-Type: application/json", "Accept: */*"})
    @POST("transaction")
    Call<JsonObject> transaction(@Header(PACKAGE_NAME_PARA) String packageName, @Header(SECRET_KEY_PARA) String secretKey, @Header(TIME_STAMP) String timestamp, @Header(API_SIGN) String sign, @Body JsonObject transactionJson);
}
0
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/ai/sensfrx
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/ai/sensfrx/network/GsonConverter.java
package ai.sensfrx.network;

import ai.sensfrx.utils.SensfrxLogger;
import ai.sensfrx.utils.Utils;
import com.squareup.tape2.ObjectQueue;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.Reader;
import java.io.Writer;
import java.nio.charset.StandardCharsets;

/**
 * A GsonConverter class that implements the ObjectQueue.Converter interface.
 * It provides methods to convert objects to and from byte arrays using Gson.
 *
 * Both directions now use UTF-8 explicitly; the previous code relied on the
 * platform default charset, which is only coincidentally UTF-8 on Android.
 *
 * @param <T> The type of object to be converted.
 */
public class GsonConverter<T> implements ObjectQueue.Converter<T> {

    private final Class<T> type; // The class type of the objects being converted

    /**
     * Constructor to initialize the GsonConverter with the specific type.
     *
     * @param type The class type of the objects being converted.
     */
    public GsonConverter(Class<T> type) {
        this.type = type;
    }

    /**
     * Converts a UTF-8 JSON byte array into an object of type T.
     *
     * @param bytes The byte array to be converted.
     * @return The converted object of type T, or null if conversion fails.
     */
    @Override
    public T from(byte[] bytes) {
        // try-with-resources ensures the reader is released even on parse failure
        try (Reader reader = new InputStreamReader(new ByteArrayInputStream(bytes), StandardCharsets.UTF_8)) {
            return Utils.getGsonInstance().fromJson(reader, type);
        } catch (Exception e) {
            // Callers treat null as "unreadable entry"; keep that contract but
            // log through the SDK logger instead of printStackTrace.
            SensfrxLogger.e("GsonConverter: failed to deserialize queued object", e);
        }
        return null;
    }

    /**
     * Converts an object of type T into a UTF-8 JSON byte stream.
     *
     * @param object The object to be converted.
     * @param bytes  The output stream where the object will be written.
     * @throws IOException If an I/O error occurs.
     */
    @Override
    public void toStream(T object, OutputStream bytes) throws IOException {
        Writer writer = new OutputStreamWriter(bytes, StandardCharsets.UTF_8);
        Utils.getGsonInstance().toJson(object, writer);
        writer.close(); // Close the writer to ensure the data is flushed to the stream
    }
}
0
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/ai/sensfrx
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/ai/sensfrx/network/RetrofitClient.java
package ai.sensfrx.network; import ai.sensfrx.utils.Constants; import retrofit2.Retrofit; import retrofit2.converter.gson.GsonConverterFactory; /** * Created by Sensfrx on 03-08-2022. * <br> * Copyright (c) 2022 SecureLayer7 Technologies. All rights reserved. */ public class RetrofitClient { private static volatile Retrofit retrofit = null; private static volatile RetrofitClient retrofitClient; /** * Private Constructor only accessible with in the class * <br> * Responsible to create an instance of Retrofit for network calls */ private RetrofitClient() { String BASE_URL; if (Constants.SANDBOX) { BASE_URL = Constants.BASE_URL_SANDBOX; } else { BASE_URL = Constants.BASE_URL_DEV; } retrofit = new Retrofit.Builder().baseUrl(BASE_URL).addConverterFactory(GsonConverterFactory.create()).build(); } /** * Synchronized static method * <br> * Responsible to return Retrofit client instance * * @return retrofitClient */ public static RetrofitClient getInstance() { if (retrofit == null) { synchronized (RetrofitClient.class) { if (retrofit == null) { retrofitClient = new RetrofitClient(); } } } return retrofitClient; } /** * ApiService interface where all the api's end point stored * <br> * And all the request is define in the ApiService interface * * @return apiServices */ public ApiServices apiServices() { return retrofit.create(ApiServices.class); } }
0
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/ai/sensfrx
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/ai/sensfrx/utils/AppCloneDetector.java
package ai.sensfrx.utils; import android.content.Context; import android.util.Log; /** * AppCloneDetector is a utility class that helps detect if an application is cloned. * It provides methods to determine if the app is running in a cloned environment and allows * for custom actions to be triggered upon detecting cloning. */ public class AppCloneDetector { // Constant representing a specific app ID (possibly related to cloning) private static final String APP_ID_999 = "999"; // Context of the application private final Context context; // Tag for logging purposes private final String TAG = "AppCloneDetector"; // Runnable action to be executed when cloning is detected private Runnable onCloningDetect = () -> { }; /** * Private constructor to initialize AppCloneDetector with the application context. * * @param context The application context. */ private AppCloneDetector(Context context) { this.context = context; } /** * Static factory method to create an instance of AppCloneDetector. * * @param context The application context. * @return A new instance of AppCloneDetector. */ public static AppCloneDetector create(Context context) { return new AppCloneDetector(context); } /** * Sets a custom action to be executed when cloning is detected. * * @param action The Runnable action to be executed. * @return The current instance of AppCloneDetector. */ public AppCloneDetector onCloningDetect(Runnable action) { this.onCloningDetect = action; return this; } /** * Checks if the application is cloned by analyzing the file path. * * @return True if the app is detected as cloned, false otherwise. 
*/ public boolean isAppCloned() { String appPath = context.getFilesDir().getPath(); Log.d(TAG, "App Path: " + appPath); // Determines if the app is cloned by checking the app path for the specific ID or an unusual number of dots boolean isCloned = appPath.contains(APP_ID_999) || (countDots(appPath) > countDots(context.getPackageName())); Log.d(TAG, "App is cloned: " + isCloned); return isCloned; } /** * Determines if the app is running in a cloned environment and logs the result. * * @return True if the app is running in a cloned environment, false otherwise. */ public boolean isRunningInClonedEnvironment() { boolean isCloned = isAppCloned(); if (isCloned) { // Custom action can be triggered here if cloning is detected // onCloningDetect.run(); Log.d(TAG, "App is running in cloned environment: true"); try { // Add any additional checks or actions if needed } catch (Exception e) { Log.e(TAG, "isRunningInClonedEnvironment: App should not run in cloned environment! " + e.getMessage()); return true; } } else { Log.d(TAG, "App is running in cloned environment: false"); } return isCloned; } /** * Helper method to count the number of dots (.) in a given string. * * @param string The string to count dots in. * @return The number of dots in the string. */ private int countDots(String string) { return string.split("\\.").length; } /** * Returns the file path of the application. * * @return The file path of the application as a string. */ public String getAppPath() { return context.getFilesDir().getPath(); } }
0
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/ai/sensfrx
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/ai/sensfrx/utils/AppDataUsageHelper.java
package ai.sensfrx.utils; import android.content.Context; import android.content.pm.ApplicationInfo; import android.content.pm.PackageManager; import android.net.TrafficStats; /** * AppDataUsageHelper is a utility class that helps monitor and retrieve the data usage of the application. * It provides methods to get the received and transmitted data usage in bytes and convert them to MB or GB. */ public class AppDataUsageHelper { private static final String TAG = "AppDataUsageHelper"; // Context of the application private final Context context; /** * Constructor to initialize AppDataUsageHelper with the application context. * * @param context The application context. */ public AppDataUsageHelper(Context context) { this.context = context; } /** * Retrieves the unique identifier (UID) of the application. * * @return The UID of the application, or -1 if not found. */ private int getAppUid() { try { PackageManager packageManager = context.getPackageManager(); ApplicationInfo applicationInfo = packageManager.getApplicationInfo(context.getPackageName(), PackageManager.GET_META_DATA); return applicationInfo.uid; } catch (PackageManager.NameNotFoundException e) { e.printStackTrace(); return -1; } } /** * Retrieves the amount of data received by the application in bytes. * * @return The number of bytes received by the application. */ public double getReceivedDataUsage() { int uid = getAppUid(); return TrafficStats.getUidRxBytes(uid); } /** * Retrieves the amount of data transmitted by the application in bytes. * * @return The number of bytes transmitted by the application. */ public double getTransmittedDataUsage() { int uid = getAppUid(); return TrafficStats.getUidTxBytes(uid); } /** * Converts bytes to megabytes (MB). * * @param bytes The number of bytes to convert. * @return The equivalent value in MB. */ private double bytesToMB(long bytes) { return (bytes / (1024.0 * 1024.0)); } /** * Converts bytes to gigabytes (GB). * * @param bytes The number of bytes to convert. 
* @return The equivalent value in GB. */ private double bytesToGB(long bytes) { return (bytes / (1024.0 * 1024.0 * 1024.0)); } }
0
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/ai/sensfrx
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/ai/sensfrx/utils/AppSignature.java
package ai.sensfrx.utils;

import java.nio.charset.StandardCharsets;
import java.security.GeneralSecurityException;
import javax.crypto.Mac;
import javax.crypto.spec.SecretKeySpec;
import java.util.Base64;

/**
 * Utility class for generating and decoding HMAC-based signatures.
 * The signatures are used to ensure the integrity and authenticity of data.
 *
 * All string/byte conversions are pinned to UTF-8; the previous code used the
 * platform default charset, which could produce different signatures across
 * environments.
 *
 * SECURITY NOTE(review): the pre-shared key is embedded in the shipped binary
 * and is trivially extractable from the APK — treat signatures produced here
 * as tamper-evidence only, not as a secret-based guarantee.
 */
public class AppSignature {

    // Pre-shared key used for generating the HMAC signature
    private static final String PRE_SHARED_KEY = "QD32VdbRuMa0iI0q9q7cH6FIHGcNWGdEZOLyK669";

    // The algorithm used to generate the HMAC signature
    private static final String SIGNATURE_ALGORITHM = "HmacSHA256";

    /**
     * Generates a HMAC signature over the current timestamp, the URL, and the
     * pre-shared key.
     *
     * @param url The URL to be signed.
     * @return The generated HMAC signature as a Base64 encoded string.
     * @throws IllegalArgumentException if the URL is null or empty.
     */
    public static String generateSignature(String url) {
        if (url == null || url.isEmpty()) {
            throw new IllegalArgumentException("URL cannot be null or empty.");
        }
        // Combine the current timestamp, URL, and pre-shared key to create the data to be signed.
        // NOTE(review): the timestamp is folded into the digest but not returned,
        // so the server must receive it separately to verify — confirm callers do.
        String timestamp = String.valueOf(System.currentTimeMillis());
        String data = timestamp + url + PRE_SHARED_KEY;
        return calculateHMAC(data);
    }

    /**
     * Calculates the HMAC-SHA256 signature for the given data.
     *
     * @param data The data to be signed.
     * @return The HMAC signature as a Base64 encoded string.
     * @throws IllegalArgumentException if the HMAC calculation fails.
     */
    private static String calculateHMAC(String data) {
        try {
            // Key and payload are encoded as UTF-8 so results are platform-independent.
            SecretKeySpec signingKey =
                    new SecretKeySpec(PRE_SHARED_KEY.getBytes(StandardCharsets.UTF_8), SIGNATURE_ALGORITHM);
            Mac mac = Mac.getInstance(SIGNATURE_ALGORITHM);
            mac.init(signingKey);
            byte[] rawHmac = mac.doFinal(data.getBytes(StandardCharsets.UTF_8));
            return Base64.getEncoder().encodeToString(rawHmac);
        } catch (GeneralSecurityException e) {
            throw new IllegalArgumentException("Failed to calculate HMAC", e);
        }
    }

    /**
     * Decodes a Base64 encoded signature back into a UTF-8 string.
     *
     * @param encodedSignature The Base64 encoded HMAC signature to be decoded.
     * @return The decoded signature as a string.
     * @throws IllegalArgumentException if the encoded signature is null or empty.
     */
    public static String decodeSignature(String encodedSignature) {
        if (encodedSignature == null || encodedSignature.isEmpty()) {
            throw new IllegalArgumentException("Encoded signature cannot be null or empty.");
        }
        byte[] decodedBytes = Base64.getDecoder().decode(encodedSignature);
        // Raw HMAC bytes are arbitrary binary; decoding as UTF-8 keeps the original
        // contract but may contain replacement characters for invalid sequences.
        return new String(decodedBytes, StandardCharsets.UTF_8);
    }
}
0
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/ai/sensfrx
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/ai/sensfrx/utils/Constants.java
package ai.sensfrx.utils;

/**
 * SDK-wide configuration: backend base URLs, HTTP header names, and runtime
 * credentials. Mutable fields (SANDBOX, SECRET_KEY) are presumably assigned
 * during SDK initialization — TODO confirm against callers.
 */
public class Constants {
    // When true, RetrofitClient targets the sandbox backend instead of dev.
    public static boolean SANDBOX = false;
    public static String BASE_URL_PRODUCTION = "https://m.sensfrx.ai/v1/";
    public static String BASE_URL_DEV = "https://mdev.sensfrx.ai/v1/";
    public static String BASE_URL_SANDBOX = "https://sandboxmdev.sensfrx.ai/v1/";
    // HTTP header names used by ApiServices.
    public static final String PACKAGE_NAME_PARA = "package";
    public static final String SECRET_KEY_PARA = "authorization";
    public static final String TIME_STAMP = "timestamp";
    public static final String API_SIGN = "sign";
    public static final String TOKEN = "token";
    // Populated at runtime with the integrator's secret key.
    public static String SECRET_KEY = "";
}
0
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/ai/sensfrx
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/ai/sensfrx/utils/EncodeSecret.java
package ai.sensfrx.utils;

import android.util.Base64;
import java.io.UnsupportedEncodingException;
import java.nio.charset.StandardCharsets;

/**
 * EncodeSecret is a utility class for encoding and decoding strings using Base64.
 * It provides methods to encode a string into Base64 format and decode a Base64 encoded string.
 *
 * The previous implementation placed {@code return} statements inside
 * {@code finally} blocks, which silently discarded any exception thrown in the
 * {@code try} (e.g. an NPE on null input was swallowed and an empty string
 * returned). Those constructs have been removed so failures propagate normally.
 */
public class EncodeSecret {

    /**
     * Encodes the provided string using Base64 encoding.
     *
     * @param propertyIdAndSecret The string to be encoded; must not be null.
     * @return The Base64 encoded string.
     * @throws NullPointerException if the input is null (previously masked).
     */
    public static String encodeSecret(String propertyIdAndSecret) {
        // UTF_8 via StandardCharsets never throws, so no try/catch is needed.
        byte[] data = propertyIdAndSecret.getBytes(StandardCharsets.UTF_8);
        return Base64.encodeToString(data, Base64.NO_WRAP);
    }

    /**
     * Decodes the provided Base64 encoded string.
     *
     * @param encoded The Base64 encoded string to be decoded.
     * @return The decoded string.
     */
    private String decodeSecret(String encoded) {
        byte[] dataDec = Base64.decode(encoded, Base64.NO_WRAP);
        // StandardCharsets.UTF_8 replaces the checked-exception "UTF-8" lookup;
        // the charset is guaranteed present, so no UnsupportedEncodingException path.
        return new String(dataDec, StandardCharsets.UTF_8);
    }
}
0
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/ai/sensfrx
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/ai/sensfrx/utils/SensfrxLogger.java
package ai.sensfrx.utils;

import android.util.Log;

/**
 * Thin wrapper around {@link android.util.Log} that pins a single tag
 * ("Sensfrx") for all SDK log output.
 */
public class SensfrxLogger {

    // Common tag so all SDK output can be filtered in logcat.
    private static final String TAG = "Sensfrx";

    /**
     * Error log.
     *
     * @param message Error Message
     */
    public static void e(String message) {
        Log.e(TAG, message);
    }

    /**
     * Error log with throwable.
     *
     * @param message   Error Message
     * @param throwable Throwable
     */
    public static void e(String message, Throwable throwable) {
        Log.e(TAG, message, throwable);
    }

    /**
     * Warning Log.
     *
     * Fix: previously delegated to {@code Log.e}, so warnings were reported
     * at ERROR severity; now correctly logs at WARN level.
     *
     * @param message Warning Message
     */
    public static void w(String message) {
        Log.w(TAG, message);
    }

    /**
     * Debug Log.
     *
     * @param message Debug Message
     */
    public static void d(String message) {
        Log.d(TAG, message);
    }

    /**
     * Info Log.
     *
     * @param message Info Message
     */
    public static void i(String message) {
        Log.i(TAG, message);
    }
}
0
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/ai/sensfrx
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/ai/sensfrx/utils/StringCompressor.java
package ai.sensfrx.utils; import android.os.Build; import androidx.annotation.RequiresApi; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.util.Base64; import java.util.zip.GZIPInputStream; import java.util.zip.GZIPOutputStream; /** * Utility class for string compression and Base64 encoding. */ public class StringCompressor { /** * Compresses a string using GZIP compression. * * @param input The string to compress. * @return A byte array containing the compressed data. * @throws IOException If an I/O error occurs during compression. */ public static byte[] compressString(String input) throws IOException { // Create an output stream to hold the compressed data ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); // Create a GZIPOutputStream to compress the input string GZIPOutputStream gzipOutputStream = new GZIPOutputStream(outputStream); gzipOutputStream.write(input.getBytes("UTF-8")); // Write the input string as bytes gzipOutputStream.close(); // Close the stream to complete compression // Return the compressed data as a byte array return outputStream.toByteArray(); } /** * Encodes a byte array to a Base64 string. * * @param data The byte array to encode. * @return A Base64 encoded string. */ @RequiresApi(api = Build.VERSION_CODES.O) public static String encodeBase64(byte[] data) { // Encode the byte array to a Base64 string return Base64.getEncoder().encodeToString(data); } /** * Decompresses a GZIP compressed byte array back into a string. * * @param compressedData The compressed data as a byte array. * @return The decompressed string. * @throws IOException If an I/O error occurs during decompression. 
*/ public static String decompressString(byte[] compressedData) throws IOException { // Create an input stream to read the compressed data ByteArrayInputStream inputStream = new ByteArrayInputStream(compressedData); // Create a GZIPInputStream to decompress the data GZIPInputStream gzipInputStream = new GZIPInputStream(inputStream); // Buffer to hold the decompressed data byte[] buffer = new byte[1024]; StringBuilder stringBuilder = new StringBuilder(); int bytesRead; // Read the decompressed data into the buffer and append to the StringBuilder while ((bytesRead = gzipInputStream.read(buffer)) != -1) { stringBuilder.append(new String(buffer, 0, bytesRead, "UTF-8")); } gzipInputStream.close(); // Close the stream to complete decompression // Return the decompressed string return stringBuilder.toString(); } }
0
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/ai/sensfrx
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/ai/sensfrx/utils/Utils.java
package ai.sensfrx.utils; import android.content.Context; import android.net.ConnectivityManager; import android.net.NetworkInfo; import ai.sensfrx.data.model.ClickEvent; import ai.sensfrx.data.model.Event; import ai.sensfrx.data.model.ScreenChangeEvent; import ai.sensfrx.em.EventAdapter; import com.google.gson.Gson; import com.google.gson.GsonBuilder; import java.text.SimpleDateFormat; import java.util.Date; import java.util.Locale; /** * Utility class that provides various helper methods used throughout the application. */ public class Utils { // Singleton instance of SimpleDateFormat for formatting dates private static SimpleDateFormat formatter = null; // Singleton instance of Gson for JSON serialization/deserialization private static Gson gson; /** * Returns a singleton instance of Gson configured with custom adapters for Event-related classes. * * @return Gson instance with custom adapters. */ public static Gson getGsonInstance() { if (gson == null) { GsonBuilder gsonBuilder = new GsonBuilder(); // Register custom adapters for event-related classes EventAdapter eventAdapter = new EventAdapter(); gsonBuilder.registerTypeAdapter(Event.class, eventAdapter); gsonBuilder.registerTypeAdapter(ScreenChangeEvent.class, eventAdapter); gsonBuilder.registerTypeAdapter(ClickEvent.class, eventAdapter); // Create the Gson instance with the registered adapters gson = gsonBuilder.create(); } return gson; } /** * Calculates CPU usage as a percentage. * * @param toCalculate The value to calculate. * @param maximum The maximum value for the calculation. * @return The CPU usage percentage, or 30% if the maximum is zero. */ public static int calculateCpuUsagePercentage(int toCalculate, int maximum) { if (maximum != 0) { return (100 * toCalculate) / maximum; } else { return 30; } } /** * Checks if the device is connected to a network. * * @param context The application context. * @return True if the network is available, false otherwise. 
*/ public static boolean isNetworkAvailable(Context context) { ConnectivityManager cm = (ConnectivityManager) context.getApplicationContext().getSystemService(Context.CONNECTIVITY_SERVICE); NetworkInfo netInfo = cm.getActiveNetworkInfo(); return netInfo != null && netInfo.isConnected(); } /** * Formats a Date object into a string with a specific pattern. * * @param date The Date object to format. * @return A formatted date string. */ private static String formatDate(Date date) { if (formatter == null) { // Define the date format pattern (ISO 8601 format) String pattern = "yyyy-MM-dd'T'HH:mm:ss'Z'"; formatter = new SimpleDateFormat(pattern, new Locale("en")); } return formatter.format(date); } /** * Returns the current timestamp in ISO 8601 format. * * @return The current timestamp as a formatted string. */ public static String getTimestamp() { return formatDate(new Date()); } }
0
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/com/sensefrx
java-sources/ai/sensfrx/sensfrx-sdk/1.0.3/com/sensefrx/sensfrx/BuildConfig.java
/**
 * Automatically generated file. DO NOT MODIFY
 */
package com.sensefrx.sensfrx;

// Build-time configuration emitted by the Android Gradle plugin for the
// library's release variant. Regenerated on every build — never edit by hand.
public final class BuildConfig {
    public static final boolean DEBUG = false;
    public static final String LIBRARY_PACKAGE_NAME = "com.sensefrx.sensfrx";
    public static final String BUILD_TYPE = "release";
}
0
java-sources/ai/silot/taurus/taurus-sdk-java/1.0.0/ai/silot/taurus
java-sources/ai/silot/taurus/taurus-sdk-java/1.0.0/ai/silot/taurus/config/Taurus.java
package ai.silot.taurus.config;

/**
 * Global SDK configuration. Callers assign these fields once at startup;
 * the service classes (InvoiceService, DisbursementService) read
 * {@link #serverUrl} when building request URLs.
 */
public class Taurus {
    /**
     * Generate API Key in Taurus Dashboard.
     */
    public static String apiKey;

    /**
     * Server domain.
     * Note that different environments use different URLs.
     */
    public static String serverUrl;
}
0
java-sources/ai/silot/taurus/taurus-sdk-java/1.0.0/ai/silot/taurus
java-sources/ai/silot/taurus/taurus-sdk-java/1.0.0/ai/silot/taurus/model/TaurusBaseVo.java
package ai.silot.taurus.model;

import lombok.Data;

import java.io.Serializable;

/**
 * Generic envelope for every Taurus API response: a status code, a
 * human-readable message, and a typed payload. Accessors are generated
 * by lombok's {@code @Data}.
 *
 * @param <T> type of the {@code data} payload carried by the response.
 */
@Data
public class TaurusBaseVo<T> implements Serializable {
    // Status code returned by the Taurus server.
    private Integer code;
    // Human-readable message accompanying the code.
    private String msg;
    // Typed response payload; presumably null on error responses — TODO confirm.
    private T data;
}
0
java-sources/ai/silot/taurus/taurus-sdk-java/1.0.0/ai/silot/taurus
java-sources/ai/silot/taurus/taurus-sdk-java/1.0.0/ai/silot/taurus/model/TaurusListVo.java
package ai.silot.taurus.model;

import lombok.Data;

import java.io.Serializable;
import java.util.List;

/**
 * Paginated list wrapper returned by Taurus "list" endpoints.
 *
 * NOTE(review): these fields are public even though {@code @Data} also
 * generates getters/setters — confirm whether callers rely on direct
 * field access before tightening visibility.
 *
 * @param <T> element type of the page.
 */
@Data
public class TaurusListVo<T> implements Serializable {
    // True when more results exist beyond this page.
    public boolean hasNext;
    // Elements of the current page.
    public List<T> list;
}
0
java-sources/ai/silot/taurus/taurus-sdk-java/1.0.0/ai/silot/taurus/model
java-sources/ai/silot/taurus/taurus-sdk-java/1.0.0/ai/silot/taurus/model/disbursement/DisbursementVo.java
package ai.silot.taurus.model.disbursement;

import lombok.Data;

import java.io.Serializable;
import java.math.BigDecimal;
import java.util.Date;

/**
 * Data object describing a single disbursement, as returned by the
 * {@code /api/v1/disbursement} endpoints (see DisbursementService).
 * Accessors are generated by lombok's {@code @Data}.
 */
@Data
public class DisbursementVo implements Serializable {
    // Disbursement ID generated by Taurus.
    private Long disbursementId;
    // Caller-supplied ID, used for reconciliation.
    private String externalId;
    // Amount disbursed.
    private BigDecimal amount;
    private String currency;
    // Destination bank details.
    private String bankCode;
    private String accountHolderName;
    private String accountNumber;
    private String description;
    // Status — per DisbursementService.list: "PENDING", "COMPLETED", "FAILED".
    private String status;
    // Populated when status is FAILED — TODO confirm.
    private String failedReason;
    // Lifecycle timestamps.
    private Date completeTime;
    private Date createTime;
    private Date modifyTime;
}
0
java-sources/ai/silot/taurus/taurus-sdk-java/1.0.0/ai/silot/taurus/model
java-sources/ai/silot/taurus/taurus-sdk-java/1.0.0/ai/silot/taurus/model/invoice/InvoiceVo.java
package ai.silot.taurus.model.invoice;

import lombok.Data;

import java.io.Serializable;
import java.math.BigDecimal;
import java.util.Date;

/**
 * Data object describing a single invoice, as returned by the
 * {@code /api/v1/invoice} endpoints (see InvoiceService).
 * Accessors are generated by lombok's {@code @Data}.
 */
@Data
public class InvoiceVo implements Serializable {
    // Invoice ID generated by Taurus.
    private Long invoiceId;
    // Caller-supplied ID, typically the invoice's ID in the merchant's system.
    private String externalId;
    // Status — per InvoiceService.list: "UNPAID", "PAID", "EXPIRED", "SETTLED".
    private String status;
    // Amounts; semantics of the fee/adjustment fields not visible here — TODO confirm.
    private BigDecimal amount;
    private BigDecimal feesPaidAmount;
    private BigDecimal adjustedReceivedAmount;
    private String description;
    private String merchantName;
    // Hosted payment-page URL and post-payment redirect target.
    private String invoiceUrl;
    private String successRedirectUrl;
    private String currency;
    // Lifecycle timestamps.
    private Date expiryTime;
    private Date payTime;
    private Date createTime;
    private Date modifyTime;
    private Date settleTime;
    // How the invoice was paid; presumably set once status is PAID — TODO confirm.
    private String paidChannel;
    private String paidMethod;
    private String paymentDestination;
    private String eWalletType;
    // Banks/e-wallets the payer may use.
    private InvoiceVoPaymentChannel paymentChannel;
}
0
java-sources/ai/silot/taurus/taurus-sdk-java/1.0.0/ai/silot/taurus/model
java-sources/ai/silot/taurus/taurus-sdk-java/1.0.0/ai/silot/taurus/model/invoice/InvoiceVoAvailableBank.java
package ai.silot.taurus.model.invoice;

import lombok.Data;

import java.io.Serializable;

/**
 * One bank-transfer option available for paying an invoice
 * (element of {@link InvoiceVoPaymentChannel}).
 * Accessors are generated by lombok's {@code @Data}.
 */
@Data
public class InvoiceVoAvailableBank implements Serializable {
    private String accountHolderName;
    private String bankCode;
    // Virtual account number the payer transfers to.
    private String virtualAccountNumber;
}
0
java-sources/ai/silot/taurus/taurus-sdk-java/1.0.0/ai/silot/taurus/model
java-sources/ai/silot/taurus/taurus-sdk-java/1.0.0/ai/silot/taurus/model/invoice/InvoiceVoAvailableEWallet.java
package ai.silot.taurus.model.invoice;

import lombok.Data;

import java.io.Serializable;

/**
 * One e-wallet option available for paying an invoice
 * (element of {@link InvoiceVoPaymentChannel}).
 * Accessors are generated by lombok's {@code @Data}.
 */
@Data
public class InvoiceVoAvailableEWallet implements Serializable {
    private String bankCode;
    private String name;
}
0
java-sources/ai/silot/taurus/taurus-sdk-java/1.0.0/ai/silot/taurus/model
java-sources/ai/silot/taurus/taurus-sdk-java/1.0.0/ai/silot/taurus/model/invoice/InvoiceVoPaymentChannel.java
package ai.silot.taurus.model.invoice;

import lombok.Data;

import java.io.Serializable;
import java.util.List;

/**
 * Payment options attached to an invoice: the banks and e-wallets the
 * payer may use. Accessors are generated by lombok's {@code @Data}.
 */
@Data
public class InvoiceVoPaymentChannel implements Serializable {
    private List<InvoiceVoAvailableBank> availableBanks;
    private List<InvoiceVoAvailableEWallet> availableEwallets;
}
0
java-sources/ai/silot/taurus/taurus-sdk-java/1.0.0/ai/silot/taurus
java-sources/ai/silot/taurus/taurus-sdk-java/1.0.0/ai/silot/taurus/service/DisbursementService.java
package ai.silot.taurus.service; import ai.silot.taurus.config.Taurus; import ai.silot.taurus.model.TaurusBaseVo; import ai.silot.taurus.model.TaurusListVo; import ai.silot.taurus.model.disbursement.DisbursementVo; import ai.silot.taurus.util.TaurusHttpUtil; import com.google.gson.Gson; import com.google.gson.reflect.TypeToken; import java.io.IOException; import java.lang.reflect.Type; import java.math.BigDecimal; import java.util.HashMap; import java.util.Map; public class DisbursementService { /** * Create Disbursement * * @param externalId ID of the disbursement in your system, used to reconcile disbursements after they have been completed. * @param amount Amount to disburse * @param bankCode Code of the destination bank * @param accountHolderName Name of account holder as per the bank's or e-wallet's records. Used for verification and error/customer support scenarios. * @param accountNumber Destination bank account number. If disbursing to an e-wallet, phone number registered with the e-wallet account. 
* @param description Description to send with the disbursement * @return Current disbursement vo * @throws IOException May be caused by an HTTP request */ public static TaurusBaseVo<DisbursementVo> create(String externalId, BigDecimal amount, String bankCode, String accountHolderName, String accountNumber, String description) throws IOException { String url = String.format("%s%s", Taurus.serverUrl, "/api/v1/disbursement"); Type type = new TypeToken<TaurusBaseVo<DisbursementVo>>() { }.getType(); Map<String, Object> paramMap = new HashMap<>(); paramMap.put("externalId", externalId); paramMap.put("amount", amount); paramMap.put("bankCode", bankCode); paramMap.put("accountHolderName", accountHolderName); paramMap.put("accountNumber", accountNumber); paramMap.put("description", description); return TaurusHttpUtil.post(url, type, new Gson().toJson(paramMap)); } /** * Get Disbursement By ID * * @param disbursementId An disbursement ID generated by Taurus * @return disbursement vo * @throws IOException May be caused by an HTTP request */ public static TaurusBaseVo<DisbursementVo> getById(Long disbursementId) throws IOException { String url = String.format("%s%s?disbursementId=%s", Taurus.serverUrl, "/api/v1/disbursement", disbursementId); Type type = new TypeToken<TaurusBaseVo<DisbursementVo>>() { }.getType(); return TaurusHttpUtil.get(url, type); } /** * List Disbursements * This endpoint queries the current status of all disbursements with sepcific conditions. * * @param limit (optional) default 10 * @param lastDisbursementId (optional) A cursor for use in pagination. * @param status (optional) disbursement status. 
Enum: "PENDING" "COMPLETED" "FAILED" * @return disbursement vo list * @throws IOException May be caused by an HTTP request */ public static TaurusBaseVo<TaurusListVo<DisbursementVo>> list(Integer limit, Long lastDisbursementId, String status) throws IOException { String url = String.format("%s%s?limit=%s&lastDisbursementId=%s&status=%s", Taurus.serverUrl, "/api/v1/disbursement/list", limit == null ? "" : limit, lastDisbursementId == null ? "" : lastDisbursementId, status == null ? "" : status); Type type = new TypeToken<TaurusBaseVo<TaurusListVo<DisbursementVo>>>() { }.getType(); return TaurusHttpUtil.get(url, type); } }
0
java-sources/ai/silot/taurus/taurus-sdk-java/1.0.0/ai/silot/taurus
java-sources/ai/silot/taurus/taurus-sdk-java/1.0.0/ai/silot/taurus/service/InvoiceService.java
package ai.silot.taurus.service; import ai.silot.taurus.config.Taurus; import ai.silot.taurus.model.TaurusBaseVo; import ai.silot.taurus.model.TaurusListVo; import ai.silot.taurus.model.invoice.InvoiceVo; import ai.silot.taurus.util.TaurusHttpUtil; import com.google.gson.Gson; import com.google.gson.reflect.TypeToken; import java.io.IOException; import java.lang.reflect.Type; import java.math.BigDecimal; import java.util.HashMap; import java.util.Map; public class InvoiceService { /** * Create Invoice * * @param externalId ID of your choice (typically the unique identifier of an invoice in your system) * @param amount Amount on the invoice. The minimum amount to create an invoice is 1 IDR * @param description Description of the invoice * @param invoiceDuration Duration of time that the end customer is given to pay the invoice before expiration (in seconds, since creation). Default is 24 hours (86,400 seconds). * @param successRedirectUrl URL that the end customer will be redirected to upon successful invoice payment. 
* @return Current invoice vo * @throws IOException May be caused by an HTTP request */ public static TaurusBaseVo<InvoiceVo> create(String externalId, BigDecimal amount, String description, Integer invoiceDuration, String successRedirectUrl) throws IOException { String url = String.format("%s%s", Taurus.serverUrl, "/api/v1/invoice"); Type type = new TypeToken<TaurusBaseVo<InvoiceVo>>() { }.getType(); Map<String, Object> paramMap = new HashMap<>(); paramMap.put("externalId", externalId); paramMap.put("amount", amount); paramMap.put("description", description); paramMap.put("invoiceDuration", invoiceDuration); paramMap.put("successRedirectUrl", successRedirectUrl); return TaurusHttpUtil.post(url, type, new Gson().toJson(paramMap)); } /** * Get invoice detail * * @param invoiceId An invoice ID generated by Taurus * @return invoice vo * @throws IOException May be caused by an HTTP request */ public static TaurusBaseVo<InvoiceVo> getById(Long invoiceId) throws IOException { String url = String.format("%s%s?invoiceId=%s", Taurus.serverUrl, "/api/v1/invoice", invoiceId); Type type = new TypeToken<TaurusBaseVo<InvoiceVo>>() { }.getType(); return TaurusHttpUtil.get(url, type); } /** * List All Invoices * You can list all invoices, or list the invoices for a specific updated time. * The invoices are returned sorted by created date, with the most recently created invoices appearing first. * * @param limit (optional) default 10 * @param lastInvoiceId (optional) A cursor for use in pagination. * @param status (optional) Invoice status. Enum: "UNPAID" "PAID" "EXPIRED" "SETTLED" * @return invoice vo list * @throws IOException May be caused by an HTTP request */ public static TaurusBaseVo<TaurusListVo<InvoiceVo>> list(Integer limit, Long lastInvoiceId, String status) throws IOException { String url = String.format("%s%s?limit=%s&lastInvoiceId=%s&status=%s", Taurus.serverUrl, "/api/v1/invoice/list", limit == null ? "" : limit, lastInvoiceId == null ? 
"" : lastInvoiceId, status == null ? "" : status); Type type = new TypeToken<TaurusBaseVo<TaurusListVo<InvoiceVo>>>() { }.getType(); return TaurusHttpUtil.get(url, type); } /** * You can cancel an already created invoice by expiring it immediately using this endpoint. * * @param invoiceId An invoice ID generated by Taurus * @return Common response * @throws IOException May be caused by an HTTP request */ public static TaurusBaseVo<Object> expire(Long invoiceId) throws IOException { String url = String.format("%s%s", Taurus.serverUrl, "/api/v1/invoice/expire"); Type type = new TypeToken<TaurusBaseVo<Object>>() { }.getType(); Map<String, Object> paramMap = new HashMap<>(); paramMap.put("invoiceId", invoiceId); return TaurusHttpUtil.post(url, type, new Gson().toJson(paramMap)); } }
0
java-sources/ai/silot/taurus/taurus-sdk-java/1.0.0/ai/silot/taurus
java-sources/ai/silot/taurus/taurus-sdk-java/1.0.0/ai/silot/taurus/util/Base64.java
package ai.silot.taurus.util; public class Base64 { /** * 标准编码表 */ private static final byte[] ENCODE_TABLE = { // 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', // 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', // 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', // 'Y', 'Z', 'a', 'b', 'c', 'd', 'e', 'f', // 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', // 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', // 'w', 'x', 'y', 'z', '0', '1', '2', '3', // '4', '5', '6', '7', '8', '9', '+', '/' // }; public static String encode(String content) { if (content == null) { return null; } byte[] encode = encode(content.getBytes(), false); if (encode == null) { return null; } return new String(encode); } public static byte[] encode(byte[] arr, boolean isMultiLine) { if (null == arr) { return null; } int len = arr.length; if (len == 0) { return new byte[0]; } int evenLen = (len / 3) * 3; int cnt = ((len - 1) / 3 + 1) << 2; int destLen = cnt + (isMultiLine ? (cnt - 1) / 76 << 1 : 0); byte[] dest = new byte[destLen]; byte[] encodeTable = ENCODE_TABLE; for (int s = 0, d = 0, cc = 0; s < evenLen; ) { int i = (arr[s++] & 0xff) << 16 | (arr[s++] & 0xff) << 8 | (arr[s++] & 0xff); dest[d++] = encodeTable[(i >>> 18) & 0x3f]; dest[d++] = encodeTable[(i >>> 12) & 0x3f]; dest[d++] = encodeTable[(i >>> 6) & 0x3f]; dest[d++] = encodeTable[i & 0x3f]; if (isMultiLine && ++cc == 19 && d < destLen - 2) { dest[d++] = '\r'; dest[d++] = '\n'; cc = 0; } } int left = len - evenLen;// 剩余位数 if (left > 0) { int i = ((arr[evenLen] & 0xff) << 10) | (left == 2 ? ((arr[len - 1] & 0xff) << 2) : 0); dest[destLen - 4] = encodeTable[i >> 12]; dest[destLen - 3] = encodeTable[(i >>> 6) & 0x3f]; dest[destLen - 2] = (left == 2) ? encodeTable[i & 0x3f] : (byte) '='; dest[destLen - 1] = '='; } return dest; } }
0
java-sources/ai/silot/taurus/taurus-sdk-java/1.0.0/ai/silot/taurus
java-sources/ai/silot/taurus/taurus-sdk-java/1.0.0/ai/silot/taurus/util/HttpRequest.java
package ai.silot.taurus.util; import java.io.*; import java.net.HttpURLConnection; import java.net.URL; import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; public class HttpRequest { private Method method = Method.GET; private final Map<String, List<String>> headers = new HashMap<>(); private final String url; private String jsonBody; public HttpRequest(String url) { this.url = url; } public HttpRequest header(String name, String value) { if (null != name && null != value) { final ArrayList<String> valueList = new ArrayList<>(); valueList.add(value); this.headers.put(name, valueList); } return this; } public HttpRequest header(Map<String, List<String>> headers) { if (headers == null || headers.isEmpty()) { return this; } this.headers.putAll(headers); return this; } public HttpRequest body(String jsonBody) { this.jsonBody = jsonBody; return this; } public HttpRequest method(Method method) { this.method = method; return this; } public static HttpRequest get(String url) { return new HttpRequest(url).method(Method.GET); } public static HttpRequest post(String url) { return new HttpRequest(url).method(Method.POST); } public HttpResponse execute() throws IOException { URL url = new URL(this.url); HttpURLConnection connection = (HttpURLConnection) url.openConnection(); connection.setConnectTimeout(30000); connection.setUseCaches(false); connection.setRequestMethod(this.method.name()); for (Entry<String, List<String>> entry : headers.entrySet()) { for (String value : entry.getValue()) { connection.setRequestProperty(entry.getKey(), value); } } String params = ""; if (Method.POST.equals(this.method) || Method.PUT.equals(this.method) || Method.DELETE.equals(this.method)) { connection.setDoOutput(true); connection.setRequestProperty("Accept-Charset", "utf-8"); connection.setRequestProperty("Content-Type", "application/json;charset=utf-8"); OutputStream stream = 
connection.getOutputStream(); if (jsonBody != null) { params = jsonBody; } stream.write(params.getBytes(StandardCharsets.UTF_8)); stream.close(); } int responseCode = connection.getResponseCode(); String responseBody; InputStream inputStream; if (responseCode >= 200 && responseCode < 300) { inputStream = connection.getInputStream(); } else { inputStream = connection.getErrorStream(); } BufferedReader br = new BufferedReader(new InputStreamReader(inputStream)); StringBuilder sb = new StringBuilder(); String line; while ((line = br.readLine()) != null) { sb.append(line); } br.close(); responseBody = sb.toString(); HttpResponse httpResponse = new HttpResponse(); httpResponse.setResponseBody(responseBody); return httpResponse; } }
0
java-sources/ai/silot/taurus/taurus-sdk-java/1.0.0/ai/silot/taurus
java-sources/ai/silot/taurus/taurus-sdk-java/1.0.0/ai/silot/taurus/util/HttpResponse.java
package ai.silot.taurus.util; public class HttpResponse { private String responseBody; public HttpResponse() { } public void setResponseBody(String responseBody) { this.responseBody = responseBody; } public String body() { return responseBody; } }