repo stringlengths 1 191 ⌀ | file stringlengths 23 351 | code stringlengths 0 5.32M | file_length int64 0 5.32M | avg_line_length float64 0 2.9k | max_line_length int64 0 288k | extension_type stringclasses 1 value |
|---|---|---|---|---|---|---|
librec | librec-master/librec/src/main/java/librec/baseline/RandomGuess.java | // Copyright (C) 2014 Guibing Guo
//
// This file is part of LibRec.
//
// LibRec is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// LibRec is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with LibRec. If not, see <http://www.gnu.org/licenses/>.
//
package librec.baseline;
import librec.data.SparseMatrix;
import librec.intf.Recommender;
import librec.util.Randoms;
/**
* Baseline: predict by a random value in (minRate, maxRate)
*
* @author guoguibing
*
*/
/**
 * Baseline recommender that ignores all training data and predicts a
 * uniformly random rating within the rating scale.
 */
public class RandomGuess extends Recommender {

    /**
     * Builds the random-guess baseline.
     *
     * @param trainMatrix training ratings (unused by this baseline)
     * @param testMatrix  test ratings
     * @param fold        cross-validation fold index
     */
    public RandomGuess(SparseMatrix trainMatrix, SparseMatrix testMatrix, int fold) {
        super(trainMatrix, testMatrix, fold);
        setAlgoName("Random");
    }

    /** Returns a rating drawn uniformly at random from [minRate, maxRate). */
    @Override
    public double predict(int u, int j) {
        double guess = Randoms.uniform(minRate, maxRate);
        return guess;
    }
}
| 1,223 | 26.2 | 82 | java |
librec | librec-master/librec/src/main/java/librec/baseline/ItemAverage.java | // Copyright (C) 2014 Guibing Guo
//
// This file is part of LibRec.
//
// LibRec is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// LibRec is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with LibRec. If not, see <http://www.gnu.org/licenses/>.
//
package librec.baseline;
import java.util.HashMap;
import java.util.Map;
import librec.data.SparseMatrix;
import librec.data.SparseVector;
import librec.intf.Recommender;
/**
* Baseline: predict by the average of target item's ratings
*
* @author guoguibing
*
*/
/**
 * Baseline recommender that predicts the average of the target item's
 * training ratings; items with no training ratings fall back to the
 * global mean.
 */
public class ItemAverage extends Recommender {

    // Lazily-filled cache of per-item mean ratings, keyed by item index.
    private Map<Integer, Double> itemMeans;

    public ItemAverage(SparseMatrix rm, SparseMatrix tm, int fold) {
        super(rm, tm, fold);
        itemMeans = new HashMap<>();
        algoName = "ItemAvg";
    }

    /**
     * Predicts user u's rating of item j as item j's training mean.
     *
     * @param u user index (ignored by this baseline)
     * @param j item index
     * @return cached mean of item j's ratings, or the global mean if the
     *         item never appears in the training matrix
     */
    @Override
    public double predict(int u, int j) {
        // Single map lookup instead of containsKey + get + put + get;
        // null means "not cached yet" (we never store null values).
        Double mean = itemMeans.get(j);
        if (mean == null) {
            SparseVector jv = trainMatrix.column(j);
            mean = jv.getCount() > 0 ? jv.mean() : globalMean;
            itemMeans.put(j, mean);
        }
        return mean;
    }
}
| 1,469 | 25.25 | 71 | java |
librec | librec-master/librec/src/main/java/librec/metric/IRatingMetric.java | package librec.metric;
import librec.intf.Recommender;
import librec.util.Logs;
import java.util.List;
/**
* Rating metrics are based on the difference between the user's rating and
* that returned by the recommender.
* Created by rdburke on 8/1/16.
*/
public interface IRatingMetric extends IMetric {
    /**
     * Accumulates one (prediction, ground truth) pair into the metric.
     *
     * @param user      user index of the test rating
     * @param item      item index of the test rating
     * @param predicted rating produced by the recommender
     * @param actual    rating observed in the test set
     * @param rec       the recommender under evaluation (gives access to
     *                  rating-scale information etc.)
     */
    public void updatePredicted(int user, int item,
                                double predicted, double actual,
                                Recommender rec);
}
// DEFAULT RATING METRICS
// xMAE, xRMSE, xNMAE, xrMAE, xrRMSE, xMPE, xPerplexity,
/** Mean absolute error over all test ratings. */
class MetricMAE implements IRatingMetric {
    // Running sum of absolute prediction errors.
    private double m_totalErr;
    // Final MAE; -1 until compute() has run.
    private double m_mae;

    public String getName() { return "MAE"; }

    public void init(Recommender rec) {
        m_totalErr = 0.0;
        m_mae = -1;
    }

    /** Accumulates |actual - predicted| for one test rating. */
    public void updatePredicted(int user, int item,
                                double predicted, double actual,
                                Recommender rec) {
        m_totalErr += Math.abs(actual - predicted);
    }

    /** Finalizes the MAE as total error over the number of test ratings. */
    public void compute(int count) {
        m_mae = m_totalErr / count;
    }

    public double getValue() { return m_mae; }

    public String getValueAsString() {
        // BUG FIX: the format template must be String.format's FIRST argument.
        // The old instance-style call ValueFormatString.format(name, value)
        // resolved to static String.format(name, value), using the metric
        // NAME as the format string and silently dropping the value.
        return String.format(MetricCollection.ValueFormatString, this.getName(), this.getValue());
    }
}
/** Root mean squared error over all test ratings. */
class MetricRMSE implements IRatingMetric {
    // Running sum of squared prediction errors.
    private double m_totalSqErr;
    // Final RMSE; -1 until compute() has run.
    private double m_rmse;

    public String getName() { return "RMSE"; }

    public void init(Recommender rec) {
        m_totalSqErr = 0.0;
        m_rmse = -1;
    }

    /** Accumulates (actual - predicted)^2 for one test rating. */
    public void updatePredicted(int user, int item,
                                double predicted, double actual,
                                Recommender rec) {
        double err = Math.abs(actual - predicted);
        m_totalSqErr += err * err;
    }

    /** Finalizes the RMSE as sqrt(total squared error / count). */
    public void compute(int count) {
        m_rmse = Math.sqrt(m_totalSqErr / count);
    }

    public double getValue() { return m_rmse; }

    public String getValueAsString() {
        // BUG FIX: pass the format template first; the old call used the
        // metric name as the format string and discarded the value.
        return String.format(MetricCollection.ValueFormatString, this.getName(), this.getValue());
    }
}
/** MAE computed after rounding predictions to the nearest rating level. */
class MetricRMAE implements IRatingMetric {
    // Running sum of absolute errors of the rounded predictions.
    private double m_totalErr;
    // Final rounded-prediction MAE; -1 until compute() has run.
    private double m_rmae;
    // Smallest rating level; also used as the rounding granularity
    // (assumes the scale's levels are multiples of the minimum — the same
    // assumption the original code made).
    private double m_minRate;

    public String getName() { return "R_MAE"; }

    public void init(Recommender rec) {
        m_totalErr = 0.0;
        m_rmae = -1;
        List<Double> ratingScale = rec.rateDao.getRatingScale();
        m_minRate = ratingScale.get(0);
    }

    public void updatePredicted(int user, int item,
                                double predicted, double actual,
                                Recommender rec) {
        // rounding prediction to the closest rating level
        double rPred = Math.round(predicted / m_minRate) * m_minRate;
        m_totalErr += Math.abs(actual - rPred);
    }

    public void compute(int count) { m_rmae = m_totalErr / count; }

    public double getValue() { return m_rmae; }

    public String getValueAsString() {
        // BUG FIX: pass the format template first; the old call used the
        // metric name as the format string and discarded the value.
        return String.format(MetricCollection.ValueFormatString, this.getName(), this.getValue());
    }
}
/** RMSE computed after rounding predictions to the nearest rating level. */
class MetricRRMSE implements IRatingMetric {
    // Running sum of squared errors of the rounded predictions.
    private double m_totalSqErr;
    // Final rounded-prediction RMSE; -1 until compute() has run.
    private double m_rrmse;
    // Smallest rating level, used as the rounding granularity.
    private double m_minRate;

    public String getName() { return "R_RMSE"; }

    public void init(Recommender rec) {
        m_totalSqErr = 0.0;
        m_rrmse = -1;
        List<Double> ratingScale = rec.rateDao.getRatingScale();
        m_minRate = ratingScale.get(0);
    }

    public void updatePredicted(int user, int item,
                                double predicted, double actual,
                                Recommender rec) {
        // rounding prediction to the closest rating level
        double rPred = Math.round(predicted / m_minRate) * m_minRate;
        double err = Math.abs(actual - rPred);
        m_totalSqErr += err * err;
    }

    public void compute(int count) {
        m_rrmse = Math.sqrt(m_totalSqErr / count);
    }

    public double getValue() { return m_rrmse; }

    public String getValueAsString() {
        // BUG FIX: pass the format template first; the old call used the
        // metric name as the format string and discarded the value.
        return String.format(MetricCollection.ValueFormatString, this.getName(), this.getValue());
    }
}
/** MAE normalized by the width of the rating scale (max - min). */
class MetricNMAE implements IRatingMetric {
    // Running sum of absolute prediction errors.
    private double m_totalErr;
    // Final normalized MAE; -1 until compute() has run.
    private double m_nmae;
    // Bounds of the rating scale used for normalization.
    private double m_minRate, m_maxRate;

    public String getName() { return "NMAE"; }

    public void init(Recommender rec) {
        m_totalErr = 0.0;
        m_nmae = -1;
        List<Double> ratingScale = rec.rateDao.getRatingScale();
        m_minRate = ratingScale.get(0);
        m_maxRate = ratingScale.get(ratingScale.size() - 1);
    }

    public void updatePredicted(int user, int item,
                                double predicted, double actual,
                                Recommender rec) {
        m_totalErr += Math.abs(actual - predicted);
    }

    /** NMAE = MAE / (maxRate - minRate). */
    public void compute(int count) {
        double mae = m_totalErr / count;
        m_nmae = mae / (m_maxRate - m_minRate);
    }

    public double getValue() { return m_nmae; }

    public String getValueAsString() {
        // BUG FIX: pass the format template first; the old call used the
        // metric name as the format string and discarded the value.
        return String.format(MetricCollection.ValueFormatString, this.getName(), this.getValue());
    }
}
/**
 * Mean prediction error rate: fraction of test ratings whose rounded
 * prediction does not match the actual rating (within a tiny tolerance).
 */
class MetricMPE implements IRatingMetric {
    // Number of rounded predictions that differ from the actual rating.
    private int m_errCount;
    // Tolerance for treating a rounded prediction as "equal" to the actual.
    private static double s_threshold = 1e-5;
    // Final error rate; -1 until compute() has run.
    private double m_mpe;
    // Smallest rating level, used as the rounding granularity.
    private double m_minRate;

    public String getName() { return "MPE"; }

    public void init(Recommender rec) {
        m_errCount = 0;
        m_mpe = -1;
        List<Double> ratingScale = rec.rateDao.getRatingScale();
        m_minRate = ratingScale.get(0);
    }

    public void updatePredicted(int user, int item,
                                double predicted, double actual,
                                Recommender rec) {
        // rounding prediction to the closest rating level
        double rPred = Math.round(predicted / m_minRate) * m_minRate;
        double err = Math.abs(actual - rPred);
        if (err > s_threshold) m_errCount++;
    }

    public void compute(int count) {
        m_mpe = (double) (m_errCount) / count;
    }

    public double getValue() { return m_mpe; }

    public String getValueAsString() {
        // BUG FIX: pass the format template first; the old call used the
        // metric name as the format string and discarded the value.
        return String.format(MetricCollection.ValueFormatString, this.getName(), this.getValue());
    }
}
/**
 * Perplexity of the recommender's predictions; the per-rating perplexity
 * contribution is delegated to {@code Recommender.perplexity}.
 */
class Perplexity implements IRatingMetric {
    // Running sum of per-rating perplexity contributions.
    private double m_sumPerps;
    // Final perplexity; -1 until compute() has run (and only if any
    // contributions were accumulated).
    private double m_perp;

    public String getName() { return "Perplexity"; }

    public void init(Recommender rec) {
        m_sumPerps = 0.0;
        m_perp = -1;
    }

    public void updatePredicted(int user, int item,
                                double predicted, double actual,
                                Recommender rec) {
        try {
            double perp = rec.perplexity(user, item, predicted);
            m_sumPerps += perp;
        } catch (Exception e) {
            // Best-effort: not every recommender implements perplexity;
            // log and keep evaluating the remaining metrics.
            Logs.debug("Error computing perplexity: " + e.toString());
        }
    }

    /** Perplexity = exp(mean log-contribution); stays -1 if nothing accumulated. */
    public void compute(int count) {
        if (m_sumPerps > 0) {
            m_perp = Math.exp(m_sumPerps / count);
        }
    }

    public double getValue() { return m_perp; }

    public String getValueAsString() {
        // BUG FIX: pass the format template first; the old call used the
        // metric name as the format string and discarded the value.
        return String.format(MetricCollection.ValueFormatString, this.getName(), this.getValue());
    }
}
| 7,255 | 30.964758 | 90 | java |
librec | librec-master/librec/src/main/java/librec/metric/MetricDict.java | package librec.metric;
import librec.intf.Recommender;
import java.util.HashMap;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.util.Collection;
/**
* Utility class to associate metric names with the metric objects.
* Created by rdburke on 8/1/16.
*/
/**
 * Utility class that associates metric names with metric objects and
 * offers bulk init/compute plus CSV-style rendering of names and values.
 */
public class MetricDict<T extends IMetric> {

    // Metric name -> metric instance.
    Map<String, T> m_map;

    public MetricDict() {
        m_map = new HashMap<String, T>();
    }

    public boolean isEmpty() {
        return m_map.isEmpty();
    }

    public void addMetric(String name, T metric) {
        m_map.put(name, metric);
    }

    public T getMetric(String name) {
        return m_map.get(name);
    }

    public Collection<T> getMetrics() {
        return m_map.values();
    }

    public boolean hasMetric(String name) {
        return m_map.containsKey(name);
    }

    /** Initializes every metric in the dictionary. */
    public void initAll(Recommender rec) {
        for (T metric : m_map.values()) {
            metric.init(rec);
        }
    }

    /** Finalizes every metric with the given observation count. */
    public void computeAll(int count) {
        for (T metric : m_map.values()) {
            metric.compute(count);
        }
    }

    /**
     * Comma-separated metric values, ordered by sorted metric name.
     * BUG FIX: an empty dictionary now yields "" instead of throwing
     * StringIndexOutOfBoundsException from deleteCharAt on an empty buffer.
     */
    public String getResultString() {
        StringBuilder buf = new StringBuilder();
        for (String name : getNames()) {
            if (buf.length() > 0) {
                buf.append(",");
            }
            buf.append(String.format("%.6f", m_map.get(name).getValue()));
        }
        return buf.toString();
    }

    /** @return the metric names in sorted order */
    public List<String> getNames() {
        // was a raw ArrayList; parameterize to avoid unchecked assignment
        List<String> names = new ArrayList<String>(m_map.keySet());
        java.util.Collections.sort(names);
        return names;
    }

    /**
     * Comma-separated metric names in sorted order; "" when empty
     * (same empty-buffer fix as getResultString).
     */
    public String getNamesString() {
        StringBuilder buf = new StringBuilder();
        for (String name : getNames()) {
            if (buf.length() > 0) {
                buf.append(",");
            }
            buf.append(name);
        }
        return buf.toString();
    }
}
| 2,055 | 23.188235 | 70 | java |
librec | librec-master/librec/src/main/java/librec/metric/AverageMetric.java | package librec.metric;
import librec.intf.Recommender;
/**
* Created by rdburke on 8/2/16.
*/
/**
 * Accumulates a metric's per-fold values and reports their average;
 * used to summarize cross-validation results.
 */
public class AverageMetric implements IMetric {
    // Display/lookup name of the underlying metric.
    String m_name;
    // Running sum of the values seen so far.
    double m_sum;
    // Final average; -1 until compute() has run.
    double m_avg;

    public AverageMetric(String name) {
        m_name = name;
    }

    public String getName() { return m_name; }

    public void init(Recommender rec) {
        m_sum = 0.0;
        m_avg = -1;
    }

    /** Adds one fold's metric value to the running sum. */
    public void update(double value) {
        m_sum += value;
    }

    /** @param count the number of values accumulated (e.g. fold count) */
    public void compute(int count) {
        m_avg = m_sum / count;
    }

    /**
     * Returns the averaged value as a double.
     * @return the average, or -1 if compute() has not run
     */
    public double getValue() { return m_avg; }

    /**
     * Returns the value in "Name: value" string form.
     * BUG FIX: pass the format template as String.format's first argument;
     * the old instance-style call used the metric name as the format string
     * and discarded the value.
     */
    public String getValueAsString() {
        return String.format(MetricCollection.ValueFormatString, this.getName(), this.getValue());
    }
}
| 884 | 17.829787 | 90 | java |
librec | librec-master/librec/src/main/java/librec/metric/IDiversityMetric.java | package librec.metric;
import librec.intf.Recommender;
import java.util.List;
/**
* Created by rdburke on 8/2/16.
*/
public interface IDiversityMetric<T> extends IMetric {
    /**
     * Accumulates the diversity of one user's recommendation list.
     *
     * @param results ranked list of recommended items for a single user
     * @param rec     the recommender, which supplies the list-diversity
     *                computation (diverseAt)
     */
    public void updateDiversity(List<T> results, Recommender rec);
}
// diversity measures xD5, xD10,
/** Average diversity of the top-5 recommendations across users. */
class MetricDiv5 implements IDiversityMetric<Integer> {
    // Running sum of per-user diversity-at-5 values.
    private double m_sumDiv5;
    // Final averaged diversity; -1 until compute() has run.
    private double m_div;

    public String getName() { return "Div5"; }

    public void init(Recommender rec) {
        m_sumDiv5 = 0.0;
        m_div = -1;
    }

    /** Adds one user's diversity at cutoff 5. */
    public void updateDiversity(List<Integer> results, Recommender rec) {
        m_sumDiv5 += rec.diverseAt(results, 5);
    }

    public void compute(int count) {
        m_div = m_sumDiv5 / count;
    }

    public double getValue() { return m_div; }

    public String getValueAsString() {
        // BUG FIX: pass the format template first; the old call used the
        // metric name as the format string and discarded the value.
        return String.format(MetricCollection.ValueFormatString, this.getName(), this.getValue());
    }
}
/** Average diversity of the top-10 recommendations across users. */
class MetricDiv10 implements IDiversityMetric<Integer> {
    // Running sum of per-user diversity-at-10 values.
    private double m_sumDiv10;
    // Final averaged diversity; -1 until compute() has run.
    private double m_div;

    public String getName() { return "Div10"; }

    public void init(Recommender rec) {
        m_sumDiv10 = 0.0;
        m_div = -1;
    }

    /** Adds one user's diversity at cutoff 10. */
    public void updateDiversity(List<Integer> results, Recommender rec) {
        m_sumDiv10 += rec.diverseAt(results, 10);
    }

    public void compute(int count) {
        m_div = m_sumDiv10 / count;
    }

    public double getValue() { return m_div; }

    public String getValueAsString() {
        // BUG FIX: pass the format template first; the old call used the
        // metric name as the format string and discarded the value.
        return String.format(MetricCollection.ValueFormatString, this.getName(), this.getValue());
    }
}
| 1,632 | 23.742424 | 90 | java |
librec | librec-master/librec/src/main/java/librec/metric/IMetric.java | package librec.metric;
import librec.intf.Recommender;
/**
* General interface for metrics that evaluate recommender systems.
* Created by rdburke on 8/1/16.
*/
public interface IMetric {
    /**
     * @return the name of the metric, used for lookup in metric dictionaries
     *         and for display
     */
    public String getName ();
    /**
     * Initializes (or resets) the metric before an evaluation run.
     * @param rec the recommender under evaluation; supplies context such as
     *            the rating scale
     */
    public void init(Recommender rec);
    /**
     * After the values have been accumulated, this function does the final computation.
     * @param count the number of observations accumulated (e.g. test ratings
     *              or evaluated users)
     */
    public void compute(int count);
    /**
     * Returns the value as a double.
     * @return the computed metric value; conventionally -1 before compute()
     */
    public double getValue ();
    /**
     * Returns the value in string form.
     * @return the metric value formatted for display
     */
    public String getValueAsString ();
    /**
     * Return the value annotated with the metric name, as in "Prec5: (0.375)"
     * @return name-annotated string representation
     */
    public String toString();
}
| 923 | 18.659574 | 88 | java |
librec | librec-master/librec/src/main/java/librec/metric/AvgMetricCollection.java | package librec.metric;
import librec.intf.Recommender;
import java.util.ArrayList;
import java.util.List;
/**
* Created by rdburke on 8/2/16.
*/
/**
 * Maintains one {@link AverageMetric} per configured metric, accumulating
 * each fold's results so cross-validation averages can be reported.
 */
public class AvgMetricCollection {

    // Per-metric running averages, keyed by metric name.
    MetricDict<AverageMetric> m_avgMetrics;

    /**
     * Creates an AverageMetric for every metric name registered in the
     * recommender's measures collection (rating, ranking, time, diversity).
     */
    public AvgMetricCollection(Recommender rec) {
        m_avgMetrics = new MetricDict<AverageMetric>();
        List<String> names = new ArrayList<String>();
        names.addAll(rec.measures.getRatingMetricNames());
        names.addAll(rec.measures.getRankingMetricNames());
        names.addAll(rec.measures.getTimeMetricNames());
        names.addAll(rec.measures.getDiversityMetricNames());
        for (String name : names) {
            m_avgMetrics.addMetric(name, new AverageMetric(name));
        }
    }

    public AverageMetric getMetric(String name) {
        return m_avgMetrics.getMetric(name);
    }

    public void initAll(Recommender rec) {
        for (AverageMetric metric : m_avgMetrics.getMetrics()) {
            metric.init(rec);
        }
    }

    // Folds one group of fold-level metrics into their running averages.
    // Replaces four copy-pasted loops that differed only in element type.
    private void accumulate(Iterable<? extends IMetric> metrics) {
        for (IMetric metric : metrics) {
            AverageMetric avgMetric = m_avgMetrics.getMetric(metric.getName());
            avgMetric.update(metric.getValue());
        }
    }

    /** Adds one evaluation fold's computed metrics to the averages. */
    public void updateFromMeasures(MetricCollection measures) {
        accumulate(measures.getRatingMetrics());
        accumulate(measures.getRankingMetrics());
        accumulate(measures.getTimeMetrics());
        accumulate(measures.getDiversityMetrics());
    }

    /** Finalizes all averages over the given number of folds. */
    public void compute(int count) {
        for (AverageMetric metric : m_avgMetrics.getMetrics()) {
            metric.compute(count);
        }
    }

    public String getMetricNamesString() {
        return m_avgMetrics.getNamesString();
    }

    public String getEvalResultString() {
        return m_avgMetrics.getResultString();
    }
}
| 2,258 | 30.816901 | 79 | java |
librec | librec-master/librec/src/main/java/librec/metric/MetricCollection.java | package librec.metric;
import java.util.ArrayList;
import java.util.List;
import java.util.Collection;
import librec.intf.Recommender;
import librec.util.Logs;
/**
* Collects all the metrics to be calculated. There are four separate collections:
* one for ratings, one for rankings, one for time, and one for diversity
* Created by rdburke on 8/1/16.
*/
/**
 * Collects all the metrics to be calculated. There are four separate
 * collections: one for ratings, one for rankings, one for time, and one
 * for diversity.
 */
public class MetricCollection {

    /** Template used to render a metric as "Name: value". */
    public static String ValueFormatString = "%s: %.6f";

    /** Default metric class names for rating prediction (plus timing). */
    public static String[] RatingMetrics =
            {"MetricMAE", "MetricRMSE", "MetricNMAE", "MetricRMAE", "MetricRRMSE", "MetricMPE", "Perplexity",
            /* execution time */
            "TrainTime", "TestTime"};

    /** Default full metric set: prediction, ranking and timing. */
    public static String[] AllMetrics =
            /* prediction-based measures */
            {"MetricMAE", "MetricRMSE", "MetricNMAE", "MetricRMAE", "MetricRRMSE", "MetricMPE", "Perplexity",
            /* ranking-based measures */
            "MetricPre5", "MetricPre10", "MetricRec5", "MetricRec10", "MetricMAP", "MetricMRR", "MetricNDCG", "MetricAUC",
            /* execution time */
            "TrainTime", "TestTime"};

    private MetricDict<IRatingMetric> m_ratingMetrics;
    private MetricDict<IRankingMetric<Integer>> m_rankingMetrics;
    private MetricDict<ITimeMetric> m_timeMetrics;
    private MetricDict<IDiversityMetric<Integer>> m_diversityMetrics;

    /** Creates an empty collection; metrics can be added via the setters. */
    public MetricCollection() {
        m_ratingMetrics = new MetricDict<IRatingMetric>();
        m_rankingMetrics = new MetricDict<IRankingMetric<Integer>>();
        m_timeMetrics = new MetricDict<ITimeMetric>();
        m_diversityMetrics = new MetricDict<IDiversityMetric<Integer>>();
    }

    /**
     * Instantiates each named metric class by reflection and files it under
     * the dictionary matching the metric interface it implements. Unknown
     * classes are logged and skipped.
     *
     * @param classNames fully-qualified metric class names
     * @throws ClassNotFoundException if a class name cannot be resolved
     * @throws IllegalAccessException if a metric constructor is inaccessible
     * @throws InstantiationException if a metric cannot be instantiated
     */
    public MetricCollection(List<String> classNames) throws ClassNotFoundException, IllegalAccessException,
            InstantiationException {
        m_ratingMetrics = new MetricDict<IRatingMetric>();
        m_rankingMetrics = new MetricDict<IRankingMetric<Integer>>();
        m_timeMetrics = new MetricDict<ITimeMetric>();
        m_diversityMetrics = new MetricDict<IDiversityMetric<Integer>>();
        Class<?> ratingMetricIntf = Class.forName("librec.metric.IRatingMetric");
        Class<?> rankingMetricIntf = Class.forName("librec.metric.IRankingMetric");
        Class<?> timeMetricIntf = Class.forName("librec.metric.ITimeMetric");
        Class<?> diversityMetricIntf = Class.forName("librec.metric.IDiversityMetric");
        for (String className : classNames) {
            Class<?> metricClass = Class.forName(className);
            if (implementsInterface(metricClass, ratingMetricIntf)) {
                IRatingMetric metric = (IRatingMetric) metricClass.newInstance();
                m_ratingMetrics.addMetric(metric.getName(), metric);
            } else if (implementsInterface(metricClass, rankingMetricIntf)) {
                IRankingMetric<Integer> metric = (IRankingMetric<Integer>) metricClass.newInstance();
                m_rankingMetrics.addMetric(metric.getName(), metric);
            } else if (implementsInterface(metricClass, timeMetricIntf)) {
                ITimeMetric metric = (ITimeMetric) metricClass.newInstance();
                m_timeMetrics.addMetric(metric.getName(), metric);
            } else if (implementsInterface(metricClass, diversityMetricIntf)) {
                IDiversityMetric<Integer> metric = (IDiversityMetric<Integer>) metricClass.newInstance();
                m_diversityMetrics.addMetric(metric.getName(), metric);
            } else {
                Logs.debug("Unknown metric: " + className);
            }
        }
    }

    /**
     * Helper function.
     * @param classObj candidate metric class
     * @param intf     the metric interface to test against
     * @return true if the class implements the interface. Uses
     *         isAssignableFrom, which — unlike the previous direct
     *         getInterfaces() scan — also recognizes interfaces inherited
     *         through a superclass or a super-interface.
     */
    private boolean implementsInterface(Class<?> classObj, Class<?> intf) {
        return intf.isAssignableFrom(classObj);
    }

    public boolean hasRatingMetrics () { return !m_ratingMetrics.isEmpty(); }
    public boolean hasRankingMetrics () { return !m_rankingMetrics.isEmpty(); }
    public boolean hasTimeMetrics () { return !m_timeMetrics.isEmpty(); }
    public boolean hasDiversityMetrics () { return !m_diversityMetrics.isEmpty(); }

    public List<String> getRatingMetricNames () { return m_ratingMetrics.getNames(); }
    public List<String> getRankingMetricNames () { return m_rankingMetrics.getNames(); }
    public List<String> getTimeMetricNames () { return m_timeMetrics.getNames(); }
    // BUG FIX: previously returned m_ratingMetrics.getNames() (copy-paste error)
    public List<String> getDiversityMetricNames () { return m_diversityMetrics.getNames(); }

    public void setRatingMetrics (List<IRatingMetric> metrics) {
        for (IRatingMetric metric : metrics) {
            m_ratingMetrics.addMetric(metric.getName(), metric);
        }
    }

    public void setRankingMetrics (List<IRankingMetric> metrics) {
        for (IRankingMetric metric : metrics) {
            m_rankingMetrics.addMetric(metric.getName(), metric);
        }
    }

    public void setTimeMetrics (List<ITimeMetric> metrics) {
        for (ITimeMetric metric : metrics) {
            m_timeMetrics.addMetric(metric.getName(), metric);
        }
    }

    public IRatingMetric getRatingMetric (String name) {
        return m_ratingMetrics.getMetric(name);
    }

    public ITimeMetric getTimeMetric (String name) {
        return m_timeMetrics.getMetric(name);
    }

    public IRankingMetric getRankingMetric (String name) {
        return m_rankingMetrics.getMetric(name);
    }

    public IDiversityMetric getDiversityMetric (String name) {
        return m_diversityMetrics.getMetric(name);
    }

    public Collection<IRatingMetric> getRatingMetrics () { return m_ratingMetrics.getMetrics(); }
    public Collection<IRankingMetric<Integer>> getRankingMetrics () { return m_rankingMetrics.getMetrics(); }
    public Collection<ITimeMetric> getTimeMetrics () { return m_timeMetrics.getMetrics(); }
    public Collection<IDiversityMetric<Integer>> getDiversityMetrics () { return m_diversityMetrics.getMetrics(); }

    // Time metrics are really just storage locations, so they don't support
    // update or compute.
    public void init(Recommender rec) {
        m_ratingMetrics.initAll(rec);
        m_rankingMetrics.initAll(rec);
        m_timeMetrics.initAll(rec);
        m_diversityMetrics.initAll(rec);
    }

    /** Feeds one test prediction to every rating metric. */
    public void updateRatingMetrics (int user, int item,
                                     double predicted, double actual, Recommender rec) {
        for (IRatingMetric metric : m_ratingMetrics.getMetrics()) {
            metric.updatePredicted(user, item, predicted, actual, rec);
        }
    }

    /** Feeds one user's ranked list to every ranking metric. */
    public void updateRankingMetrics (List<Integer> pred, List<Integer> correct, int numDropped,
                                      Recommender rec) {
        for (IRankingMetric<Integer> metric : m_rankingMetrics.getMetrics()) {
            metric.updateWithList(pred, correct, numDropped);
        }
    }

    /** Feeds one user's ranked list to every diversity metric. */
    public void updateDiversityMetrics (List<Integer> pred, Recommender rec) {
        for (IDiversityMetric<Integer> metric : m_diversityMetrics.getMetrics()) {
            metric.updateDiversity(pred, rec);
        }
    }

    public void computeRatingMetrics(int count) {
        m_ratingMetrics.computeAll(count);
    }

    public void computeRankingMetrics(int count) {
        m_rankingMetrics.computeAll(count);
    }

    // Appends a CSV part, inserting a separating comma when buf is non-empty.
    // Replaces three copies of the same append-with-comma logic.
    private static void appendPart(StringBuilder buf, String part) {
        if (buf.length() != 0) {
            buf.append(",");
        }
        buf.append(part);
    }

    /** CSV of metric names: rating, then ranking, then diversity (no time). */
    public String getMetricNamesString () {
        StringBuilder buf = new StringBuilder();
        if (!m_ratingMetrics.isEmpty()) {
            appendPart(buf, m_ratingMetrics.getNamesString());
        }
        if (!m_rankingMetrics.isEmpty()) {
            appendPart(buf, m_rankingMetrics.getNamesString());
        }
        if (!m_diversityMetrics.isEmpty()) {
            appendPart(buf, m_diversityMetrics.getNamesString());
        }
        return buf.toString();
    }

    /**
     * For backward compatibility, does not return time information here.
     * @return CSV of metric values in the same order as getMetricNamesString
     */
    public String getEvalResultString () {
        StringBuilder buf = new StringBuilder();
        if (!m_ratingMetrics.isEmpty()) {
            appendPart(buf, m_ratingMetrics.getResultString());
        }
        if (!m_rankingMetrics.isEmpty()) {
            appendPart(buf, m_rankingMetrics.getResultString());
        }
        if (!m_diversityMetrics.isEmpty()) {
            appendPart(buf, m_diversityMetrics.getResultString());
        }
        return buf.toString();
    }
}
| 8,961 | 37.62931 | 130 | java |
librec | librec-master/librec/src/main/java/librec/metric/IRankingMetric.java | package librec.metric;
import librec.intf.Recommender;
import librec.util.Measures;
import librec.util.Stats;
import java.util.List;
import java.util.ArrayList;
/**
* Metrics that are based on the rank of the test items.
* Created by rdburke on 8/1/16.
*
* 2016/8/2 RB It seems inefficient to store a bunch of copies of the results list. We might be
* able to solve this by consolidating at the MetricDict level somehow.
* @author rburke
*/
public interface IRankingMetric<T> extends IMetric {
    /**
     * Updates the metric with a new set of results.
     * @param results ranked list of recommended items for one user
     * @param test    the user's relevant (ground-truth) items
     * @param numDropped Needed by AUC measure
     */
    public void updateWithList(List<T> results, List<T> test, int numDropped);
}
/* ranking-based measures */
// xPre5, xPre10, xRec5, xRec10, xMAP, xMRR, xNDCG, xAUC,
/** Average precision@5 across users. */
class MetricPre5 implements IRankingMetric<Integer> {
    // Running sum of per-user precision@5.
    private double m_sumPrec;
    // Final averaged precision; -1 until compute() has run.
    private double m_prec;

    public String getName() { return "Pre5"; }

    public void init(Recommender rec) {
        m_sumPrec = 0.0;
        m_prec = -1;
    }

    /** Adds one user's precision at cutoff 5. */
    public void updateWithList(List<Integer> results, List<Integer> test, int numDropped) {
        m_sumPrec += Measures.PrecAt(results, test, 5);
    }

    public void compute(int count) {
        m_prec = m_sumPrec / count;
    }

    public double getValue() { return m_prec; }

    public String getValueAsString() {
        // BUG FIX: pass the format template first; the old call used the
        // metric name as the format string and discarded the value.
        return String.format(MetricCollection.ValueFormatString, this.getName(), this.getValue());
    }
}
/** Average precision@10 across users. */
class MetricPre10 implements IRankingMetric<Integer> {
    // Running sum of per-user precision@10.
    private double m_sumPrec;
    // Final averaged precision; -1 until compute() has run.
    private double m_prec;

    public String getName() { return "Pre10"; }

    public void init(Recommender rec) {
        m_sumPrec = 0.0;
        m_prec = -1;
    }

    /** Adds one user's precision at cutoff 10. */
    public void updateWithList(List<Integer> results, List<Integer> test, int numDropped) {
        m_sumPrec += Measures.PrecAt(results, test, 10);
    }

    public void compute(int count) {
        m_prec = m_sumPrec / count;
    }

    public double getValue() { return m_prec; }

    public String getValueAsString() {
        // BUG FIX: pass the format template first; the old call used the
        // metric name as the format string and discarded the value.
        return String.format(MetricCollection.ValueFormatString, this.getName(), this.getValue());
    }
}
/** Average recall@5 across users. */
class MetricRec5 implements IRankingMetric<Integer> {
    // Running sum of per-user recall@5.
    private double m_sumRec;
    // Final averaged recall; -1 until compute() has run.
    private double m_rec;

    public String getName() { return "Rec5"; }

    public void init(Recommender rec) {
        m_sumRec = 0.0;
        m_rec = -1;
    }

    /** Adds one user's recall at cutoff 5. */
    public void updateWithList(List<Integer> results, List<Integer> test, int numDropped) {
        m_sumRec += Measures.RecallAt(results, test, 5);
    }

    public void compute(int count) {
        m_rec = m_sumRec / count;
    }

    public double getValue() { return m_rec; }

    public String getValueAsString() {
        // BUG FIX: pass the format template first; the old call used the
        // metric name as the format string and discarded the value.
        return String.format(MetricCollection.ValueFormatString, this.getName(), this.getValue());
    }
}
/** Average recall@10 across users. */
class MetricRec10 implements IRankingMetric<Integer> {
    // Running sum of per-user recall@10.
    private double m_sumRec;
    // Final averaged recall; -1 until compute() has run.
    private double m_rec;

    public String getName() { return "Rec10"; }

    public void init(Recommender rec) {
        m_sumRec = 0.0;
        m_rec = -1;
    }

    /** Adds one user's recall at cutoff 10. */
    public void updateWithList(List<Integer> results, List<Integer> test, int numDropped) {
        m_sumRec += Measures.RecallAt(results, test, 10);
    }

    public void compute(int count) {
        m_rec = m_sumRec / count;
    }

    public double getValue() { return m_rec; }

    public String getValueAsString() {
        // BUG FIX: pass the format template first; the old call used the
        // metric name as the format string and discarded the value.
        return String.format(MetricCollection.ValueFormatString, this.getName(), this.getValue());
    }
}
/** Mean average precision across users. */
class MetricMAP implements IRankingMetric<Integer> {
    // Running sum of per-user average precision.
    private double m_sumAP;
    // Final MAP; -1 until compute() has run.
    private double m_map;

    public String getName() { return "MAP"; }

    public void init(Recommender rec) {
        m_sumAP = 0.0;
        m_map = -1;
    }

    /** Adds one user's average precision. */
    public void updateWithList(List<Integer> results, List<Integer> test, int numDropped) {
        m_sumAP += Measures.AP(results, test);
    }

    public void compute(int count) {
        m_map = m_sumAP / count;
    }

    public double getValue() { return m_map; }

    public String getValueAsString() {
        // BUG FIX: pass the format template first; the old call used the
        // metric name as the format string and discarded the value.
        return String.format(MetricCollection.ValueFormatString, this.getName(), this.getValue());
    }
}
/** Mean reciprocal rank across users. */
class MetricMRR implements IRankingMetric<Integer> {
    // Running sum of per-user reciprocal rank.
    private double m_sumRR;
    // Final MRR; -1 until compute() has run.
    private double m_mrr;

    public String getName() { return "MRR"; }

    public void init(Recommender rec) {
        m_sumRR = 0.0;
        m_mrr = -1;
    }

    /** Adds one user's reciprocal rank. */
    public void updateWithList(List<Integer> results, List<Integer> test, int numDropped) {
        m_sumRR += Measures.RR(results, test);
    }

    public void compute(int count) {
        m_mrr = m_sumRR / count;
    }

    public double getValue() { return m_mrr; }

    public String getValueAsString() {
        // BUG FIX: pass the format template first; the old call used the
        // metric name as the format string and discarded the value.
        return String.format(MetricCollection.ValueFormatString, this.getName(), this.getValue());
    }
}
/** Average area under the ROC curve across users. */
class MetricAUC implements IRankingMetric<Integer> {
    // Running sum of per-user AUC values.
    private double m_sumAUC;
    // Final averaged AUC; -1 until compute() has run.
    private double m_auc;

    public String getName() { return "AUC"; }

    public void init(Recommender rec) {
        m_sumAUC = 0.0;
        m_auc = -1;
    }

    /** Adds one user's AUC; numDropped accounts for candidates not ranked. */
    public void updateWithList(List<Integer> results, List<Integer> test, int numDropped) {
        m_sumAUC += Measures.AUC(results, test, numDropped);
    }

    public void compute(int count) {
        m_auc = m_sumAUC / count;
    }

    public double getValue() { return m_auc; }

    public String getValueAsString() {
        // BUG FIX: pass the format template first; the old call used the
        // metric name as the format string and discarded the value.
        return String.format(MetricCollection.ValueFormatString, this.getName(), this.getValue());
    }
}
/** Average normalized discounted cumulative gain across users. */
class MetricNDCG implements IRankingMetric<Integer> {
    // Running sum of per-user nDCG values.
    private double m_sumNDCG;
    // Final averaged nDCG; -1 until compute() has run.
    private double m_ndcg;

    public String getName() { return "NDCG"; }

    public void init(Recommender rec) {
        m_sumNDCG = 0.0;
        m_ndcg = -1;
    }

    /** Adds one user's nDCG. */
    public void updateWithList(List<Integer> results, List<Integer> test, int numDropped) {
        m_sumNDCG += Measures.nDCG(results, test);
    }

    public void compute(int count) {
        m_ndcg = m_sumNDCG / count;
    }

    public double getValue() { return m_ndcg; }

    public String getValueAsString() {
        // BUG FIX: pass the format template first; the old call used the
        // metric name as the format string and discarded the value.
        return String.format(MetricCollection.ValueFormatString, this.getName(), this.getValue());
    }
}
| 6,385 | 26.059322 | 95 | java |
librec | librec-master/librec/src/main/java/librec/metric/ITimeMetric.java | package librec.metric;
import librec.intf.Recommender;
import librec.util.Dates;
/**
* Really just a placeholder for storing timing information. No computation required.
* Created by rdburke on 8/1/16.
*/
public interface ITimeMetric extends IMetric {
    /**
     * Records an elapsed time measurement.
     * @param time elapsed time — presumably in milliseconds, since the
     *             implementations pass it to Dates.parse(long); confirm
     *             against the caller
     */
    public void setTime(double time);
}
/**
* Created by rdburke on 8/2/16.
*/
/** Stores the wall-clock time spent in the testing phase. */
class TestTime implements ITimeMetric {
    // Elapsed time; -1.0 until setTime is called.
    private Double elapsed = -1.0;
    // Human-readable rendering of the elapsed time.
    private String rendered = "<unknown>";

    public TestTime() { }

    public void init(Recommender rec) { /* nothing to reset */ }

    public void compute(int count) { /* value is assigned directly via setTime */ }

    public String getName() { return "TestTime"; }

    public void setTime(double time) {
        elapsed = time;
        rendered = Dates.parse(elapsed.longValue());
    }

    public double getValue() { return elapsed; }

    public String getValueAsString() { return rendered; }
}
/** Stores the wall-clock time spent in the training phase. */
class TrainTime implements ITimeMetric {
    // Elapsed time; -1.0 until setTime is called.
    private Double elapsed = -1.0;
    // Human-readable rendering of the elapsed time.
    private String rendered = "<unknown>";

    public TrainTime() { }

    public void init(Recommender rec) { /* nothing to reset */ }

    public void compute(int count) { /* value is assigned directly via setTime */ }

    public String getName() { return "TrainTime"; }

    public void setTime(double time) {
        elapsed = time;
        rendered = Dates.parse(elapsed.longValue());
    }

    public double getValue() { return elapsed; }

    public String getValueAsString() { return rendered; }
}
| 1,482 | 21.134328 | 85 | java |
librec | librec-master/librec/src/main/java/librec/util/Sortor.java | // Copyright (C) 2014-2015 Guibing Guo
//
// This file is part of LibRec.
//
// LibRec is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// LibRec is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with LibRec. If not, see <http://www.gnu.org/licenses/>.
//
package librec.util;
import java.util.ArrayList;
import java.util.List;
/**
* Implementations for sorting algorithms, although a ready-to-use method is
* {@code Collections.sort()}.
*
* <p>
* refers to: http://en.wikipedia.org/wiki/Sorting_algorithm
* </p>
*
* @author guoguibing
*
*/
public class Sortor {
/**
 * Insertion sort: grows a sorted prefix one element at a time by sifting
 * each new element leftwards into its correct position.
 *
 * <p>
 * refers to: http://en.wikipedia.org/wiki/Insertion_sort
 * </p>
 */
@SuppressWarnings("unchecked")
public static <T> void insertion(List<? extends Comparable<T>> data) {
    int size = data.size();
    for (int i = 1; i < size; i++) {
        // sift element i left until the prefix [0..i] is in order
        int j = i;
        while (j >= 1 && data.get(j).compareTo((T) data.get(j - 1)) < 0) {
            swap((List<Comparable<T>>) data, j - 1, j);
            j--;
        }
    }
}
/**
* refers to: http://en.wikipedia.org/wiki/Selection_sort
*
*/
@SuppressWarnings("unchecked")
public static <T> void selection(List<? extends Comparable<T>> data) {
int N = data.size();
for (int i = 0; i < N - 1; i++) {
Comparable<T> min = data.get(i);
int index = i;
for (int j = i + 1; j < N; j++) {
if (min.compareTo((T) data.get(j)) > 0) {
min = data.get(j);
index = j;
}
}
if (index > i)
swap((List<Comparable<T>>) data, i, index);
}
}
/**
* a.k.a <em>sinking sort</em>, refers to:
* http://en.wikipedia.org/wiki/Bubble_sort
*
*/
@SuppressWarnings("unchecked")
public static <T> void bubble(List<? extends Comparable<T>> data) {
int N = data.size();
int count = 0;
while (true) {
int i = 0;
boolean swap = false;
Comparable<T> t0 = data.get(i);
for (int j = 1; j < N - count; j++) {
Comparable<T> t1 = data.get(j);
if (t0.compareTo((T) t1) > 0) {
swap((List<Comparable<T>>) data, i, j);
swap = true;
}
i = j;
t0 = data.get(i);
}
count++;
if (Debug.OFF)
Logs.debug("Step " + count + ": " + Strings.toString(data));
// if swap = false, means no swapping is occurred in last iteration, i.e. already sorted
if (!swap)
break;
}
}
/**
* refers to: http://en.wikipedia.org/wiki/Shell_sort
*
* <p>
* It improves the insertion sort by using a greater gap.
* </p>
*
*/
@SuppressWarnings("unchecked")
public static <T> void shell(List<? extends Comparable<T>> d) {
List<Comparable<T>> data = (List<Comparable<T>>) d;
int N = data.size();
/* Marcin Ciura's gap sequence */
int[] gaps = { 701, 301, 132, 57, 23, 10, 4, 1 };
for (int k = 0; k < gaps.length; k++) {
for (int i = gaps[k]; i < N; i++) {
Comparable<T> temp = data.get(i);
int j = 0;
for (j = i; j >= gaps[k] && data.get(j - gaps[k]).compareTo((T) temp) > 0; j -= gaps[k]) {
data.set(j, data.get(j - gaps[k]));
}
data.set(j, temp);
}
}
}
/**
* refers to: http://en.wikipedia.org/wiki/Comb_sort
*
* <p>
* It improves the bubble sort by using a greater gap. In bubble sort, when
* any two elements are compared, they always have a gap (distance from each
* other) of 1. The basic idea of comb sort is that the gap can be much more
* than 1. (Shell sort is also based on this idea, but it is a modification
* of insertion sort rather than bubble sort).
* </p>
*
* <p>
* In other words, the inner loop of bubble sort, which does the actual
* swap, is modified such that gap between swapped elements goes down (for
* each iteration of outer loop) in steps of shrink factor. i.e. [input size
* / shrink factor, input size / shrink factor^2, input size / shrink
* factor^3, .... , 1 ]. Unlike in bubble sort, where the gap is constant
* i.e. 1.
* </p>
*
* <p>
* The shrink factor has a great effect on the efficiency of comb sort. In
* the original article, the author suggested 4/3=1.3334. A value too small
* slows the algorithm down because more comparisons must be made, whereas a
* value too large means that no comparisons will be made. A better shrink
* factor is 1/(1-1/e^phi)=1.24733, where phi is the golden ratio.
* </p>
*
* <p>
* The gap starts out as the length of the list being sorted divided by the
* shrink factor (generally 1.3), and the list is sorted with that value
* (rounded down to an integer if needed) as the gap. Then the gap is
* divided by the shrink factor again, the list is sorted with this new gap,
* and the process repeats until the gap is 1. At this point, comb sort
* continues using a gap of 1 until the list is fully sorted. The final
* stage of the sort is thus equivalent to a bubble sort, but by this time
* most turtles (small values) have been dealt with, so a bubble sort will
* be efficient.
* </p>
*
* <p>
* Guibing note: the essential idea is to move the turtle values faster to
* the beginning of list by setting a greater gap.
* </p>
*/
@SuppressWarnings("unchecked")
public static <T> void comb(List<? extends Comparable<T>> d) {
int N = d.size();
double factor = 1.24733;
while (true) {
int gap = (int) (N / factor);
if (gap >= 1)
factor *= factor;
else
break;
/* bubble sort (swap) */
for (int i = 0; i + gap < N; i++) {
Comparable<T> t0 = d.get(i);
Comparable<T> t1 = d.get(i + gap);
if (t0.compareTo((T) t1) > 0)
swap((List<Comparable<T>>) d, i, i + gap);
}
}
}
/**
* refers to: http://en.wikipedia.org/wiki/Merge_sort
*
*/
@SuppressWarnings("unchecked")
public static <T> List<? extends Comparable<T>> merge(List<? extends Comparable<T>> d) {
int N = d.size();
if (N <= 1)
return d;
List<Comparable<T>> left = new ArrayList<>();
List<Comparable<T>> right = new ArrayList<>();
int mid = N / 2;
for (int i = 0; i < N; i++) {
if (i < mid)
left.add(d.get(i));
else
right.add(d.get(i));
}
left = (List<Comparable<T>>) merge(left);
right = (List<Comparable<T>>) merge(right);
return combine(left, right);
}
@SuppressWarnings("unchecked")
private static <T> List<? extends Comparable<T>> combine(List<? extends Comparable<T>> left,
List<? extends Comparable<T>> right) {
List<Comparable<T>> result = new ArrayList<>();
while (left.size() > 0 || right.size() > 0) {
if (left.size() > 0 && right.size() > 0) {
if (left.get(0).compareTo((T) right.get(0)) < 0) {
result.add(left.get(0));
left.remove(0);
} else {
result.add(right.get(0));
right.remove(0);
}
} else if (left.size() > 0) {
result.add(left.get(0));
left.remove(0);
} else if (right.size() > 0) {
result.add(right.get(0));
right.remove(0);
}
}
return result;
}
public static <T> void swap(List<Comparable<T>> data, int i, int j) {
Comparable<T> swap = data.get(i);
data.set(i, data.get(j));
data.set(j, swap);
}
/**
* Return k largest elements (sorted) and their indices from a given array.
* The original array will be changed, so refer to the first k element of
* array1 and array2 after calling this method.
*
* @param array1
* original array of data elements
* @param array2
* original array containing data index
* @param first
* the first element in the array. Use 0 to deal with the whole
* array.
* @param last
* the last element in the array. Use the maximum index of the
* array to deal with the whole array.
* @param k
* the number of items
*/
public static void kLargest(double[] array1, int[] array2, int first, int last, int k) {
int pivotIndex;
int firstIndex = first;
int lastIndex = last;
while (lastIndex > k * 10) {
pivotIndex = partition(array1, array2, firstIndex, lastIndex, false);
if (pivotIndex < k) {
firstIndex = pivotIndex + 1;
} else if (pivotIndex < k * 10) { // go out and sort
lastIndex = pivotIndex;
break;
} else {
lastIndex = pivotIndex;
}
}
quickSort(array1, array2, first, lastIndex, false);
}
/**
* Return k smallest elements (sorted) and their indices from a given array.
* The original array will be changed, so refer to the first k element of
* array1 and array2 after calling this method.
*
* @param array1
* original array of data elements
* @param array2
* original array containing data index
* @param first
* the first element in the array. Use 0 to deal with the whole
* array.
* @param last
* the last element in the array. Use the maximum index of the
* array to deal with the whole array.
* @param k
* the number of items
*/
public static void kSmallest(double[] array1, int[] array2, int first, int last, int k) {
int pivotIndex;
int firstIndex = first;
int lastIndex = last;
while (lastIndex > k * 10) {
pivotIndex = partition(array1, array2, firstIndex, lastIndex, true);
if (pivotIndex < k) {
firstIndex = pivotIndex + 1;
} else if (pivotIndex < k * 10) { // go out and sort
lastIndex = pivotIndex;
break;
} else {
lastIndex = pivotIndex;
}
}
quickSort(array1, array2, first, lastIndex, true);
}
/**
* Sort the given array. The original array will be sorted.
*
* @param array
* original array of data elements
* @param first
* the first element to be sorted in the array. Use 0 for sorting
* the whole array.
* @param last
* the last element to be sorted in the array. Use the maximum
* index of the array for sorting the whole array.
* @param increasingOrder
* indicating the sort is in increasing order. Use true for
* increasing order, false for decreasing order.
*/
public static void quickSort(int[] array, int first, int last, boolean increasingOrder) {
int pivotIndex;
if (first < last) {
pivotIndex = partition(array, first, last, increasingOrder);
quickSort(array, first, pivotIndex - 1, increasingOrder);
quickSort(array, pivotIndex + 1, last, increasingOrder);
}
}
/**
* Sort the given array, and returns original index as well. The original
* array will be sorted.
*
* @param array1
* original array of data elements
* @param array2
* original array containing data index
* @param first
* the first element to be sorted in the array. Use 0 for sorting
* the whole array.
* @param last
* the last element to be sorted in the array. Use the maximum
* index of the array for sorting the whole array.
* @param increasingOrder
* indicating the sort is in increasing order. Use true for
* increasing order, false for decreasing order.
*/
public static void quickSort(double[] array1, int[] array2, int first, int last, boolean increasingOrder) {
int pivotIndex;
if (first < last) {
pivotIndex = partition(array1, array2, first, last, increasingOrder);
quickSort(array1, array2, first, pivotIndex - 1, increasingOrder);
quickSort(array1, array2, pivotIndex + 1, last, increasingOrder);
}
}
/**
* Sort the given array, and returns original index as well. The original
* array will be sorted.
*
* @param array1
* original array of data elements of type int
* @param array2
* original array containing data index
* @param first
* the first element to be sorted in the array. Use 0 for sorting
* the whole array.
* @param last
* the last element to be sorted in the array. Use the maximum
* index of the array for sorting the whole array.
* @param increasingOrder
* indicating the sort is in increasing order. Use true for
* increasing order, false for decreasing order.
*/
public static void quickSort(int[] array1, int[] array2, int first, int last, boolean increasingOrder) {
int pivotIndex;
if (first < last) {
pivotIndex = partition(array1, array2, first, last, increasingOrder);
quickSort(array1, array2, first, pivotIndex - 1, increasingOrder);
quickSort(array1, array2, pivotIndex + 1, last, increasingOrder);
}
}
/**
* Sort the given array, and returns original index as well. The original
* array will be sorted.
*
* @param array1
* original array of data elements of type int
* @param array2
* original array containing data of type double
* @param first
* the first element to be sorted in the array. Use 0 for sorting
* the whole array.
* @param last
* the last element to be sorted in the array. Use the maximum
* index of the array for sorting the whole array.
* @param increasingOrder
* indicating the sort is in increasing order. Use true for
* increasing order, false for decreasing order.
*/
public static void quickSort(int[] array1, double[] array2, int first, int last, boolean increasingOrder) {
int pivotIndex;
if (first < last) {
pivotIndex = partition(array1, array2, first, last, increasingOrder);
quickSort(array1, array2, first, pivotIndex - 1, increasingOrder);
quickSort(array1, array2, pivotIndex + 1, last, increasingOrder);
}
}
/**
* Partition the given array into two section: smaller and larger than
* threshold. The threshold is selected from the first element of original
* array.
*
* @param array
* original array of data elements
* @param first
* the first element in the array
* @param last
* the last element in the array
* @param increasingOrder
* indicating the sort is in increasing order. Use true for
* increasing order, false for decreasing order.
* @return the index of threshold item after partitioning
*/
private static int partition(int[] array, int first, int last, boolean increasingOrder) {
int tmpInt;
int pivot = array[first];
int lastS1 = first;
for (int firstUnknown = first + 1; firstUnknown <= last; ++firstUnknown) {
if (increasingOrder) {
if (array[firstUnknown] < pivot) {
++lastS1;
tmpInt = array[firstUnknown];
array[firstUnknown] = array[lastS1];
array[lastS1] = tmpInt;
}
} else {
if (array[firstUnknown] > pivot) {
++lastS1;
tmpInt = array[firstUnknown];
array[firstUnknown] = array[lastS1];
array[lastS1] = tmpInt;
}
}
}
tmpInt = array[first];
array[first] = array[lastS1];
array[lastS1] = tmpInt;
return lastS1;
}
/**
* Partition the given array into two section: smaller and larger than
* threshold. The threshold is selected from the first element of original
* array.
*
* @param array1
* original array of data elements
* @param array2
* original array containing data index
* @param first
* the first element in the array
* @param last
* the last element in the array
* @param increasingOrder
* indicating the sort is in increasing order. Use true for
* increasing order, false for decreasing order.
* @return the index of threshold item after partitioning
*/
private static int partition(double[] array1, int[] array2, int first, int last, boolean increasingOrder) {
double tmpDouble;
int tmpInt;
double pivot = array1[first];
int lastS1 = first;
for (int firstUnknown = first + 1; firstUnknown <= last; ++firstUnknown) {
if (increasingOrder) {
if (array1[firstUnknown] < pivot) {
++lastS1;
tmpDouble = array1[firstUnknown];
array1[firstUnknown] = array1[lastS1];
array1[lastS1] = tmpDouble;
tmpInt = array2[firstUnknown];
array2[firstUnknown] = array2[lastS1];
array2[lastS1] = tmpInt;
}
} else {
if (array1[firstUnknown] > pivot) {
++lastS1;
tmpDouble = array1[firstUnknown];
array1[firstUnknown] = array1[lastS1];
array1[lastS1] = tmpDouble;
tmpInt = array2[firstUnknown];
array2[firstUnknown] = array2[lastS1];
array2[lastS1] = tmpInt;
}
}
}
tmpDouble = array1[first];
array1[first] = array1[lastS1];
array1[lastS1] = tmpDouble;
tmpInt = array2[first];
array2[first] = array2[lastS1];
array2[lastS1] = tmpInt;
return lastS1;
}
/**
* Partition the given array into two section: smaller and larger than
* threshold. The threshold is selected from the first element of original
* array.
*
* @param array1
* original array of data elements of type int
* @param array2
* original array containing data index
* @param first
* the first element in the array
* @param last
* the last element in the array
* @param increasingOrder
* indicating the sort is in increasing order. Use true for
* increasing order, false for decreasing order.
* @return the index of threshold item after partitioning
*/
private static int partition(int[] array1, int[] array2, int first, int last, boolean increasingOrder) {
int tmp1;
int tmpInt;
int pivot = array1[first];
int lastS1 = first;
for (int firstUnknown = first + 1; firstUnknown <= last; ++firstUnknown) {
if (increasingOrder) {
if (array1[firstUnknown] < pivot) {
++lastS1;
tmp1 = array1[firstUnknown];
array1[firstUnknown] = array1[lastS1];
array1[lastS1] = tmp1;
tmpInt = array2[firstUnknown];
array2[firstUnknown] = array2[lastS1];
array2[lastS1] = tmpInt;
}
} else {
if (array1[firstUnknown] > pivot) {
++lastS1;
tmp1 = array1[firstUnknown];
array1[firstUnknown] = array1[lastS1];
array1[lastS1] = tmp1;
tmpInt = array2[firstUnknown];
array2[firstUnknown] = array2[lastS1];
array2[lastS1] = tmpInt;
}
}
}
tmp1 = array1[first];
array1[first] = array1[lastS1];
array1[lastS1] = tmp1;
tmpInt = array2[first];
array2[first] = array2[lastS1];
array2[lastS1] = tmpInt;
return lastS1;
}
/**
* Partition the given array into two section: smaller and larger than
* threshold. The threshold is selected from the first element of original
* array.
*
* @param array1
* original array of data elements of type int
* @param array2
* original array containing data of type double
* @param first
* the first element in the array
* @param last
* the last element in the array
* @param increasingOrder
* indicating the sort is in increasing order. Use true for
* increasing order, false for decreasing order.
* @return the index of threshold item after partitioning
*/
private static int partition(int[] array1, double[] array2, int first, int last, boolean increasingOrder) {
int tmp1;
double tmp2;
int pivot = array1[first];
int lastS1 = first;
for (int firstUnknown = first + 1; firstUnknown <= last; ++firstUnknown) {
if (increasingOrder) {
if (array1[firstUnknown] < pivot) {
++lastS1;
tmp1 = array1[firstUnknown];
array1[firstUnknown] = array1[lastS1];
array1[lastS1] = tmp1;
tmp2 = array2[firstUnknown];
array2[firstUnknown] = array2[lastS1];
array2[lastS1] = tmp2;
}
} else {
if (array1[firstUnknown] > pivot) {
++lastS1;
tmp1 = array1[firstUnknown];
array1[firstUnknown] = array1[lastS1];
array1[lastS1] = tmp1;
tmp2 = array2[firstUnknown];
array2[firstUnknown] = array2[lastS1];
array2[lastS1] = tmp2;
}
}
}
tmp1 = array1[first];
array1[first] = array1[lastS1];
array1[lastS1] = tmp1;
tmp2 = array2[first];
array2[first] = array2[lastS1];
array2[lastS1] = tmp2;
return lastS1;
}
}
| 20,633 | 28.061972 | 108 | java |
librec | librec-master/librec/src/main/java/librec/util/Maths.java | // Copyright (C) 2014-2015 Guibing Guo
//
// This file is part of LibRec.
//
// LibRec is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// LibRec is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with LibRec. If not, see <http://www.gnu.org/licenses/>.
//
package librec.util;
import static java.lang.Math.exp;
public class Maths {

	/**
	 * Golden ratio: http://en.wikipedia.org/wiki/Golden_ratio
	 *
	 * <p>
	 * (a+b)/a = a/b = phi (golden ratio) = 1.618033988749895
	 * </p>
	 */
	public final static double golden_ratio = 0.5 * (Math.sqrt(5) + 1);

	// tolerance used by isEqual for floating-point comparison
	public final static double zero = 1e-6;

	/** Whether two doubles are equal within the {@link #zero} tolerance. */
	public static boolean isEqual(double d1, double d2) {
		return Math.abs(d1 - d2) < zero;
	}

	/**
	 * Check if the given string is a number (digits only).
	 */
	public static boolean isNumber(String string) {
		return string.matches("^\\d+$");
	}

	/**
	 * Check if the given string is numeric: optional sign, digits, optional
	 * decimal part (-+0..9(.)0...9).
	 */
	public static boolean isNumeric(String string) {
		return string.matches("^[-+]?\\d+(\\.\\d+)?$");
	}

	/**
	 * Check if the given string is a number with a dot separator and exactly
	 * two decimals.
	 */
	public static boolean isNumberWith2Decimals(String string) {
		return string.matches("^\\d+\\.\\d{2}$");
	}

	/** Whether the given double holds a (finite) integral value. */
	public static boolean isInt(double data) {
		return (Maths.isEqual(data, Math.floor(data))) && !Double.isInfinite(data);
	}

	/**
	 * @return n! for n &gt;= 0, and 0 for negative n (no overflow check)
	 */
	public static int factorial(int n) {
		if (n < 0)
			return 0;
		int product = 1;
		for (int i = 2; i <= n; i++)
			product *= i;
		return product;
	}

	/**
	 * @return ln(n) = log_e(n)
	 */
	public static double ln(double n) {
		return Math.log(n);
	}

	/** Logarithm of n in the given base. */
	public static double log(double n, int base) {
		return Math.log(n) / Math.log(base);
	}

	/**
	 * Given log(a) and log(b), return log(a + b) without leaving log space.
	 * Factoring out the larger exponent keeps exp() from overflowing.
	 */
	public static double logSum(double log_a, double log_b) {
		double larger = Math.max(log_a, log_b);
		double smaller = Math.min(log_a, log_b);
		return larger + Math.log(1 + Math.exp(smaller - larger));
	}

	/**
	 * Logistic function g(x).
	 */
	public static double logistic(double x) {
		return g(x);
	}

	/**
	 * Logistic function g(x) = 1 / (1 + e^-x).
	 */
	public static double g(double x) {
		return 1.0 / (1.0 + Math.exp(-x));
	}

	/**
	 * Gradient of the logistic function: g'(x) = g(x) * g(-x).
	 */
	public static double gd(double x) {
		return g(x) * g(-x);
	}

	/**
	 * Get the normalized value using min-max normalization.
	 *
	 * @param x
	 *            value to be normalized
	 * @param min
	 *            min value
	 * @param max
	 *            max value
	 * @return normalized value
	 */
	public static double normalize(double x, double min, double max) {
		if (max > min)
			return (x - min) / (max - min);
		if (isEqual(min, max))
			return x / max;
		return x;
	}

	/**
	 * Fibonacci sequence: f(1) = 0, f(2) = 1, f(n) = f(n-1) + f(n-2).
	 * (Method name keeps the historical spelling for compatibility.)
	 */
	public static int fabonacci(int n) {
		assert n > 0;

		if (n == 1)
			return 0;

		int prev = 0, curr = 1;
		for (int i = 3; i <= n; i++) {
			int next = prev + curr;
			prev = curr;
			curr = next;
		}
		return curr;
	}

	/**
	 * Greatest common divisor (gcd), a.k.a. greatest common factor (gcf),
	 * computed by the Euclidean algorithm.
	 *
	 * <p>
	 * reference: http://en.wikipedia.org/wiki/Greatest_common_divisor
	 * </p>
	 */
	public static int gcd(int a, int b) {
		while (b != 0) {
			int r = a % b;
			a = b;
			b = r;
		}
		return a;
	}

	/**
	 * Least common multiple (lcm) of two positive ints; 0 otherwise.
	 */
	public static int lcm(int a, int b) {
		if (a > 0 && b > 0)
			return (int) ((0.0 + a * b) / gcd(a, b));
		return 0;
	}

	/** sqrt(a^2 + b^2) without under/overflow. */
	public static double hypot(double a, double b) {
		double result;
		if (Math.abs(a) > Math.abs(b)) {
			double ratio = b / a;
			result = Math.abs(a) * Math.sqrt(1 + ratio * ratio);
		} else if (!isEqual(b, 0.0)) {
			double ratio = a / b;
			result = Math.abs(b) * Math.sqrt(1 + ratio * ratio);
		} else {
			result = 0.0;
		}
		return result;
	}
}
| 4,276 | 19.464115 | 77 | java |
librec | librec-master/librec/src/main/java/librec/util/FileConfiger.java | // Copyright (C) 2014-2015 Guibing Guo
//
// This file is part of LibRec.
//
// LibRec is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// LibRec is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with LibRec. If not, see <http://www.gnu.org/licenses/>.
//
package librec.util;
import java.io.FileInputStream;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import java.util.StringTokenizer;
/**
* A configure class for .conf/.properties file
*
* @author guoguibing
*
*/
public class FileConfiger extends StringMap {

	// backing store: standard Java properties loaded from the config file
	private Properties p = null;

	/**
	 * Load the configuration from the given .conf/.properties file.
	 *
	 * @param conf
	 *            path of the configuration file, resolved via FileIO.getResource
	 * @throws Exception
	 *             if the file cannot be located or read
	 */
	public FileConfiger(String conf) throws Exception {
		p = new Properties();
		// NOTE(review): the FileInputStream is never closed — consider try-with-resources
		p.load(new FileInputStream(FileIO.getResource(conf)));
	}

	/**
	 * Parse the value of the given key as a set of line options.
	 *
	 * @return a LineConfiger over the key's value, or null if the key is absent
	 */
	public LineConfiger getParamOptions(String key) {
		String lineOptions = getString(key);
		return lineOptions == null ? null : new LineConfiger(lineOptions);
	}

	/**
	 * @return the key value as a trimmed string, or null if the key is absent
	 *
	 */
	public String getString(String key) {
		String str = p.getProperty(key);
		return str == null ? str : str.trim();
	}

	/**
	 * set a value to a specific key
	 * 
	 * @param key
	 *            property key
	 * @param val
	 *            property value
	 */
	public void setString(String key, String val) {
		p.setProperty(key, val);
	}

	/**
	 * @return the file IO path: supporting windows, linux and unix
	 */
	public String getPath(String key) {
		// first search key itself
		String path = getString(key);
		if (path != null)
			return path;

		// if not found, fall back to the OS-specific variants of the key
		switch (Systems.getOs()) {
		case Windows:
			return getString(key + ".wins");
		case Linux:
		case Mac:
		default:
			return getString(key + ".lins");
		}
	}

	/**
	 * Return a set of values in a specified range for the given key.
	 *
	 * <p>
	 * Supported formats: a single value ("0.5"), a delimited list
	 * ("0.5,0.8,0.6"), an additive range "min..step..max", or a
	 * multiplicative range "min**step**max".
	 * </p>
	 */
	public List<Float> getRange(String key) {
		// a set of values in two ways: one is individual value (e.g., "1", "0.5"); the other is range values in
		// the form of "a,b,c" (e.g., "0.5,0.8,0.6")
		String delim = "[, \t]";
		String str = getString(key);
		StringTokenizer st = new StringTokenizer(str, delim);
		if (st.countTokens() > 1)
			return getMultiValues(str, delim);

		// regularized values: [min..step..max]: min, min+step, ..., max
		if (str.contains(".."))
			return getPlusValues(str);

		// regularized values: [min**step**max]: min, min*step, ..., max
		if (str.contains("**"))
			return getTimesValues(str);

		// single value only; getFloat presumably comes from the StringMap base — TODO confirm
		List<Float> res = new ArrayList<>();
		res.add(getFloat(key));

		return res;
	}

	// Expand "min..step..max" into min, min+step, ..., max (or a descending
	// sequence when min > max).
	private List<Float> getPlusValues(String str) {
		List<Float> vals = getMultiValues(str, "(\\.\\.)");
		if (vals.size() < 3)
			return vals;

		// value ranges
		float min = vals.get(0), step = vals.get(1), max = vals.get(2);
		vals.clear();

		if (min > max) {
			// inverse order from max --> min
			while (min > max) {
				vals.add(min);
				min -= step;
			}
			vals.add(max);
		} else {
			while (min < max) {
				vals.add(min);
				min += step;
			}
			// append max only when it is not a repeat of the last step value
			if (Math.abs(max + step - min) > 1e-6)
				vals.add(max);
		}

		return vals;
	}

	// Expand "min**step**max" into min, min*step, ..., max (or a descending
	// sequence of divisions when min > max).
	private List<Float> getTimesValues(String str) {
		List<Float> vals = getMultiValues(str, "(\\*\\*)");
		if (vals.size() < 3)
			return vals;

		// value ranges
		float min = vals.get(0), step = vals.get(1), max = vals.get(2);
		vals.clear();

		if (min > max) {
			// inverse order from max --> min
			while (min > max) {
				vals.add(min);
				min /= step;
			}
			vals.add(max);
		} else {
			while (min < max) {
				vals.add(min);
				min *= step;
			}
			// append max only when it is not a repeat of the last step value
			if (Math.abs(max + step - min) > 1e-6)
				vals.add(max);
		}

		return vals;
	}

	/**
	 * Return the set of float values in {@code val}, separated by the regular
	 * expression {@code reg}; an empty list when {@code val} is null.
	 */
	public List<Float> getMultiValues(String val, String reg) {
		List<Float> values = new ArrayList<>();
		if (val != null) {
			String[] data = val.split(reg);
			for (int i = 0; i < data.length; i++) {
				// NOTE(review): new Float(...) is deprecated; Float.valueOf would be preferred
				values.add(new Float(data[i]));
			}
		}

		return values;
	}

	// Membership test delegates to the underlying Properties table.
	@Override
	public boolean contains(String key) {
		return p.containsKey(key);
	}
}
| 4,536 | 21.460396 | 106 | java |
librec | librec-master/librec/src/main/java/librec/util/Gaussian.java | // Copyright (C) 2014-2015 Guibing Guo
//
// This file is part of LibRec.
//
// LibRec is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// LibRec is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with LibRec. If not, see <http://www.gnu.org/licenses/>.
//
package librec.util;
/**
* <h3>Gaussian Distribution</h3>
* <p>
* The approximation is accurate to absolute error less than $8 * 10^(-16)$. <br/>
* Reference paper: George Marsaglia, Evaluating the Normal Distribution.<br/>
*
* http://introcs.cs.princeton.edu/java/21function/ <br>
* http://www.jstatsoft.org/v11/a04/paper
* </p>
*
*/
public class Gaussian {

	/**
	 * Standard Gaussian pdf: N(0, 1) density at x.
	 */
	public static double pdf(double x) {
		return Math.exp(-x * x / 2) / Math.sqrt(2 * Math.PI);
	}

	/**
	 * Gaussian pdf with mean mu and stddev sigma.
	 */
	public static double pdf(double x, double mu, double sigma) {
		double standardized = (x - mu) / sigma;
		return pdf(standardized) / sigma;
	}

	/**
	 * Standard Gaussian cdf using a Taylor-series approximation.
	 *
	 * @return the probability that a random variable distributed according to
	 *         the standard normal distribution (mean = 0 and stdev = 1)
	 *         produces a value less than z
	 */
	public static double cdf(double z) {
		// beyond +/-8 the remaining tail mass is below the approximation error
		if (z < -8.0)
			return 0.0;
		if (z > 8.0)
			return 1.0;

		// accumulate the series z + z^3/3 + z^5/(3*5) + ... until it converges
		double total = 0.0;
		double term = z;
		for (int i = 3; total + term != total; i += 2) {
			total += term;
			term = term * z * z / i;
		}
		return 0.5 + total * pdf(z);
	}

	/**
	 * Gaussian cdf with mean mu and stddev sigma.
	 *
	 * @return the probability that a random variable X distributed normally
	 *         with mean mu and stdev sigma produces a value less than z
	 */
	public static double cdf(double z, double mu, double sigma) {
		return cdf((z - mu) / sigma);
	}

	/**
	 * Compute z for the standard normal such that cdf(z) = y, via bisection
	 * search.
	 */
	public static double phiInverse(double y) {
		return phiInverse(y, .00000001, -8, 8);
	}

	// bisection on [lo, hi] until the bracket is narrower than delta
	private static double phiInverse(double y, double delta, double lo, double hi) {
		double mid = lo + (hi - lo) / 2;
		while (hi - lo >= delta) {
			if (cdf(mid) > y)
				hi = mid;
			else
				lo = mid;
			mid = lo + (hi - lo) / 2;
		}
		return mid;
	}

	/**
	 * Compute z for the normal N(mu, sigma) such that cdf(z, mu, sigma) = y,
	 * via bisection search.
	 */
	public static double phiInverse(double y, double mu, double sigma) {
		return phiInverse2(y, mu, sigma, .00000001, (mu - 8 * sigma), (mu + 8 * sigma));
	}

	// bisection on [lo, hi] until the bracket is narrower than delta
	private static double phiInverse2(double y, double mu, double sigma, double delta, double lo, double hi) {
		double mid = lo + (hi - lo) / 2;
		while (hi - lo >= delta) {
			if (cdf(mid, mu, sigma) > y)
				hi = mid;
			else
				lo = mid;
			mid = lo + (hi - lo) / 2;
		}
		return mid;
	}

	public static void main(String[] args) {
		double mu = 2.0;
		double sigma = 1.5;

		// This prints out the values of the probability density function for N(2.0,1.5)
		// A graph of this is here: http://www.cs.bu.edu/fac/snyder/cs237/Lecture%20Materials/GaussianExampleJava.png
		System.out.println("PDF for N(2.0,0.6) in range [-4..8]:");
		for (double z = -4.0; z <= 8.0; z += 0.2)
			System.out.format("%.1f\t%.4f\n", z, pdf(z, mu, sigma));

		// This prints out the values of the cumulative density function for N(2.0,1.5)
		// A graph of this is here: http://www.cs.bu.edu/fac/snyder/cs237/Lecture%20Materials/GaussianExample2Java.png
		System.out.println("CDF for N(2.0,0.6) in range [-4..8]:");
		for (double z = -4.0; z <= 8.0; z += 0.2)
			System.out.format("%.1f\t%.4f\n", z, cdf(z, mu, sigma));

		// Probability that for N(2.0, 1.5) the random variable produces a value less than 3.2
		System.out.format("\nIf X ~ N(2.0, 1.5), then P(X <= 3.2) is %.4f\n", cdf(3.2, 2.0, 1.5));

		// The value x for X ~ N(2.0, 1.5) which is the 78.81% cutoff
		System.out.format("\nIf X ~ N(2.0, 1.5), then x such that P(X <= x ) = 0.7881 is %.4f\n",
				phiInverse(0.7881, 2.0, 1.5));
	}
}
| 4,495 | 31.817518 | 134 | java |
librec | librec-master/librec/src/main/java/librec/util/Sims.java | // Copyright (C) 2014-2015 Guibing Guo
//
// This file is part of LibRec.
//
// LibRec is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// LibRec is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with LibRec. If not, see <http://www.gnu.org/licenses/>.
//
package librec.util;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
/**
 * Similarity measures between two vectors of ratings.
 *
 * All measures return {@code Double.NaN} when a meaningful value cannot be
 * computed (null/empty input, mismatched lengths, or a zero denominator).
 *
 * @author Guo Guibing
 *
 */
public class Sims {
	/**
	 * Calculate the cosine similarity between two equal-length vectors.
	 *
	 * @param a
	 *            first vector of ratings
	 * @param b
	 *            second vector of ratings
	 * @return cosine similarity; Double.NaN if either vector is empty or sizes differ,
	 *         or if either vector is all zeros (zero denominator)
	 */
	public static double cos(List<Double> a, List<Double> b) {
		if (Lists.isEmpty(a) || Lists.isEmpty(b) || a.size() != b.size())
			return Double.NaN;
		double num = 0.0, den_a = 0.0, den_b = 0.0;
		for (int i = 0; i < a.size(); i++) {
			double ai = a.get(i), bi = b.get(i);
			num += ai * bi;
			den_a += ai * ai;
			den_b += bi * bi;
		}
		return num / (Math.sqrt(den_a) * Math.sqrt(den_b));
	}
	/**
	 * Calculate Constrained Pearson Correlation (CPC), i.e. PCC centered on the
	 * rating-scale median instead of the mean.
	 *
	 * @param u
	 *            user u's ratings
	 * @param v
	 *            user v's ratings
	 * @param median
	 *            median rating in a rating range
	 *
	 * @return Constrained PCC Correlation (CPC); Double.NaN on empty or mismatched input
	 */
	public static double cpc(List<Double> u, List<Double> v, double median) {
		// fixed: also guard against mismatched sizes (previously could throw
		// IndexOutOfBoundsException when u was longer than v)
		if (Lists.isEmpty(u) || Lists.isEmpty(v) || u.size() != v.size())
			return Double.NaN;
		double sumNum = 0.0, sumDen1 = 0.0, sumDen2 = 0.0;
		for (int i = 0; i < u.size(); i++) {
			double ui = u.get(i) - median;
			double vi = v.get(i) - median;
			sumNum += ui * vi;
			sumDen1 += Math.pow(ui, 2);
			sumDen2 += Math.pow(vi, 2);
		}
		return sumNum / (Math.sqrt(sumDen1) * Math.sqrt(sumDen2));
	}
	/**
	 * Calculate Mean Squared Difference (MSD) similarity proposed by Shardanand and Maes [1995]:
	 *
	 * <i>Social information filtering: Algorithms for automating "word of mouth"</i>
	 *
	 * @param u
	 *            user u's ratings
	 * @param v
	 *            user v's ratings
	 * @return MSD similarity; 1.0 for identical vectors; Double.NaN on empty or mismatched input
	 */
	public static double msd(List<Double> u, List<Double> v) {
		// fixed: consistent null/size guards with the other measures
		// (previously a null list caused a NullPointerException)
		if (Lists.isEmpty(u) || Lists.isEmpty(v) || u.size() != v.size())
			return Double.NaN;
		double sum = 0.0;
		for (int i = 0; i < u.size(); i++) {
			double ui = u.get(i);
			double vi = v.get(i);
			sum += Math.pow(ui - vi, 2);
		}
		double sim = u.size() / sum;
		// identical vectors give sum == 0 and hence an infinite ratio; treat as 1.0
		if (Double.isInfinite(sim))
			sim = 1.0;
		return sim;
	}
	/**
	 * calculate Pearson Correlation Coefficient (PCC) between two vectors of ratings
	 *
	 * @param a
	 *            first vector of ratings
	 * @param b
	 *            second vector of ratings
	 * @return Pearson Correlation Coefficient (PCC) value. <br/>
	 *         If vector a or b is null or the length is less than 2, Double.NaN is returned.
	 */
	public static double pcc(List<? extends Number> a, List<? extends Number> b) {
		if (a == null || b == null || a.size() < 2 || b.size() < 2 || a.size() != b.size())
			return Double.NaN;
		double mu_a = Stats.mean(a);
		double mu_b = Stats.mean(b);
		double num = 0.0, den_a = 0.0, den_b = 0.0;
		for (int i = 0; i < a.size(); i++) {
			double ai = a.get(i).doubleValue() - mu_a;
			double bi = b.get(i).doubleValue() - mu_b;
			num += ai * bi;
			den_a += ai * ai;
			den_b += bi * bi;
		}
		return num / (Math.sqrt(den_a) * Math.sqrt(den_b));
	}
	/**
	 * Calculate the extended Jaccard coefficient (Tanimoto) between two vectors of ratings.
	 *
	 * @return extended Jaccard coefficient; Double.NaN on empty or mismatched input
	 */
	public static double exJaccard(List<Double> a, List<Double> b) {
		// fixed: consistent null/size guards with the other measures
		if (Lists.isEmpty(a) || Lists.isEmpty(b) || a.size() != b.size())
			return Double.NaN;
		double num = 0.0, den_a = 0.0, den_b = 0.0;
		for (int i = 0; i < a.size(); i++) {
			double ai = a.get(i);
			double bi = b.get(i);
			num += ai * bi;
			den_a += ai * ai;
			den_b += bi * bi;
		}
		return num / (den_a + den_b - num);
	}
	/**
	 * Calculate the Dice coefficient between two vectors of ratings.
	 *
	 * @return Dice coefficient; Double.NaN on empty or mismatched input
	 */
	public static double dice(List<Double> a, List<Double> b) {
		// fixed: consistent null/size guards with the other measures
		if (Lists.isEmpty(a) || Lists.isEmpty(b) || a.size() != b.size())
			return Double.NaN;
		double num = 0.0, den_a = 0.0, den_b = 0.0;
		for (int i = 0; i < a.size(); i++) {
			double ai = a.get(i);
			double bi = b.get(i);
			num += 2 * ai * bi;
			den_a += ai * ai;
			den_b += bi * bi;
		}
		return num / (den_a + den_b);
	}
	/**
	 * Jaccard's coefficient is defined as the number of common rated items of two users divided by the total number of
	 * their unique rated items.
	 *
	 * @return Jaccard's coefficient; Double.NaN when both item lists are empty
	 */
	public static double jaccard(List<Integer> uItems, List<Integer> vItems) {
		// fixed: tolerate null inputs instead of throwing NullPointerException
		if (uItems == null || vItems == null)
			return Double.NaN;
		Set<Integer> items = new HashSet<>();
		items.addAll(uItems);
		items.addAll(vItems);
		int all = items.size();
		int common = uItems.size() + vItems.size() - all;
		return (common + 0.0) / all;
	}
	/**
	 * Kendall Rank Correlation Coefficient
	 *
	 * @author Bin Wu
	 *
	 */
	public static double krcc(List<Double> uItems, List<Double> vItems) {
		// fixed: tolerate null inputs; fewer than 2 common items makes the
		// denominator common*(common-1) zero, which always produced NaN anyway
		if (uItems == null || vItems == null)
			return Double.NaN;
		Set<Double> items = new HashSet<>();
		items.addAll(uItems);
		items.addAll(vItems);
		int all = items.size();
		int common = uItems.size() + vItems.size() - all;
		if (common < 2)
			return Double.NaN;
		// indices rated (positively) by both users
		List<Integer> temp = new ArrayList<>();
		for (int i = 0; i < uItems.size(); i++) {
			if (uItems.get(i) > 0 && vItems.get(i) > 0) {
				temp.add(i);
			}
		}
		// count discordant pairs among the co-rated indices
		double sum = 0;
		for (int m = 0; m < temp.size(); m++) {
			for (int n = m + 1; n < temp.size(); n++) {
				if ((uItems.get(temp.get(m)) - uItems.get(temp.get(n)))
						* (vItems.get(temp.get(m)) - vItems.get(temp.get(n))) < 0) {
					sum += 1;
				}
			}
		}
		return 1 - 4 * sum / common / (common - 1);
	}
}
| 5,706 | 23.921397 | 116 | java |
librec | librec-master/librec/src/main/java/librec/util/EMailer.java | // Copyright (C) 2014-2015 Guibing Guo
//
// This file is part of LibRec.
//
// LibRec is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// LibRec is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with LibRec. If not, see <http://www.gnu.org/licenses/>.
//
package librec.util;
import java.io.FileInputStream;
import java.util.Date;
import java.util.Properties;
import javax.activation.DataHandler;
import javax.activation.FileDataSource;
import javax.mail.Authenticator;
import javax.mail.Message;
import javax.mail.Multipart;
import javax.mail.PasswordAuthentication;
import javax.mail.Session;
import javax.mail.Transport;
import javax.mail.internet.InternetAddress;
import javax.mail.internet.MimeBodyPart;
import javax.mail.internet.MimeMessage;
import javax.mail.internet.MimeMultipart;
/**
 * Generic Email Class, Read configuration from email.properties file
 *
 * Any field left null is filled in from the loaded {@link #props} (keys
 * "mail.from", "mail.to", ...) just before sending.
 *
 * @author guoguibing
 *
 */
public class EMailer {
	protected String from;
	protected String to;
	protected String cc;
	protected String bcc;
	protected String subject;
	protected String text;
	protected String attachment;
	// mail session/authentication properties, loaded via config(...)
	protected Properties props = new Properties();
	/** Load mail properties (server, credentials, defaults) from the given file. */
	protected void config(String filename) throws Exception {
		props.load(new FileInputStream(filename));
	}
	/** Send using the text and attachment configured in the properties file. */
	public void send() throws Exception {
		if (text == null)
			text = props.getProperty("mail.text");
		if (attachment == null)
			attachment = props.getProperty("mail.attachment");
		send(text, attachment);
	}
	/** Preset SMTP transport properties for the 163.com mail server. */
	public void config163() {
		props.setProperty("mail.transport.protocol", "smtp");
		props.setProperty("mail.smtp.host", "smtp.163.com");
		props.setProperty("mail.smtp.port", "25");
		props.setProperty("mail.smtp.auth", "true");
	}
	/** Send a plain-text message without an attachment. */
	public void send(String text) throws Exception {
		send(text, null);
	}
	/**
	 * Send a message with the given body text and optional attachment.
	 *
	 * @param text
	 *            the message body
	 * @param attachment
	 *            path of a file to attach, or null for a text-only message
	 * @throws Exception
	 *             if authentication, addressing, or transport fails
	 */
	public void send(String text, String attachment) throws Exception {
		Session session = Session.getInstance(props, new Authenticator() {
			@Override
			protected PasswordAuthentication getPasswordAuthentication() {
				// credentials are read lazily from the loaded properties
				return new PasswordAuthentication(props.getProperty("mail.smtp.user"), props
						.getProperty("mail.smtp.password"));
			}
		});
		// fall back to property-file defaults for any unset addressing field
		if (from == null)
			from = props.getProperty("mail.from");
		if (to == null)
			to = props.getProperty("mail.to");
		if (cc == null)
			cc = props.getProperty("mail.cc");
		if (bcc == null)
			bcc = props.getProperty("mail.bcc");
		if (subject == null)
			subject = props.getProperty("mail.subject");
		Message msg = new MimeMessage(session);
		msg.setFrom(new InternetAddress(from));
		msg.setRecipient(Message.RecipientType.TO, new InternetAddress(to));
		if (cc != null)
			msg.setRecipient(Message.RecipientType.CC, new InternetAddress(cc));
		if (bcc != null)
			msg.setRecipient(Message.RecipientType.BCC, new InternetAddress(bcc));
		msg.setSubject(subject);
		msg.setSentDate(new Date());
		if (attachment != null) {
			// multipart message: one text part + one attachment part
			MimeBodyPart tp = new MimeBodyPart();
			tp.setText(text);
			MimeBodyPart ap = new MimeBodyPart();
			FileDataSource fds = new FileDataSource(attachment);
			ap.setDataHandler(new DataHandler(fds));
			ap.setFileName(fds.getName());
			Multipart mp = new MimeMultipart();
			mp.addBodyPart(tp);
			mp.addBodyPart(ap);
			msg.setContent(mp);
		} else {
			msg.setText(text);
		}
		Transport.send(msg);
		Logs.debug("Have sent an email notification to {}. ", to);
	}
	/** @return the underlying mail properties (mutable) */
	public Properties getProps() {
		return props;
	}
}
| 3,841 | 26.640288 | 80 | java |
librec | librec-master/librec/src/main/java/librec/util/LineConfiger.java | // Copyright (C) 2014-2015 Guibing Guo
//
// This file is part of LibRec.
//
// LibRec is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// LibRec is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with LibRec. If not, see <http://www.gnu.org/licenses/>.
//
package librec.util;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * A configer for a line of string, e.g. "on -k 5 -v a b", where the optional
 * leading token (without a dash) is the main parameter and each "-key" starts
 * an option whose following tokens are its values.
 *
 * @author Guo Guibing
 *
 */
public class LineConfiger extends StringMap {
	// option key -> its list of values
	private Map<String, List<String>> params = null;
	// internal key under which the leading (dash-less) main parameter is stored
	// NOTE(review): "paramater" is a typo, but the key is purely internal
	private static final String headKey = "main.paramater";
	public LineConfiger(String setup) {
		this(setup.split("[,\t ]"));
	}
	public LineConfiger(String[] parameters) {
		params = new HashMap<>();
		// parameter head: a first token without a leading dash is the main parameter
		int i = 0;
		String head = parameters[i];
		if (!head.startsWith("-")) {
			params.put(headKey, Arrays.asList(head));
			i++;
		}
		// parameter options: "-key" (non-numeric, to allow negative values like -1)
		// starts a new option; subsequent tokens are appended as its values
		List<String> vals = null;
		for (; i < parameters.length; i++) {
			boolean isString = !Maths.isNumeric(parameters[i]);
			boolean isWithDash = parameters[i].startsWith("-");
			if (isWithDash && isString) {
				vals = new ArrayList<>();
				params.put(parameters[i], vals);
			} else if (vals != null) {
				// fixed: previously a value token appearing before any "-key"
				// option caused a NullPointerException; such tokens are ignored
				vals.add(parameters[i]);
			}
		}
	}
	/** @return the value list for the given option key, or null if absent */
	public List<String> getOptions(String key) {
		return params.get(key);
	}
	/** @return the leading main parameter, or null if none was given */
	public String getMainParam() {
		return getString(headKey);
	}
	/** @return whether the main parameter is "on"/"true" */
	public boolean isMainOn() {
		return Strings.isOn(getMainParam());
	}
	/** @return the first value of the given option key, or null if absent/empty */
	@Override
	public String getString(String key) {
		List<String> options = this.getOptions(key);
		if (options != null && options.size() > 0)
			return options.get(0);
		return null;
	}
	@Override
	public boolean contains(String key) {
		return params.containsKey(key);
	}
}
| 2,347 | 23.715789 | 88 | java |
librec | librec-master/librec/src/main/java/librec/util/Dates.java | // Copyright (C) 2014-2015 Guibing Guo
//
// This file is part of LibRec.
//
// LibRec is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// LibRec is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with LibRec. If not, see <http://www.gnu.org/licenses/>.
//
package librec.util;
import java.text.SimpleDateFormat;
/**
 * Date/time parsing and formatting helpers.
 */
public class Dates {
	public final static String PATTERN_yyyy_MM_dd = "yyyy-MM-dd";
	public final static String PATTERN_dd_MM_yyyy = "dd/MM/yyyy";
	public final static String PATTERN_MM_dd_yyyy = "MM/dd/yyyy";
	// NOTE(review): in SimpleDateFormat "SS" is the MILLISECOND field, not
	// seconds ("ss"); output therefore shows milliseconds in the last slot.
	// Kept as-is since existing output/filenames may rely on this format.
	public final static String PATTERN_yyyy_MM_dd_HH_mm_SS = "yyyy-MM-dd HH-mm-SS";
	// fixed: SimpleDateFormat is not thread-safe, so the previously shared
	// static instance could corrupt results under concurrent use; a
	// per-thread instance preserves behavior while being safe
	private static final ThreadLocal<SimpleDateFormat> sdf = new ThreadLocal<SimpleDateFormat>() {
		@Override
		protected SimpleDateFormat initialValue() {
			return new SimpleDateFormat(PATTERN_yyyy_MM_dd_HH_mm_SS);
		}
	};
	/** Parse a "yyyy-MM-dd" date string into a SQL date. */
	public static java.sql.Date parse(String date) throws Exception {
		return parse(date, PATTERN_yyyy_MM_dd);
	}
	/**
	 * Parse a date string with the given pattern into a SQL date.
	 *
	 * @param date
	 *            the date string
	 * @param pattern
	 *            a SimpleDateFormat pattern
	 */
	public static java.sql.Date parse(String date, String pattern) throws Exception {
		SimpleDateFormat sdf = new SimpleDateFormat(pattern);
		return new java.sql.Date(sdf.parse(date).getTime());
	}
	/** Format a millisecond timestamp with the given pattern. */
	public static String toString(long mms, String pattern) throws Exception {
		SimpleDateFormat sdf = new SimpleDateFormat(pattern);
		return sdf.format(new java.sql.Date(mms));
	}
	/** Format a millisecond timestamp with the default pattern. */
	public static String toString(long mms) throws Exception {
		return sdf.get().format(new java.sql.Date(mms));
	}
	/** @return the current date-time formatted with the default pattern */
	public static String now() {
		return sdf.get().format(new java.util.Date());
	}
	/**
	 * Convert time in milliseconds to human-readable format.
	 *
	 * @param msType
	 *            The time in milliseconds
	 * @return a human-readable string version of the time, e.g. "01:02" (mm:ss),
	 *         "01:02:03" (hh:mm:ss) or "2 days, 01:02:03.004"
	 */
	public static String parse(long msType) {
		long original = msType;
		int ms = (int) (msType % 1000);
		original = original / 1000;
		int sec = (int) (original % 60);
		original = original / 60;
		int min = (int) (original % 60);
		original = original / 60;
		int hr = (int) (original % 24);
		original = original / 24;
		int day = (int) original;
		if (day > 1) {
			return String.format("%d days, %02d:%02d:%02d.%03d", day, hr, min, sec, ms);
		} else if (day > 0) {
			return String.format("%d day, %02d:%02d:%02d.%03d", day, hr, min, sec, ms);
		} else if (hr > 0) {
			return String.format("%02d:%02d:%02d", hr, min, sec);
		} else {
			// milliseconds are intentionally dropped for sub-hour durations
			return String.format("%02d:%02d", min, sec);
		}
	}
}
| 2,723 | 29.954545 | 95 | java |
librec | librec-master/librec/src/main/java/librec/util/StringMap.java | // Copyright (C) 2014-2015 Guibing Guo
//
// This file is part of LibRec.
//
// LibRec is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// LibRec is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with LibRec. If not, see <http://www.gnu.org/licenses/>.
//
package librec.util;
/**
 * Abstract class that uses <string, string> as <key, value> parameter-value map
 *
 * Subclasses provide {@link #getString(String)}; all typed accessors and their
 * defaulted variants are derived from it.
 *
 * @author Guo Guibing
 *
 */
public abstract class StringMap {
	/** @return the string value mapped to the key, or null if absent */
	public abstract String getString(String key);
	/** @return the mapped value, or {@code val} when the key is absent */
	public String getString(String key, String val) {
		String value = getString(key);
		return value == null ? val : value;
	}
	public float getFloat(String key) {
		return Strings.toFloat(getString(key));
	}
	public float getFloat(String key, float val) {
		return Strings.toFloat(getString(key), val);
	}
	public int getInt(String key) {
		return Strings.toInt(getString(key));
	}
	public int getInt(String key, int val) {
		return Strings.toInt(getString(key), val);
	}
	public double getDouble(String key) {
		return Strings.toDouble(getString(key));
	}
	public double getDouble(String key, double val) {
		return Strings.toDouble(getString(key), val);
	}
	public long getLong(String key) {
		// fixed: previously parsed the KEY string itself (Strings.toLong(key))
		// instead of the value mapped to it, unlike every other typed getter
		return Strings.toLong(getString(key));
	}
	public long getLong(String key, long val) {
		return Strings.toLong(getString(key), val);
	}
	/** @return whether the value mapped to key is "on"/"true"; false when absent */
	public boolean isOn(String key) {
		// fixed: an absent key used to pass null into Strings.isOn and could NPE
		String value = getString(key);
		return value != null && Strings.isOn(value);
	}
	public boolean isOn(String key, boolean on) {
		String value = getString(key);
		return value != null ? Strings.isOn(value) : on;
	}
	public abstract boolean contains(String key);
}
| 2,025 | 23.707317 | 80 | java |
librec | librec-master/librec/src/main/java/librec/util/Debug.java | // Copyright (C) 2014-2015 Guibing Guo
//
// This file is part of LibRec.
//
// LibRec is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// LibRec is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with LibRec. If not, see <http://www.gnu.org/licenses/>.
//
package librec.util;
import java.io.File;
import java.io.IOException;
import java.io.PrintStream;
import java.lang.reflect.Method;
/**
 * Define constants or methods often used during the debug process
 *
 * @author guoguibing
 *
 */
public class Debug
{
	public final static boolean ON = true;
	public final static boolean OFF = false;
	// default directory for redirected console/error output files
	public final static String dirPath = Systems.getDesktop();
	/**
	 * Calculate the amount of memory used by calling obj's method
	 *
	 * @param obj
	 *            the instance of caller
	 * @param method
	 *            the method to be called
	 * @param args
	 *            the arguments to be passed for this method
	 *
	 * @return How much memory is used by calling obj's method (in KByte).
	 *         NOTE: freeMemory() is GC-dependent, so this is only a rough estimate.
	 * @throws Exception
	 *
	 */
	public static double memory(Object obj, Method method, Object... args) throws Exception
	{
		Runtime runtime = Runtime.getRuntime();
		double start = runtime.freeMemory();
		method.invoke(obj, args);
		double end = runtime.freeMemory();
		// fixed: free memory SHRINKS as memory is consumed, so the usage is
		// start - end (previously end - start, which yielded a negative value)
		return (start - end) / 1000.0;
	}
	/** Redirect System.err into "errors.txt" in the default directory. */
	public static void pipeErrors()
	{
		pipeErrors(dirPath + "errors.txt");
	}
	/**
	 * Redirect system errors into a file
	 *
	 */
	public static void pipeErrors(String filePath)
	{
		try
		{
			System.setErr(new PrintStream(new File(filePath)));
		} catch (Exception e)
		{
			e.printStackTrace();
		}
	}
	/** Block until any input is read from stdin (breakpoint-like pause). */
	public static void stopHere()
	{
		try
		{
			System.in.read();
		} catch (IOException e)
		{
			e.printStackTrace();
		}
	}
	/** Redirect System.out into "console.txt" in the default directory. */
	public static void pipeConsoles()
	{
		pipeConsoles(dirPath + "console.txt");
	}
	/**
	 * Redirect system outputs into a file
	 *
	 */
	public static void pipeConsoles(String filePath)
	{
		try
		{
			System.setOut(new PrintStream(new File(filePath)));
		} catch (Exception e)
		{
			e.printStackTrace();
		}
	}
}
| 2,553 | 20.283333 | 88 | java |
librec | librec-master/librec/src/main/java/librec/util/Strings.java | // Copyright (C) 2014-2015 Guibing Guo
//
// This file is part of LibRec.
//
// LibRec is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// LibRec is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with LibRec. If not, see <http://www.gnu.org/licenses/>.
//
package librec.util;
import java.awt.Toolkit;
import java.awt.datatransfer.Clipboard;
import java.awt.datatransfer.StringSelection;
import java.text.DecimalFormat;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import librec.util.FileIO.Converter;
import librec.util.FileIO.MapWriter;
import com.google.common.base.Joiner;
import com.google.common.base.Splitter;
import com.google.common.collect.Lists;
import com.google.common.collect.Multiset;
/**
 * String Utility Class
 *
 * @author Guo Guibing
 *
 */
public class Strings {
	public static final String EMPTY = "";
	// separator between entries when rendering collections/maps
	private static String SEP = "\n";
	// renders integers with thousand separators, e.g. 1,234,567
	private static final DecimalFormat intFormatter = new DecimalFormat("#,###");
	/**
	 * <p>
	 * The maximum size to which the padding constant(s) can expand.
	 * </p>
	 */
	private static final int PAD_LIMIT = 8192;
	/**
	 * get the last substring of string str with maximum length
	 *
	 * @param str
	 *            source string
	 * @param maxLength
	 *            maximum length of strings
	 * @return the last substring of string str with maximum length; if greater; then "..." is padded to start position
	 */
	public static String last(String str, int maxLength) {
		if (str.length() + 3 <= maxLength)
			return str;
		return "..." + str.substring(str.length() - maxLength + 3);
	}
	public static float toFloat(String str) {
		return Float.parseFloat(str);
	}
	/** @return parsed float, or {@code val} when str is null */
	public static float toFloat(String str, float val) {
		return str != null ? Float.parseFloat(str) : val;
	}
	public static int toInt(String str) {
		return Integer.parseInt(str);
	}
	/** @return parsed int, or {@code val} when str is null */
	public static int toInt(String str, int val) {
		return str != null ? Integer.parseInt(str) : val;
	}
	public static long toLong(String str) {
		return Long.parseLong(str);
	}
	/** @return parsed long, or {@code val} when str is null */
	public static long toLong(String str, long val) {
		return str != null ? Long.parseLong(str) : val;
	}
	public static double toDouble(String str) {
		return Double.parseDouble(str);
	}
	/** @return parsed double, or {@code val} when str is null */
	public static double toDouble(String str, double val) {
		return str != null ? Double.parseDouble(str) : val;
	}
	/** Copy the given text onto the system clipboard. */
	public static void toClipboard(String data) throws Exception {
		Clipboard cb = Toolkit.getDefaultToolkit().getSystemClipboard();
		StringSelection ss = new StringSelection(data);
		cb.setContents(ss, ss);
	}
	/**
	 * @return true only for "on"/"true" (case-insensitive); false for anything
	 *         else, including null
	 */
	public static boolean isOn(String option) {
		// fixed: tolerate null (e.g. a missing configuration key) instead of
		// throwing a NullPointerException from the switch
		if (option == null)
			return false;
		switch (option.toLowerCase()) {
		case "on":
		case "true":
			return true;
		case "off":
		case "false":
		default:
			return false;
		}
	}
	/**
	 * Concatenates an array of string
	 *
	 * @param objs
	 *            the objects to be concatenated
	 * @param sep
	 *            the separator between strings
	 * @return the concatenated strings
	 */
	public static String toString(Object[] objs, String sep) {
		return Joiner.on(sep).skipNulls().join(objs);
	}
	/**
	 * default sep=", " between all objects
	 *
	 */
	public static String toString(Object[] strings) {
		return toString(strings, ", ");
	}
	/**
	 * <p>
	 * Returns padding using the specified delimiter repeated to a given length.
	 * </p>
	 *
	 * <pre>
	 * StringUtils.repeat(0, 'e') = ""
	 * StringUtils.repeat(3, 'e') = "eee"
	 * StringUtils.repeat(-2, 'e') = ""
	 * </pre>
	 *
	 * <p>
	 * Note: this method doesn't not support padding with <a
	 * href="http://www.unicode.org/glossary/#supplementary_character">Unicode Supplementary Characters</a> as they
	 * require a pair of {@code char}s to be represented. If you are needing to support full I18N of your applications
	 * consider using {@link #repeat(String, int)} instead.
	 * </p>
	 *
	 * @param ch
	 *            character to repeat
	 * @param repeat
	 *            number of times to repeat char, negative treated as zero
	 * @return String with repeated character
	 * @see #repeat(String, int)
	 */
	public static String repeat(char ch, int repeat) {
		// fixed: the contract above says negative counts are treated as zero,
		// but "new char[negative]" used to throw NegativeArraySizeException
		if (repeat <= 0)
			return EMPTY;
		char[] buf = new char[repeat];
		for (int i = repeat - 1; i >= 0; i--) {
			buf[i] = ch;
		}
		return new String(buf);
	}
	/**
	 * <p>
	 * Repeat a String {@code repeat} times to form a new String.
	 * </p>
	 *
	 * <pre>
	 * StringUtils.repeat(null, 2) = null
	 * StringUtils.repeat("", 0) = ""
	 * StringUtils.repeat("", 2) = ""
	 * StringUtils.repeat("a", 3) = "aaa"
	 * StringUtils.repeat("ab", 2) = "abab"
	 * StringUtils.repeat("a", -2) = ""
	 * </pre>
	 *
	 * @param str
	 *            the String to be repeated, may be null
	 * @param repeat
	 *            number of times to repeat {@code str}, negative treated as zero
	 * @return a new String consisting of the original String repeated, {@code null} if null String input
	 */
	public static String repeat(String str, int repeat) {
		if (str == null) {
			return null;
		}
		if (repeat <= 0) {
			return EMPTY;
		}
		int inputLength = str.length();
		if (repeat == 1 || inputLength == 0) {
			return str;
		}
		if (inputLength == 1 && repeat <= PAD_LIMIT) {
			return repeat(str.charAt(0), repeat);
		}
		int outputLength = inputLength * repeat;
		switch (inputLength) {
		case 1:
			return repeat(str.charAt(0), repeat);
		case 2:
			// fast path: fill the output two characters at a time
			char ch0 = str.charAt(0);
			char ch1 = str.charAt(1);
			char[] output2 = new char[outputLength];
			for (int i = repeat * 2 - 2; i >= 0; i--, i--) {
				output2[i] = ch0;
				output2[i + 1] = ch1;
			}
			return new String(output2);
		default:
			StringBuilder buf = new StringBuilder(outputLength);
			for (int i = 0; i < repeat; i++) {
				buf.append(str);
			}
			return buf.toString();
		}
	}
	/** Format a double with 4 decimal places (integers without decimals). */
	public static String toString(double data) {
		return toString(data, 4);
	}
	/** Format a long with thousand separators. */
	public static String toString(long data) {
		return intFormatter.format(data);
	}
	/** Render a 2-D double array, one bracketed row per line, preceded by its dimension. */
	public static String toString(double[][] data) {
		int rows = data.length;
		StringBuilder sb = new StringBuilder();
		sb.append("Dimension: " + rows + " x " + data[0].length + "\n");
		for (int i = 0; i < rows; i++) {
			sb.append("[");
			for (int j = 0; j < data[i].length; j++) {
				// values are narrowed to float for a compact rendering
				sb.append((float) data[i][j]);
				if (j < data[i].length - 1)
					sb.append("\t");
			}
			sb.append("]\n");
		}
		return sb.toString();
	}
	/** Render a 2-D int array, one bracketed row per line, preceded by its dimension. */
	public static String toString(int[][] data) {
		int rows = data.length;
		StringBuilder sb = new StringBuilder();
		sb.append("Dimension: " + rows + " x " + data[0].length + "\n");
		for (int i = 0; i < rows; i++) {
			sb.append("[");
			for (int j = 0; j < data[i].length; j++) {
				sb.append(data[i][j]);
				if (j < data[i].length - 1)
					sb.append("\t");
			}
			sb.append("]\n");
		}
		return sb.toString();
	}
	/**
	 * Format a number with the given number of decimal places; integral values
	 * are rendered without a decimal part.
	 */
	public static String toString(Number data, int bits) {
		double val = data.doubleValue();
		if (Maths.isInt(val))
			return (int) val + "";
		String format = "%." + bits + "f";
		return String.format(format, val);
	}
	/**
	 * Split a string into trimmed, non-empty parts.
	 * NOTE(review): Splitter.on(String) matches a literal separator, not a
	 * regex, despite the parameter name "reg" — confirm callers' expectation.
	 */
	public static List<String> toList(String str, String reg) {
		Iterable<String> iter = Splitter.on(reg).omitEmptyStrings().trimResults().split(str);
		return Lists.newArrayList(iter);
	}
	/** @return the last (at most) 50 characters of the input */
	public static String shortStr(String input) {
		return shortStr(input, 50);
	}
	/** @return the last (at most) {@code len} characters of the input */
	public static String shortStr(String input, int len) {
		int begin = 0;
		if (input.length() > len)
			begin = input.length() - len;
		return input.substring(begin);
	}
	/**
	 * Render a collection; a Multiset is rendered as "element, count" lines,
	 * any other collection as comma-separated values.
	 */
	public static <T> String toString(Collection<T> ts) {
		if (ts instanceof Multiset<?>) {
			StringBuilder sb = new StringBuilder();
			Multiset<T> es = (Multiset<T>) ts;
			for (T e : es.elementSet()) {
				int count = es.count(e);
				sb.append(e + ", " + count + "\n");
			}
			return sb.toString();
		}
		return toString(ts, ",");
	}
	/** Join a collection with the given separator, skipping nulls. */
	public static <T> String toString(Collection<T> ts, String sep) {
		return Joiner.on(sep).skipNulls().join(ts);
	}
	/**
	 * Render a collection one element per line, transforming each element with
	 * the given converter (or toString() when the converter is null).
	 *
	 * @return the joined string, or null for a null/empty collection
	 */
	public static <T> String toString(Collection<T> ts, Converter<T, String> lw) throws Exception {
		if (ts == null || ts.size() == 0)
			return null;
		StringBuilder sb = new StringBuilder();
		int N = ts.size(), i = 0;
		for (T t : ts) {
			String line = lw != null ? lw.transform(t) : t.toString();
			sb.append(line);
			if (i++ < N - 1)
				sb.append(SEP);
		}
		return sb.toString();
	}
	/** Render a map as "key -> value" lines. */
	public static <K, V> String toString(Map<K, V> map) {
		return toString(map, "\n");
	}
	/** Render a map as "key -> value" entries joined with the given separator. */
	public static <K, V> String toString(Map<K, V> map, String sep) {
		return Joiner.on(sep).withKeyValueSeparator(" -> ").join(map);
	}
	/**
	 * Render a map one entry per line, formatting each entry with the given
	 * writer (or "key -> value" when the writer is null).
	 */
	public static <K, V> String toString(Map<K, V> map, MapWriter<K, V> mw) {
		StringBuilder sb = new StringBuilder();
		int size = map.size();
		int count = 0;
		for (Entry<K, V> en : map.entrySet()) {
			K key = en.getKey();
			V val = en.getValue();
			String line = mw != null ? mw.processEntry(key, val) : key + " -> " + val;
			sb.append(line);
			if (count++ < size - 1)
				sb.append(SEP);
		}
		return sb.toString();
	}
	/** Render a double array as "[a, b, c]" with 4-decimal formatting. */
	public static String toString(double[] data) {
		StringBuilder sb = new StringBuilder();
		sb.append("[");
		for (int i = 0; i < data.length; i++) {
			sb.append(toString(data[i]));
			if (i < data.length - 1)
				sb.append(", ");
		}
		sb.append("]");
		return sb.toString();
	}
	/** Render an int array as "[a, b, c]". */
	public static String toString(int[] data) {
		StringBuilder sb = new StringBuilder();
		sb.append("[");
		for (int i = 0; i < data.length; i++) {
			sb.append(data[i]);
			if (i < data.length - 1)
				sb.append(", ");
		}
		sb.append("]");
		return sb.toString();
	}
	/**
	 * convert to a section of message, i.e. a comment banner framed by a line
	 * of '*' sized to the longest message
	 */
	public static String toSection(List<String> msgs) {
		StringBuilder sb = new StringBuilder();
		int repeat = 50;
		sb.append(" *\n");
		for (String msg : msgs) {
			sb.append(" * " + msg + "\n");
			if (msg.length() > repeat)
				repeat = msg.length();
		}
		sb.append(" *\n");
		String stars = Strings.repeat('*', repeat);
		String head = "\n/*" + stars + "\n";
		sb.insert(0, head);
		String tail = " *" + stars + "/";
		sb.append(tail);
		return sb.toString();
	}
}
| 10,429 | 23.71564 | 116 | java |
librec | librec-master/librec/src/main/java/librec/util/FileIO.java | // Copyright (C) 2014-2015 Guibing Guo
//
// This file is part of LibRec.
//
// LibRec is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// LibRec is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with LibRec. If not, see <http://www.gnu.org/licenses/>.
//
package librec.util;
import java.io.BufferedInputStream;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.InputStreamReader;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.OutputStreamWriter;
import java.net.URL;
import java.nio.channels.FileChannel;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.zip.CRC32;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;
public class FileIO {
// 1G in bytes or units
public static final long ONE_KB = 1024;
public static final long ONE_K = 1000;
// 1M in bytes or units
public static final long ONE_MB = ONE_KB * ONE_KB;
public static final long ONE_M = ONE_K * ONE_K;
// 1K in bytes or units
public static final long ONE_GB = ONE_KB * ONE_MB;
public static final long ONE_G = ONE_K * ONE_M;
// desktop path
public final static String desktop = Systems.getDesktop();
/* comma without consideration in the quotas, such as "boys, girls" */
public final static String comma = ",(?=([^\"]*\"[^\"]*\")*[^\"]*$)";
/**
* interface for converting an entry of a map to string
*
* @param <K>
* key type
* @param <V>
* value type
*/
public interface MapWriter<K, V> {
String processEntry(K key, V val);
}
/**
* Transform an input object with Type K to an output object with type T
*
* @param <K>
* type of input object
* @param <T>
* type of output object
*/
public interface Converter<K, T> {
T transform(K in) throws Exception;
}
	/**
	 * Private constructor: this is a static utility class and must not be
	 * instantiated.
	 */
	private FileIO() {
	}
/**
* Returns a human-readable version of the file size, where the input represents a specific number of bytes.
*
* @param size
* the number of bytes
* @return a human-readable display value (includes units)
*/
public static String formatBytes(long size) {
String display;
if (size / ONE_GB > 0) {
display = String.format("%.2f", (size + 0.0) / ONE_GB) + " GB";
} else if (size / ONE_MB > 0) {
display = String.format("%.2f", (size + 0.0) / ONE_MB) + " MB";
} else if (size / ONE_KB > 0) {
display = String.format("%.2f", (size + 0.0) / ONE_KB) + " KB";
} else {
display = String.valueOf(size) + " bytes";
}
return display;
}
/**
* Returns a human-readable version of the file size.
*
* @param size
* the size of a file in units (not in bytes)
* @return a human-readable display value
*/
public static String formatSize(long size) {
String display;
if (size / ONE_G > 0) {
display = String.format("%.2f", (size + 0.0) / ONE_G) + " G";
} else if (size / ONE_M > 0) {
display = String.format("%.2f", (size + 0.0) / ONE_M) + " M";
} else if (size / ONE_K > 0) {
display = String.format("%.2f", (size + 0.0) / ONE_K) + " K";
} else {
display = String.valueOf(size);
}
return display;
}
	/**
	 * Get resource path, supporting file and url io path
	 *
	 * @param filePath
	 *            a plain file path, a path under src/main/resources, or a classpath resource
	 * @return path to the file, or null when it cannot be located
	 */
	public static String getResource(String filePath) {
		// 1) the path exists on disk as given
		if (FileIO.exist(filePath))
			return filePath;
		// 2) look under the Maven resources folder
		String path = makeDirPath(new String[] { "src", "main", "resources" }) + filePath;
		if (FileIO.exist(path))
			return path;
		// 3) fall back to a classpath lookup
		// NOTE(review): Class.class.getResource resolves relative to java.lang.Class,
		// so only absolute resource names ("/...") are likely to be found — confirm intent
		URL is = Class.class.getResource(filePath);
		if (is != null)
			return is.getFile();
		is = Class.class.getResource(path);
		if (is != null)
			return is.getFile();
		return null;
	}
public static BufferedReader getReader(String path) throws FileNotFoundException {
return getReader(new File(getResource(path)));
}
public static BufferedReader getReader(File file) throws FileNotFoundException {
return new BufferedReader(new FileReader(file));
}
public static BufferedWriter getWriter(String path) throws Exception {
return getWriter(new File(path));
}
public static BufferedWriter getWriter(File file) throws Exception {
return new BufferedWriter(new FileWriter(file));
}
/**
* @return the name of current folder
*/
public static String getCurrentFolder() {
return Paths.get("").toAbsolutePath().getFileName().toString();
}
/**
* @return the path to current folder
*/
public static String getCurrentPath() {
return Paths.get("").toAbsolutePath().toString();
}
/**
* Make directory path: make sure the path is ended with file separator
*
* @param dirPath
* @return corrected directory path with file separator in the end
*/
public static String makeDirPath(String dirPath) {
switch (Systems.getOs()) {
case Windows:
dirPath = dirPath.replace('/', '\\');
break;
default:
dirPath = dirPath.replace('\\', '/');
break;
}
if (!dirPath.endsWith(Systems.FILE_SEPARATOR))
dirPath += Systems.FILE_SEPARATOR;
return dirPath;
}
/**
* make directory path using the names of directories
*
* @param dirs
* @return
* @throws Exception
*/
public static String makeDirPath(String... dirs) {
String dirPath = "";
for (String dir : dirs)
dirPath += makeDirPath(dir);
return dirPath;
}
	/**
	 * Make directory if it does not exist
	 *
	 * @param dirPath
	 *            the directory to create
	 * @return Directory path with file separator in the end
	 */
	public static String makeDirectory(String dirPath) {
		File dir = new File(dirPath);
		if (!dir.exists())
			// NOTE(review): the boolean result of mkdirs() is ignored, so a
			// failed creation only surfaces later when the path is used
			dir.mkdirs();
		return makeDirPath(dir.getPath());
	}
/**
* Construct directory and return directory path
*
* @param dirs
* @return constructed directory path
*/
public static String makeDirectory(String... dirs) {
String dirPath = makeDirPath(dirs);
return makeDirectory(dirPath);
}
/**
* Write a string into a file
*
* @param filePath
* : the name of file to be written
* @param content
* : the content of a string to be written
* @throws Exception
*/
public static void writeString(String filePath, String content) throws Exception {
writeString(filePath, content, false);
}
public static void writeString(String filePath, String content, boolean append) throws Exception {
BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(filePath, append), "UTF-8"));
if (content.endsWith("\n"))
bw.write(content);
else
bw.write(content + "\n");
bw.close();
}
	// Write a collection to a file, one element per line, truncating the file first.
	public static <T> void writeList(String filePath, Collection<T> objs) throws Exception {
		writeList(filePath, objs, null, false);
	}

	// Write a collection to a file, one element per line, optionally appending.
	public static <T> void writeList(String filePath, Collection<T> objs, boolean append) throws Exception {
		writeList(filePath, objs, null, append);
	}

	// Synchronized variant for concurrent callers writing to the same file.
	public synchronized static <T> void writeListSyn(String filePath, List<T> objs) throws Exception {
		writeList(filePath, objs, null, false);
	}
/**
* Write contents in Collection<T> to a file with the help of a writer helper
*
* @param <T>
* type of Objects in the collection
*
*/
public static <T> void writeList(String filePath, Collection<T> ts, Converter<T, String> lw, boolean append)
throws Exception {
BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(filePath, append), "UTF-8"));
StringBuilder contents = new StringBuilder();
int count = 0;
for (T t : ts) {
contents.append(lw != null ? lw.transform(t) : t);
contents.append("\n");
count++;
if (count >= 1000) {
bw.write(contents.toString());
count = 0;
contents = new StringBuilder();
}
}
if (contents.capacity() > 0)
bw.write(contents.toString());
bw.close();
}
public static <T> void writeVector(String filePath, List<T> objs) throws Exception {
writeVector(filePath, objs, null, false);
}
public static <T> void writeVector(String filePath, List<T> ts, Converter<T, String> wh, boolean append)
throws Exception {
BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(filePath, append), "UTF-8"));
int i = 0;
StringBuilder sb = new StringBuilder();
for (T t : ts) {
sb.append(wh.transform(t));
if (++i < ts.size())
sb.append(", ");
}
bw.write(sb.toString() + "\n");
bw.close();
}
/**
* Read the content of a file, if keywords are specified, then only lines with these keywords will be read
*
* @param filePath
* the file to be read
* @param keywords
* the keywords of lines to be read
* @return the content of a file as string
* @throws Exception
*/
public static String readAsString(String filePath, String... keywords) throws Exception {
BufferedReader br = new BufferedReader(new InputStreamReader(new FileInputStream(filePath), "UTF-8"));
StringBuilder sb = new StringBuilder();
String line = null;
while ((line = br.readLine()) != null) {
if (keywords != null && keywords.length > 0) {
for (String keyword : keywords) {
if (line.contains(keyword)) {
sb.append(line + "\r\n");
break;
}
}
} else
sb.append(line + "\r\n");
}
br.close();
return sb.toString();
}
/**
* Read String from file at specified line numbers, e.g. read two lines at line position 10, 100, starting from line
* 1. Note that line numbers must be ordered from min to max; hence before invoke this method, use ordering method
* first
*
* @param filePath
* @param lines
* @return
* @throws Exception
*/
public static String readAsString(String filePath, int... lines) throws Exception {
BufferedReader br = new BufferedReader(new InputStreamReader(new FileInputStream(filePath), "UTF-8"));
StringBuilder sb = new StringBuilder();
String line = null;
int count = 0;
int num = 0;
while ((line = br.readLine()) != null) {
count++;
if (count == lines[num]) {
num++;
sb.append(line + "\r\n");
}
if (num >= lines.length)
break;
}
br.close();
return sb.toString();
}
public static String readAsString(String path) throws Exception {
if (path.startsWith("http://") || path.contains("www."))
return URLReader.read(path);
else
return readAsString(path, new String[] {});
}
/**
* Read the content of a file and return it as a List<String>
*
* @param filePath
* : the file to be read
* @return the content of a file in java.util.List<String>
* @throws Exception
*/
public static List<String> readAsList(String filePath) throws FileNotFoundException, Exception {
return readAsList(filePath, null);
}
@SuppressWarnings("unchecked")
public static <T> List<T> readAsList(String filePath, Converter<String, T> rh) throws FileNotFoundException,
Exception {
BufferedReader br = new BufferedReader(new InputStreamReader(new FileInputStream(filePath), "UTF-8"));
List<T> contents = new ArrayList<>();
T t = null;
String line = null;
while ((line = br.readLine()) != null) {
if (rh == null)
t = (T) line;
else
t = rh.transform(line);
if (t != null)
contents.add(t);
}
br.close();
return contents;
}
public static Set<String> readAsSet(String filePath) throws FileNotFoundException, Exception {
return readAsSet(filePath, null);
}
@SuppressWarnings("unchecked")
public static <T> Set<T> readAsSet(String filePath, Converter<String, T> rh) throws FileNotFoundException,
Exception {
BufferedReader br = new BufferedReader(new InputStreamReader(new FileInputStream(filePath), "UTF-8"));
Set<T> contents = new HashSet<>();
String line = null;
T t = null;
while ((line = br.readLine()) != null) {
if (rh == null)
t = (T) line;
else
t = rh.transform(line);
if (t != null)
contents.add(t);
}
br.close();
return contents;
}
public static Map<String, String> readAsMap(String filePath) throws FileNotFoundException, Exception {
return readAsMap(filePath, ",");
}
public static Map<String, String> readAsMap(String filePath, String seperator) throws Exception {
BufferedReader br = new BufferedReader(new InputStreamReader(new FileInputStream(filePath), "UTF-8"));
Map<String, String> contents = new HashMap<>();
String line = null;
while ((line = br.readLine()) != null) {
String[] data = line.split(seperator);
if (data.length > 1)
contents.put(data[0], data[1]);
}
br.close();
return contents;
}
@SuppressWarnings("unchecked")
public static <T, E> Map<T, E> readAsMap(String filePath, Converter<String, Object[]> rh) throws Exception {
BufferedReader br = new BufferedReader(new InputStreamReader(new FileInputStream(filePath), "UTF-8"));
Map<T, E> contents = new HashMap<>();
String line = null;
while ((line = br.readLine()) != null) {
Object[] obs = rh.transform(line);
contents.put((T) obs[0], (E) obs[1]);
}
br.close();
return contents;
}
/**
* read a map in the form of Map<Integer, Double>
*
* @param filePath
* @return Map<Integer, Double>
* @throws Exception
*/
public static Map<String, Double> readAsIDMap(String filePath) throws Exception {
return readAsIDMap(filePath, ",");
}
/**
* read a map in the form of Map<Integer, Double>
*
* @param filePath
* @return Map<Integer, Double>
* @throws Exception
*/
public static Map<String, Double> readAsIDMap(String filePath, String sep) throws Exception {
BufferedReader br = new BufferedReader(new InputStreamReader(new FileInputStream(filePath), "UTF-8"));
Map<String, Double> contents = new HashMap<>();
String line = null;
while ((line = br.readLine()) != null) {
String[] data = line.split(sep);
if (data.length > 1)
contents.put(data[0], new Double(data[1]));
}
br.close();
return contents;
}
public static void serialize(Object obj, String filePath) throws Exception {
FileOutputStream fos = new FileOutputStream(filePath);
ObjectOutputStream oos = new ObjectOutputStream(fos);
oos.writeObject(obj);
oos.flush();
oos.close();
fos.close();
}
public static Object deserialize(String filePath) throws Exception {
FileInputStream fis = new FileInputStream(filePath);
ObjectInputStream ois = new ObjectInputStream(fis);
Object obj = ois.readObject();
ois.close();
fis.close();
return obj;
}
/**
* Rename files in a folder by replacing keywords
*
* @param dirPath
* the directory of files
* @param regex
* the old string needed to be replaced, supporting regular expression
* @param replacement
* the new string used to replace old string
* @throws Exception
*/
public static void renameFiles(String dirPath, String regex, String replacement) throws Exception {
File dir = new File(dirPath);
if (!dir.isDirectory())
throw new Exception(dirPath + " is not a directory");
File[] files = dir.listFiles();
if (files != null && files.length > 0) {
for (File file : files) {
renameFile(file, regex, replacement);
}
}
}
public static void renameFile(File file, String regex, String replacement) {
String filename = file.getName();
filename = filename.replaceAll(regex, replacement);
String path = makeDirPath(file.getPath());
file.renameTo(new File(path + filename));
}
public static void copyFile(String source, String target) throws Exception {
copyFile(new File(source), new File(target));
}
/**
* fast file copy
*/
public static void copyFile(File source, File target) throws Exception {
FileInputStream fis = new FileInputStream(source);
FileOutputStream fos = new FileOutputStream(target);
FileChannel inChannel = fis.getChannel();
FileChannel outChannel = fos.getChannel();
// inChannel.transferTo(0, inChannel.size(), outChannel);
// original -- apparently has trouble copying large files on Windows
// magic number for Windows, 64Mb - 32Kb
int maxCount = (64 * 1024 * 1024) - (32 * 1024);
long size = inChannel.size();
long position = 0;
while (position < size) {
position += inChannel.transferTo(position, maxCount, outChannel);
}
inChannel.close();
outChannel.close();
fis.close();
fos.close();
}
public static void deleteFile(String source) throws Exception {
new File(source).delete();
}
public static void deleteDirectory(String dirPath) throws Exception {
deleteDirectory(new File(dirPath));
}
public static void deleteDirectory(File dir) throws Exception {
cleanDirectory(dir);
dir.delete();
}
public static void cleanDirectory(String dirPath) throws Exception {
cleanDirectory(new File(dirPath));
}
public static void cleanDirectory(File dir) throws Exception {
if (!dir.exists())
return;
if (!dir.isDirectory())
throw new Exception("The path '" + dir.getPath() + "' is not a directory. ");
File[] fs = dir.listFiles();
for (File f : fs) {
if (f.isDirectory())
deleteDirectory(f.getPath());
else
f.delete();
}
}
	// Move = copy then delete; not atomic, and works across file systems.
	public static void moveFile(String source, String target) throws Exception {
		copyFile(source, target);
		deleteFile(source);
	}

	// Move a whole directory tree by copy-then-delete (not atomic).
	public static void moveDirectory(String sourceDir, String targetDir) throws Exception {
		copyDirectory(sourceDir, targetDir);
		deleteDirectory(sourceDir);
	}
public static void copyDirectory(String sourceDir, String targetDir) throws Exception {
File sDir = new File(sourceDir);
File tDir = new File(targetDir);
if (sDir.isDirectory()) {
if (!tDir.exists())
tDir.mkdirs();
File[] files = sDir.listFiles();
for (File f : files) {
if (f.isDirectory()) {
copyDirectory(f.getPath(), tDir + Systems.FILE_SEPARATOR + f.getName() + Systems.FILE_SEPARATOR);
} else {
copyFile(f, new File(tDir.getPath() + Systems.FILE_SEPARATOR + f.getName()));
}
}
}
}
/**
* empty a file content
*/
public static void empty(String filePath) throws Exception {
File file = new File(filePath);
if (file.exists())
file.delete();
file.createNewFile();
}
/**
* check whether a file exists
*/
public static boolean exist(String filePath) {
return new File(filePath).exists();
}
/**
* list all files of a given folder
*
* @param dirPath
* a given folder
* @return file list
*/
public static File[] listFiles(String dirPath) {
File dir = new File(dirPath);
if (dir.isDirectory())
return dir.listFiles();
else
return new File[] { dir };
}
/**
* Zip a given folder
*
* @param dirPath
* a given folder: must be all files (not sub-folders)
* @param filePath
* zipped file
* @throws Exception
*/
public static void zipFolder(String dirPath, String filePath) throws Exception {
File outFile = new File(filePath);
ZipOutputStream zos = new ZipOutputStream(new FileOutputStream(outFile));
int bytesRead;
byte[] buffer = new byte[1024];
CRC32 crc = new CRC32();
for (File file : listFiles(dirPath)) {
BufferedInputStream bis = new BufferedInputStream(new FileInputStream(file));
crc.reset();
while ((bytesRead = bis.read(buffer)) != -1) {
crc.update(buffer, 0, bytesRead);
}
bis.close();
// Reset to beginning of input stream
bis = new BufferedInputStream(new FileInputStream(file));
ZipEntry entry = new ZipEntry(file.getName());
entry.setMethod(ZipEntry.STORED);
entry.setCompressedSize(file.length());
entry.setSize(file.length());
entry.setCrc(crc.getValue());
zos.putNextEntry(entry);
while ((bytesRead = bis.read(buffer)) != -1) {
zos.write(buffer, 0, bytesRead);
}
bis.close();
}
zos.close();
Logs.debug("A zip-file is created to: {}", outFile.getPath());
}
	// Ad-hoc manual test entry point: zips the desktop "papers" folder.
	public static void main(String[] args) throws Exception {
		FileIO.zipFolder(FileIO.desktop + "papers", "desktop.zip");
	}
}
| 20,520 | 26.434492 | 117 | java |
librec | librec-master/librec/src/main/java/librec/util/Gamma.java | // Copyright (C) 2014-2015 Guibing Guo
//
// This file is part of LibRec.
//
// LibRec is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// LibRec is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with LibRec. If not, see <http://www.gnu.org/licenses/>.
//
package librec.util;
/**
 * Gamma-related special functions: log-gamma, gamma, digamma and an
 * approximate inverse digamma.
 */
public class Gamma {

	// thresholds for the digamma approximations
	private final static double small = 1e-6;
	private final static double large = 9.5;

	private final static double d1 = -0.5772156649015328606065121; // digamma(1) = -Euler's constant
	private final static double d2 = Math.pow(Math.PI, 2.0) / 6.0; // pi^2 / 6

	// coefficients of de Moivre's asymptotic expansion of digamma
	private final static double s3 = 1.0 / 12.0;
	private final static double s4 = 1.0 / 120.0;
	private final static double s5 = 1.0 / 252.0;
	private final static double s6 = 1.0 / 240.0;
	private final static double s7 = 1.0 / 132.0;

	/**
	 * log Gamma function: log(gamma(x)) for x>0, accurate to 10 decimal places <br>
	 *
	 * Reference: Pike MC & Hill ID (1966) Algorithm 291: Logarithm of the gamma function. Communications of the
	 * Association for Computing Machinery, 9:684
	 *
	 * @param x
	 *            a positive argument
	 * @return the log of the gamma function of the given x
	 */
	public static double logGamma(double x) {
		// Lanczos-style series with six coefficients
		double tmp = (x - 0.5) * Math.log(x + 4.5) - (x + 4.5);
		double ser = 1.0 + 76.18009173 / (x + 0) - 86.50532033 / (x + 1) + 24.01409822 / (x + 2) - 1.231739516
				/ (x + 3) + 0.00120858003 / (x + 4) - 0.00000536382 / (x + 5);
		return tmp + Math.log(ser * Math.sqrt(2 * Math.PI));
	}

	/**
	 * The Gamma function is defined by: <br>
	 *
	 * Gamma(x) = integral( t^(x-1) e^(-t), t = 0 .. infinity) <br>
	 *
	 * Computed as exp(logGamma(x)); uses Lanczos approximation formula.
	 */
	public static double gamma(double x) {
		return Math.exp(logGamma(x));
	}

	/**
	 * digamma(x) = d log Gamma(x) / dx
	 *
	 * @param x
	 *            the argument; NaN/infinite input yields NaN, 0 yields -infinity
	 * @return the digamma function at x
	 */
	public static double digamma(double x) {
		double y = 0.0;
		double r = 0.0;

		if (Double.isInfinite(x) || Double.isNaN(x)) {
			return Double.NaN;
		}
		if (x == 0.0) {
			// pole at the origin
			return Double.NEGATIVE_INFINITY;
		}
		if (x < 0.0) {
			// Reflection formula: digamma(x) = digamma(1 - x) - PI * cot(PI * x);
			// note cot(-PI*x) = -cot(PI*x), so the sign below is correct.
			// (Bug fix: the original mistakenly called gamma(-x + 1) here.)
			y = digamma(-x + 1) + Math.PI * (1.0 / Math.tan(-Math.PI * x));
			return y;
		}

		// Use series approximation if argument <= small.
		if (x <= small) {
			y = y + d1 - 1.0 / x + d2 * x;
			return y;
		}

		// Reduce via digamma(x) = digamma(x + 1) - 1/x until (x + N) >= large.
		while (true) {
			if (x > small && x < large) {
				y = y - 1.0 / x;
				x = x + 1.0;
			} else {
				break;
			}
		}

		// Use de Moivre's expansion if argument >= large.
		// In maple: asympt(Psi(x), x);
		if (x >= large) {
			r = 1.0 / x;
			y = y + Math.log(x) - 0.5 * r;
			r = r * r;
			y = y - r * (s3 - r * (s4 - r * (s5 - r * (s6 - r * s7))));
		}

		return y;
	}

	/**
	 * Approximate inverse of digamma: returns x such that digamma(x) is close to
	 * y. This is the initialization formula from Tony Minka's fastfit Matlab
	 * code; no Newton refinement is applied, so the result is only a rough
	 * approximation.
	 */
	public static double invDigamma(double y) {
		return y < -2.22 ? (-1.0 / (y - digamma(1))) : (Math.exp(y) + 0.5);
	}
}
| 3,335 | 26.8 | 117 | java |
librec | librec-master/librec/src/main/java/librec/util/URLReader.java | // Copyright (C) 2014-2015 Guibing Guo
//
// This file is part of LibRec.
//
// LibRec is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// LibRec is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with LibRec. If not, see <http://www.gnu.org/licenses/>.
//
package librec.util;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.InetSocketAddress;
import java.net.Proxy;
import java.net.SocketAddress;
import java.net.URL;
import java.net.URLConnection;
/**
 * Fetches the textual content of a URL, optionally through an HTTP proxy.
 * Lines are re-joined with CRLF.
 *
 * @author Guo Guibing
 */
public class URLReader
{
	/**
	 * Drain the reader, appending CRLF after each line; the reader is always
	 * closed, even if reading fails (the originals leaked it on exception).
	 */
	private static String readAll(BufferedReader br) throws Exception
	{
		try
		{
			StringBuilder sb = new StringBuilder();
			String line = null;
			while ((line = br.readLine()) != null)
			{
				sb.append(line);
				sb.append("\r\n");
			}
			return sb.toString();
		} finally
		{
			br.close();
		}
	}

	/**
	 * Read the content of a URL with a direct connection.
	 */
	public static String read(String url) throws Exception
	{
		URL link = new URL(url);
		return readAll(new BufferedReader(new InputStreamReader(link.openStream())));
	}

	/**
	 * Read the content of a URL through an HTTP proxy given by host and port.
	 */
	public static String read(String url, String proxyHost, int proxyPort) throws Exception
	{
		SocketAddress addr = new InetSocketAddress(proxyHost, proxyPort);
		return read(url, new Proxy(Proxy.Type.HTTP, addr));
	}

	/**
	 * Read the content of a URL through the given proxy, with a 10s connect timeout.
	 */
	public static String read(String url, Proxy proxy) throws Exception
	{
		URL link = new URL(url);
		URLConnection conn = link.openConnection(proxy);
		conn.setConnectTimeout(10 * 1000);
		return readAll(new BufferedReader(new InputStreamReader(conn.getInputStream())));
	}
}
| 2,471 | 23.72 | 88 | java |
librec | librec-master/librec/src/main/java/librec/util/Randoms.java | // Copyright (C) 2014-2015 Guibing Guo
//
// This file is part of LibRec.
//
// LibRec is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// LibRec is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with LibRec. If not, see <http://www.gnu.org/licenses/>.
//
package librec.util;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Random;
import java.util.Set;
/**
 * Random-number utilities: uniform/gaussian/gamma/poisson/... sampling plus
 * helpers for random index arrays and permutations.
 *
 * NOTE(review): the class holds shared mutable state (the RNG {@code r} and the
 * no-repeat cache {@code _tempList}), so it is not thread-safe -- confirm
 * single-threaded usage by callers.
 *
 * @author Guo Guibing
 */
public class Randoms {

	private static Random r = new Random(System.currentTimeMillis());

	// cache of values already handed out by nextInt(...); cleared via clearCache()
	private static List<Object> _tempList = new ArrayList<>();

	/**
	 * Random generate an integer in [0, range)
	 *
	 * @param range
	 * @return
	 */
	public static int uniform(int range) {
		return uniform(0, range);
	}

	// Re-seed the shared RNG (useful for reproducible experiments).
	public static void seed(long seed) {
		r = new Random(seed);
	}

	/**
	 * Random generate an integer in [min, max)
	 *
	 * @param min
	 * @param max
	 * @return
	 */
	public static int uniform(int min, int max) {
		return min + r.nextInt(max - min);
	}

	/**
	 * return real number uniformly in [0, 1)
	 */
	public static double random() {
		return uniform();
	}

	/**
	 * @return a random number from a given list of numbers
	 */
	public static <T> T random(List<T> data) {
		int idx = uniform(data.size());
		return data.get(idx);
	}

	/**
	 * a random double array with values in [0, 1)
	 *
	 * @param size
	 *            the size of random array
	 */
	public static double[] doubles(int size) {
		double[] array = new double[size];
		for (int i = 0; i < size; i++)
			array[i] = random();

		return array;
	}

	/**
	 * a random double array with values in [min, max)
	 *
	 * @param size
	 *            the size of random array
	 */
	public static double[] doubles(double min, double max, int size) {
		double[] array = new double[size];
		for (int i = 0; i < size; i++)
			array[i] = uniform(min, max);

		return array;
	}

	/**
	 * random (uniformly distributed) double in [0, 1)
	 */
	public static double uniform() {
		return uniform(0.0, 1.0);
	}

	/**
	 * random (uniformly distributed) double in [min, max)
	 */
	public static double uniform(double min, double max) {
		return min + (max - min) * r.nextDouble();
	}

	/**
	 * Return a boolean, which is true with probability p, and false otherwise.
	 */
	public static boolean bernoulli(double p) {
		return uniform() < p;
	}

	/**
	 * Return a boolean, which is true with probability .5, and false otherwise.
	 */
	public static boolean bernoulli() {
		return bernoulli(0.5);
	}

	/**
	 * return a real number from a Gaussian distribution with given mean and stddev
	 *
	 * @param mu
	 *            mean
	 * @param sigma
	 *            stddev
	 *
	 */
	public static double gaussian(double mu, double sigma) {
		return mu + sigma * r.nextGaussian();
	}

	/**
	 * Randomly sample 1 point from Gamma Distribution with the given parameters. The code is from Mahout
	 * (http://mahout.apache.org/), available under Apache 2 license.
	 *
	 * @param alpha
	 *            alpha parameter for Gamma Distribution.
	 * @param scale
	 *            scale parameter for Gamma Distribution.
	 * @return a sample point randomly drawn from the given distribution.
	 */
	public static double gamma(double alpha, double scale) {
		double rate = 1 / scale;
		if (alpha <= 0.0 || rate <= 0.0) {
			throw new IllegalArgumentException();
		}

		double gds;
		double b = 0.0;

		// CASE A: Acceptance rejection algorithm gs
		if (alpha < 1.0) {
			b = 1.0 + 0.36788794412 * alpha; // Step 1
			while (true) {
				double p = b * r.nextDouble();
				// Step 2. Case gds <= 1
				if (p <= 1.0) {
					gds = Math.exp(Math.log(p) / alpha);
					if (Math.log(r.nextDouble()) <= -gds) {
						return gds / rate;
					}
				}
				// Step 3. Case gds > 1
				else {
					gds = -Math.log((b - p) / alpha);
					if (Math.log(r.nextDouble()) <= ((alpha - 1.0) * Math.log(gds))) {
						return gds / rate;
					}
				}
			}
		}

		// CASE B: Acceptance complement algorithm gd (gaussian distribution,
		// box muller transformation)
		else {
			double ss = 0.0;
			double s = 0.0;
			double d = 0.0;

			// Step 1. Preparations
			if (alpha != -1.0) {
				ss = alpha - 0.5;
				s = Math.sqrt(ss);
				d = 5.656854249 - 12.0 * s;
			}
			// Step 2. Normal deviate
			double v12;
			double v1;
			do {
				v1 = 2.0 * r.nextDouble() - 1.0;
				double v2 = 2.0 * r.nextDouble() - 1.0;
				v12 = v1 * v1 + v2 * v2;
			} while (v12 > 1.0);

			double t = v1 * Math.sqrt(-2.0 * Math.log(v12) / v12);
			double x = s + 0.5 * t;
			gds = x * x;
			if (t >= 0.0) { // Immediate acceptance
				return gds / rate;
			}

			double u = r.nextDouble();
			if (d * u <= t * t * t) { // Squeeze acceptance
				return gds / rate;
			}

			double q0 = 0.0;
			double si = 0.0;
			double c = 0.0;

			// Step 4. Set-up for hat case
			if (alpha != -1.0) {
				double rr = 1.0 / alpha;
				double q9 = 0.0001710320;
				double q8 = -0.0004701849;
				double q7 = 0.0006053049;
				double q6 = 0.0003340332;
				double q5 = -0.0003349403;
				double q4 = 0.0015746717;
				double q3 = 0.0079849875;
				double q2 = 0.0208333723;
				double q1 = 0.0416666664;
				q0 = ((((((((q9 * rr + q8) * rr + q7) * rr + q6) * rr + q5) * rr + q4) * rr + q3) * rr + q2) * rr + q1)
						* rr;
				if (alpha > 3.686) {
					if (alpha > 13.022) {
						b = 1.77;
						si = 0.75;
						c = 0.1515 / s;
					} else {
						b = 1.654 + 0.0076 * ss;
						si = 1.68 / s + 0.275;
						c = 0.062 / s + 0.024;
					}
				} else {
					b = 0.463 + s - 0.178 * ss;
					si = 1.235;
					c = 0.195 / s - 0.079 + 0.016 * s;
				}
			}
			double v, q;
			double a9 = 0.104089866;
			double a8 = -0.112750886;
			double a7 = 0.110368310;
			double a6 = -0.124385581;
			double a5 = 0.142873973;
			double a4 = -0.166677482;
			double a3 = 0.199999867;
			double a2 = -0.249999949;
			double a1 = 0.333333333;
			// Step 5. Calculation of q
			if (x > 0.0) {
				// Step 6.
				v = t / (s + s);
				if (Math.abs(v) > 0.25) {
					q = q0 - s * t + 0.25 * t * t + (ss + ss) * Math.log(1.0 + v);
				}
				// Step 7. Quotient acceptance
				else {
					q = q0
							+ 0.5
							* t
							* t
							* ((((((((a9 * v + a8) * v + a7) * v + a6) * v + a5) * v + a4) * v + a3) * v + a2) * v + a1)
							* v;
				}
				if (Math.log(1.0 - u) <= q) {
					return gds / rate;
				}
			}

			double e7 = 0.000247453;
			double e6 = 0.001353826;
			double e5 = 0.008345522;
			double e4 = 0.041664508;
			double e3 = 0.166666848;
			double e2 = 0.499999994;
			double e1 = 1.000000000;
			// Step 8. Double exponential deviate t
			while (true) {
				double sign_u;
				double e;
				do { // Step 9. Rejection of t
					e = -Math.log(r.nextDouble());
					u = r.nextDouble();
					u = u + u - 1.0;
					sign_u = (u > 0) ? 1.0 : -1.0;
					t = b + (e * si) * sign_u;
				} while (t <= -0.71874483771719);

				// Step 10. New q(t)
				v = t / (s + s);
				if (Math.abs(v) > 0.25) {
					q = q0 - s * t + 0.25 * t * t + (ss + ss) * Math.log(1.0 + v);
				} else {
					q = q0
							+ 0.5
							* t
							* t
							* ((((((((a9 * v + a8) * v + a7) * v + a6) * v + a5) * v + a4) * v + a3) * v + a2) * v + a1)
							* v;
				}
				// Step 11.
				if (q <= 0.0) {
					continue;
				}
				// Step 12. Hat acceptance
				double w;
				if (q > 0.5) {
					w = Math.exp(q) - 1.0;
				} else {
					w = ((((((e7 * q + e6) * q + e5) * q + e4) * q + e3) * q + e2) * q + e1) * q;
				}
				if (c * u * sign_u <= w * Math.exp(e - 0.5 * t * t)) {
					x = s + 0.5 * t;
					return x * x / rate;
				}
			}
		}
	}

	/**
	 * Return an integer with a Poisson distribution with mean lambda.
	 */
	public static int poisson(double lambda) {
		// using algorithm given by Knuth
		// see http://en.wikipedia.org/wiki/Poisson_distribution
		int k = 0;
		double p = 1.0;
		double L = Math.exp(-lambda);
		do {
			k++;
			p *= uniform();
		} while (p >= L);
		return k - 1;
	}

	/**
	 * Return a real number with a Pareto distribution with parameter alpha.
	 */
	public static double pareto(double alpha) {
		return Math.pow(1 - uniform(), -1.0 / alpha) - 1.0;
	}

	/**
	 * Return a real number with a Cauchy distribution.
	 */
	public static double cauchy() {
		return Math.tan(Math.PI * (uniform() - 0.5));
	}

	/**
	 * Return a number from a discrete distribution: i with probability a[i]. Precondition: array entries are
	 * nonnegative and their sum (very nearly) equals 1.0.
	 */
	public static int discrete(double[] a) {
		double EPSILON = 1E-6;
		double sum = 0.0;
		for (int i = 0; i < a.length; i++) {
			if (a[i] < 0.0)
				throw new IllegalArgumentException("array entry " + i + " is negative: " + a[i]);
			sum = sum + a[i];
		}
		if (sum > 1.0 + EPSILON || sum < 1.0 - EPSILON)
			throw new IllegalArgumentException("sum of array entries not equal to one: " + sum);

		// the for loop may not return a value when both r is (nearly) 1.0 and when the cumulative sum is less than 1.0 (as a result of floating-point roundoff error)
		while (true) {
			double r = uniform();
			sum = 0.0;
			for (int i = 0; i < a.length; i++) {
				sum = sum + a[i];
				if (sum > r)
					return i;
			}
		}
	}

	/**
	 * Return a real number from an exponential distribution with rate lambda.
	 */
	public static double exp(double lambda) {
		return -Math.log(1 - uniform()) / lambda;
	}

	/**
	 * generate next random integer in a range besides exceptions
	 *
	 * @param range
	 *            the range located of next integer
	 * @param exceptions
	 *            the exception values when generating integers, sorted first
	 * @return next no-repeated random integer
	 */
	public static int nextInt(int range, int... exceptions) {
		return nextInt(0, range, exceptions);
	}

	/**
	 * generate next random integer in a range [min, max) besides exceptions
	 *
	 * NOTE(review): results already returned are remembered in the shared
	 * _tempList cache until clearCache() is called, so "no-repeat" holds across
	 * calls, not just within one call.
	 *
	 * @param min
	 *            the minimum of range
	 * @param max
	 *            the maximum of range
	 * @param exceptions
	 *            the exception values when generating integers, sorted first
	 * @return next no-repeated random integer
	 */
	public static int nextInt(int min, int max, int... exceptions) {
		int next;
		while (true) {
			next = min + r.nextInt(max - min);
			// binarySearch requires exceptions to be sorted by the caller
			if (exceptions != null && exceptions.length > 0 && Arrays.binarySearch(exceptions, next) >= 0) {
				continue;
			}
			if (_tempList.contains(next))
				continue;
			else {
				_tempList.add(next);
				break;
			}
		}
		return next;
	}

	/**
	 * Generate no repeat {@code size} indexes from {@code min} to {@code max}
	 *
	 */
	public static int[] indexs(int size, int min, int max) {
		Set<Integer> used = new HashSet<>();

		int[] index = new int[size];
		for (int i = 0; i < index.length; i++) {
			while (true) {
				int ind = uniform(min, max);
				if (!used.contains(ind)) {
					index[i] = ind;
					used.add(ind);
					break;
				}
			}
		}

		return index;
	}

	// Forget all values previously returned by nextInt(...).
	public static void clearCache() {
		_tempList.clear();
	}

	/**
	 * generate next integers array with no repeated elements
	 *
	 * @param length
	 *            the length of the array
	 * @param range
	 *            the index range of the array, default [0, range)
	 * @param exceptions
	 *            the exceptions when generating values
	 * @return ascending sorted integer array
	 * @throws Exception
	 *             if the range is less than length, an exception will be thrown
	 */
	public static int[] nextIntArray(int length, int range) throws Exception {
		return nextIntArray(length, 0, range, null);
	}

	public static int[] nextIntArray(int length, int range, int... exceptions) throws Exception {
		return nextIntArray(length, 0, range, exceptions);
	}

	public static int[] nextIntArray(int length, int min, int max) throws Exception {
		return nextIntArray(length, min, max, null);
	}

	public static int[] nextIntArray(int length, int min, int max, int... exceptions) throws Exception {
		int maxLen = max - min; // because max itself is not counted
		if (maxLen < length)
			throw new Exception("The range is less than legth");
		int[] index = new int[length];
		if (maxLen == length) {
			// the only possibility: every value in [min, max) exactly once
			for (int i = 0; i < length; i++)
				index[i] = min + i;
		} else {
			Randoms.clearCache();
			for (int i = 0; i < index.length; i++)
				index[i] = Randoms.nextInt(min, max, exceptions);
			Arrays.sort(index);
		}
		return index;
	}

	/**
	 * Generate a set of random (unique) integers in the range [min, max) with length {@code length}
	 *
	 * @return a set of unique integers
	 */
	public static List<Integer> randInts(int length, int min, int max) throws Exception {
		int len = max - min;
		if (len < length)
			throw new Exception("The range is less than legth");

		Set<Integer> ints = new HashSet<>();
		while (true) {
			int rand = min + r.nextInt(max - min);
			ints.add(rand);
			if (ints.size() >= length)
				break;
		}
		List<Integer> res = new ArrayList<>(ints);
		Collections.sort(res);
		return res;
	}

	/**
	 * Get a normalize array of probabilities
	 *
	 * @param size
	 *            array size
	 */
	public static double[] randProbs(int size) {
		if (size < 1)
			throw new IllegalArgumentException("The size param must be greate than zero");
		double[] pros = new double[size];
		int sum = 0;
		for (int i = 0; i < pros.length; i++) {
			//avoid zero
			pros[i] = r.nextInt(size) + 1;
			sum += pros[i];
		}
		//normalize
		for (int i = 0; i < pros.length; i++) {
			pros[i] = pros[i] / sum;
		}
		return pros;
	}

	// Array of 'size' uniform integers in [0, range).
	public static int[] ints(int range, int size) {
		int[] data = new int[size];
		for (int i = 0; i < size; i++)
			data[i] = uniform(range);

		return data;
	}

	// Array of 'size' uniform integers in [min, max).
	public static int[] ints(int min, int max, int size) {
		int[] data = new int[size];
		for (int i = 0; i < size; i++)
			data[i] = uniform(min, max);

		return data;
	}

	// List of 'size' uniform doubles in [0, 1).
	public static List<Double> list(int size) {
		return list(size, 0, 1, false);
	}

	public static List<Double> list(int size, int min, int max) {
		return list(size, min, max, false);
	}

	// List of 'size' uniform values in [min, max); integer-valued if isInteger.
	public static List<Double> list(int size, int min, int max, boolean isInteger) {
		List<Double> list = new ArrayList<>(size);
		for (int i = 0; i < size; i++) {
			if (isInteger)
				list.add(uniform(min, max) + 0.0);
			else
				list.add(uniform(min + 0.0, max + 0.0));
		}
		return list;
	}

	/**
	 * Generate a permutation from min to max
	 *
	 * @param min
	 *            the minimum value
	 * @param max
	 *            the maximum value
	 * @return a permutation
	 */
	public static List<Integer> permute(int min, int max) {
		List<Integer> list = new ArrayList<>();
		int len = max - min + 1;
		for (int i = 0; i < len; i++) {
			while (true) {
				int index = uniform(min, max + 1);
				if (!list.contains(index)) {
					list.add(index);
					break;
				}
			}
		}
		return list;
	}
}
| 15,214 | 22.589147 | 160 | java |
librec | librec-master/librec/src/main/java/librec/util/Systems.java | // Copyright (C) 2014-2015 Guibing Guo
//
// This file is part of LibRec.
//
// LibRec is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// LibRec is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with LibRec. If not, see <http://www.gnu.org/licenses/>.
//
package librec.util;
import java.awt.Dimension;
import java.awt.Rectangle;
import java.awt.Robot;
import java.awt.Toolkit;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.IOException;
import java.net.InetAddress;
import java.net.UnknownHostException;
import javax.imageio.ImageIO;
/**
*
* @author Guo Guibing
*
*/
public class Systems {

	// lazily-built, cached path to the user's desktop
	private static String desktopPath = null;

	public final static String FILE_SEPARATOR = System.getProperty("file.separator");
	public final static String USER_NAME = System.getProperty("user.name");
	public final static String USER_DIRECTORY = System.getProperty("user.home");
	public final static String WORKING_DIRECTORY = System.getProperty("user.dir");
	public final static String OPERATING_SYSTEM = System.getProperty("os.name");

	public enum OS {
		Windows, Linux, Mac
	}

	// lazily-detected, cached operating system; stays null until first getOs() call
	private static OS os = null;

	/**
	 * @return path to the desktop with a file separator in the end
	 */
	public static String getDesktop() {
		if (desktopPath == null)
			desktopPath = USER_DIRECTORY + FILE_SEPARATOR + "Desktop" + FILE_SEPARATOR;

		return desktopPath;
	}

	/**
	 * @return the local machine identity in the form "hostName@hostAddress",
	 *         or "unknown@unknown" if the local host cannot be resolved
	 */
	public static String getIP() {
		try {
			InetAddress ip = InetAddress.getLocalHost();
			return ip.getHostName() + "@" + ip.getHostAddress();
		} catch (UnknownHostException e) {
			// bug fix: the previous version fell through after this catch and
			// dereferenced a null reference, throwing NullPointerException
			e.printStackTrace();
			return "unknown@unknown";
		}
	}

	/**
	 * @return the {@link OS} value whose name occurs in the "os.name" system
	 *         property, or null if none matches
	 */
	public static OS getOs() {
		if (os == null) {
			for (OS m : OS.values()) {
				if (OPERATING_SYSTEM.toLowerCase().contains(m.name().toLowerCase())) {
					os = m;
					break;
				}
			}
		}
		return os;
	}

	/**
	 * Block until the user presses [enter] on standard input.
	 */
	public static void pause() {
		try {
			Logs.debug("System paused, press [enter] to continue ...");
			System.in.read();
		} catch (IOException e) {
			e.printStackTrace();
		}
	}

	/**
	 * Capture the full screen to the default file "screenshot.png".
	 */
	public static void captureScreen() throws Exception {
		captureScreen("screenshot.png");
	}

	/**
	 * Capture the full screen and write it as a PNG image to {@code fileName}.
	 *
	 * @param fileName
	 *            path of the output image file
	 */
	public static void captureScreen(String fileName) throws Exception {

		Dimension screenSize = Toolkit.getDefaultToolkit().getScreenSize();
		Rectangle screenRectangle = new Rectangle(screenSize);

		Robot robot = new Robot();
		BufferedImage image = robot.createScreenCapture(screenRectangle);

		File file = new File(fileName);
		ImageIO.write(image, "png", file);

		Logs.debug("A screenshot is captured to: {}", file.getPath());
	}
}
| 2,984 | 25.891892 | 82 | java |
librec | librec-master/librec/src/main/java/librec/util/Stats.java | // Copyright (C) 2014-2015 Guibing Guo
//
// This file is part of LibRec.
//
// LibRec is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// LibRec is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with LibRec. If not, see <http://www.gnu.org/licenses/>.
//
package librec.util;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
*
* @author Guo Guibing
*
*/
public class Stats {

	/**
	 * Compute the arithmetic mean of a collection, silently skipping NaN values.
	 *
	 * @return mean value of a sample
	 */
	public static double mean(Collection<? extends Number> data) {
		double sum = 0.0;
		int count = 0;
		for (Number d : data) {
			if (!Double.isNaN(d.doubleValue())) {
				sum += d.doubleValue();
				count++;
			}
		}
		return sum / count;
	}

	/**
	 * @return harmonic mean of a and b, i.e. 2ab / (a + b)
	 */
	public static double hMean(double a, double b) {
		return 2 * a * b / (a + b);
	}

	/**
	 * Compute the arithmetic mean of an array, silently skipping NaN values.
	 *
	 * reference:
	 * http://www.weibull.com/DOEWeb/unbiased_and_biased_estimators.htm
	 *
	 * <p>
	 * Notes: the sample mean and population mean is estimated in the same way.
	 * </p>
	 *
	 * @return mean value of a sample; NaN if the array holds no non-NaN value
	 */
	public static double mean(double[] data) {
		double sum = 0.0;
		int count = 0;
		for (double d : data) {
			if (!Double.isNaN(d)) {
				sum += d;
				count++;
			}
		}
		return sum / count;
	}

	/**
	 * Compute the mode (most frequent value) of an array.
	 *
	 * @return the most frequent value; if several values tie, the one that first
	 *         reaches the winning count is returned; NaN for an empty array
	 */
	public static double mode(double[] data) {
		Map<Double, Integer> modes = new HashMap<>();

		double mode = Double.NaN;
		int max = 0;
		for (double d : data) {
			int count = 0;
			if (modes.containsKey(d))
				count = modes.get(d);

			count++;

			if (max < count) {
				mode = d;
				max = count;
			}
			modes.put(d, count);
		}

		return mode;
	}

	/**
	 * Compute the weighted mean of {@code a} with weights {@code w}, skipping
	 * positions where either the value or its weight is NaN.
	 */
	public static double weightedcMean(double[] a, double[] w) {
		double sum = 0.0, ws = 0.0;
		for (int i = 0; i < a.length; i++) {
			if (!Double.isNaN(a[i]) && !Double.isNaN(w[i])) {
				sum += a[i] * w[i];
				ws += w[i];
			}
		}
		return sum / ws;
	}

	/**
	 * @return weighted average value of {@code data} and {@code weights}
	 */
	public static double average(List<Double> data, List<Double> weights) {
		double sum = 0, ws = 0;
		for (int i = 0; i < data.size(); i++) {
			double value = data.get(i);
			double weight = weights.get(i);

			sum += value * weight;
			ws += weight;
		}
		return sum / ws;
	}

	/**
	 * Calculate the median value of an array,
	 * <em>Note that the values of doulbe.NaN will be ignored silently.</em>
	 *
	 * NOTE(review): the array is cloned and sorted, so the input is not modified;
	 * NaN values sort to the end (per Arrays.sort), which skews the result rather
	 * than ignoring them -- confirm intended behavior for NaN inputs.
	 */
	public static double median(double[] data) {
		double median = 0.0;

		// make a clone: do not change original data
		double[] clones = data.clone();
		Arrays.sort(clones);

		int size = clones.length;
		int index = 0;
		if (size % 2 == 0) {
			// even length: average the two middle elements
			index = clones.length / 2 - 1;
			median = (clones[index] + clones[index + 1]) / 2.0;
		} else {
			// odd length: take the single middle element
			index = (clones.length + 1) / 2 - 1;
			median = clones[index];
		}

		return median;
	}

	/**
	 * Calculate the median value of a data collection,
	 * <em>Note that the values of doulbe.NaN will be ignored silently.</em>
	 *
	 */
	public static double median(Collection<? extends Number> data) {
		return median(Lists.toArray(data));
	}

	/**
	 * Calculate a sample's variance
	 *
	 */
	public static double var(double[] data) {
		return var(data, mean(data));
	}

	/**
	 * Calculate a sample's variance given a pre-computed mean.
	 *
	 * <p>
	 * refers to:
	 * http://www.weibull.com/DOEWeb/unbiased_and_biased_estimators.htm, for a
	 * discussion of the (n-1) versus n denominator.
	 * </p>
	 *
	 * <p>
	 * NOTE(review): despite the reference above, this implementation divides by n
	 * (population / biased variance), not by (n-1). Callers depend on this, so the
	 * code is documented rather than changed.
	 * </p>
	 *
	 * @return the variance with denominator n; NaN for an empty array
	 */
	public static double var(double[] data, double mean) {
		if (data.length == 0)
			return Double.NaN;

		double sum = 0.0;
		for (int i = 0; i < data.length; i++)
			sum += (data[i] - mean) * (data[i] - mean);

		return sum / data.length;
	}

	/**
	 * calculate the standard deviation
	 */
	public static double sd(Collection<? extends Number> data) {
		return sd(data, mean(data));
	}

	/**
	 * calculate the standard deviation given a pre-computed mean.
	 *
	 * NOTE(review): unlike mean(Collection), NaN values are not skipped here and
	 * the denominator is the full collection size -- confirm this asymmetry.
	 */
	public static double sd(Collection<? extends Number> data, double mean) {
		double sum = 0.0;
		for (Number d : data)
			sum += Math.pow(d.doubleValue() - mean, 2);

		return Math.sqrt(sum / data.size());
	}

	/**
	 * calculate a sample's standard deviation
	 */
	public static double sd(double[] data) {
		return sd(data, mean(data));
	}

	/**
	 * calculate a sample's standard deviation given a pre-computed mean
	 */
	public static double sd(double[] data, double mean) {
		return Math.sqrt(var(data, mean));
	}

	/** @return the sum of all array elements */
	public static double sum(double[] data) {
		double sum = 0.0;
		for (int i = 0; i < data.length; i++)
			sum += data[i];

		return sum;
	}

	/** @return the sum of all collection elements as doubles */
	public static double sum(Collection<? extends Number> data) {
		double sum = 0.0;
		for (Number d : data)
			sum += d.doubleValue();
		return sum;
	}

	/** @return the sum of all array elements */
	public static int sum(int[] data) {
		int sum = 0;
		for (int i = 0; i < data.length; i++)
			sum += data[i];

		return sum;
	}

	/**
	 * the sum of integers from 1 to n-1, i.e. n*(n-1)/2.
	 *
	 * NOTE(review): the original doc claimed "from 1 to n", but the formula
	 * n*(n-1)/2 is the sum 1..(n-1); callers appear to rely on this value.
	 */
	public static int sum(int n) {
		return n * (n - 1) / 2;
	}

	/**
	 * the sum from 1^2 to n^2, i.e. n*(n+1)*(2n+1)/6; approximately n^3/3 for
	 * large n
	 */
	public static double sumSquare(int n) {
		return n * (n + 0.5) * (n + 1) / 3;
	}

	/**
	 * find out the maximum element and its index of an array
	 *
	 * @return a two-element array {maximum value, its index}; {-Infinity, -1} for
	 *         an empty input
	 */
	public static double[] max(double[] data) {
		double max = Double.NEGATIVE_INFINITY;
		int index = -1;

		for (int i = 0; i < data.length; i++) {
			if (max < data[i]) {
				max = data[i];
				index = i;
			}
		}

		return new double[] { max, index };
	}

	/**
	 * find out the maximum element and its index of an array
	 *
	 * @return a two-element array {maximum value, its index}; {Integer.MIN_VALUE,
	 *         -1} for an empty input
	 */
	public static int[] max(int[] data) {
		int max = Integer.MIN_VALUE;
		int index = -1;

		for (int i = 0; i < data.length; i++) {
			if (max < data[i]) {
				max = data[i];
				index = i;
			}
		}

		return new int[] { max, index };
	}

	/**
	 * find out the minimum element and its index of an array
	 *
	 * @return a two-element array {minimum value, its index}; {Integer.MAX_VALUE,
	 *         -1} for an empty input
	 */
	public static int[] min(int[] data) {
		int min = Integer.MAX_VALUE;
		int index = -1;

		for (int i = 0; i < data.length; i++) {
			if (min > data[i]) {
				min = data[i];
				index = i;
			}
		}

		return new int[] { min, index };
	}

	/**
	 * find out the minimum element and its index of an array
	 *
	 * @return a two-element array {minimum value, its index}; {+Infinity, -1} for
	 *         an empty input
	 */
	public static double[] min(double[] data) {
		double min = Double.POSITIVE_INFINITY;
		int index = -1;

		for (int i = 0; i < data.length; i++) {
			if (min > data[i]) {
				min = data[i];
				index = i;
			}
		}

		return new double[] { min, index };
	}
}
| 6,782 | 19.430723 | 76 | java |
librec | librec-master/librec/src/main/java/librec/util/KernelSmoothing.java | package librec.util;
/**
* This is a class implementing kernel smoothing functions
* used in Local Low-Rank Matrix Approximation (LLORMA).
*
* @author Joonseok Lee
* @since 2013. 6. 11
* @version 1.2
*/
public class KernelSmoothing {
	public final static int TRIANGULAR_KERNEL = 201;
	public final static int UNIFORM_KERNEL = 202;
	public final static int EPANECHNIKOV_KERNEL = 203;
	public final static int GAUSSIAN_KERNEL = 204;

	/**
	 * Convert a similarity score into a kernel-smoothed weight.
	 *
	 * <p>
	 * The similarity is first turned into a distance {@code 1 - sim}, which is then
	 * fed through the selected kernel with bandwidth {@code width}.
	 * </p>
	 *
	 * @param sim
	 *            similarity value (distance is computed as 1 - sim)
	 * @param width
	 *            kernel bandwidth
	 * @param kernelType
	 *            one of the *_KERNEL constants; any other value falls back to the
	 *            triangular kernel
	 * @return the kernelized weight
	 */
	public static double kernelize(double sim, double width, int kernelType) {
		double dist = 1.0 - sim;

		switch (kernelType) {
		case UNIFORM_KERNEL:
			// box kernel: full weight inside the bandwidth, zero outside
			return dist < width ? 1 : 0;
		case EPANECHNIKOV_KERNEL:
			return Math.max(3.0 / 4.0 * (1 - Math.pow(dist / width, 2)), 0);
		case GAUSSIAN_KERNEL:
			return 1 / Math.sqrt(2 * Math.PI) * Math.exp(-0.5 * Math.pow(dist / width, 2));
		case TRIANGULAR_KERNEL:
		default:
			// triangular kernel, also the fallback for unknown kernel types
			return Math.max(1 - dist / width, 0);
		}
	}
}
| 1,060 | 27.675676 | 76 | java |
librec | librec-master/librec/src/main/java/librec/util/Measures.java | // Copyright (C) 2014-2015 Guibing Guo
//
// This file is part of LibRec.
//
// LibRec is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// LibRec is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with LibRec. If not, see <http://www.gnu.org/licenses/>.
//
package librec.util;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* Reimplemenmt <code>MyMediaLite.Eval.Measures</code> namespace.
*
* @author guoguibing
*
*/
public class Measures {

	/**
	 * Compute the average precision (AP) of a list of ranked items
	 *
	 * @param <T>
	 *            type of item IDs
	 *
	 * @param rankedList
	 *            a list of ranked item IDs, the highest-ranking item first
	 * @param groundTruth
	 *            a collection of positive/correct item IDs
	 * @return the AP for the given list; 0 if no ranked item is relevant
	 */
	public static <T> double AP(List<T> rankedList, List<T> groundTruth) {
		int hits = 0;
		double sum_precs = 0;

		for (int n = 0, m = rankedList.size(); n < m; n++) {
			T item = rankedList.get(n);
			if (groundTruth.contains(item)) {
				hits++;
				sum_precs += hits / (n + 1.0); // prec@n
			}
		}

		if (hits > 0)
			return sum_precs / groundTruth.size();
		else
			return 0.0;
	}

	/**
	 * Compute the precision at N of a list of ranked items at several N
	 *
	 * @param <T>
	 *            type of item IDs
	 *
	 * @param rankedList
	 *            a list of ranked item IDs, the highest-ranking item first
	 * @param groundTruth
	 *            a collection of positive/correct item IDs
	 * @param ns
	 *            the cutoff positions in the list
	 * @return {N: prec@N}: the precision at N for the given data at the
	 *         different positions N
	 */
	public static <T> Map<Integer, Double> PrecAt(List<T> rankedList, List<T> groundTruth, List<Integer> ns) {
		Map<Integer, Double> prec_at_n = new HashMap<>();

		for (int n : ns)
			prec_at_n.put(n, PrecAt(rankedList, groundTruth, n));

		return prec_at_n;
	}

	/**
	 * Compute the precision at N of a list of ranked items
	 *
	 * @param <T>
	 *            type of item IDs
	 *
	 * @param rankedList
	 *            a list of ranked item IDs, the highest-ranking item first
	 * @param groundTruth
	 *            a collection of positive/correct item IDs
	 * @param n
	 *            the cutoff position in the list
	 * @return the precision at N for the given data
	 */
	public static <T> double PrecAt(List<T> rankedList, List<T> groundTruth, int n) {
		return HitsAt(rankedList, groundTruth, n) / (n + 0.0);
	}

	/**
	 * Compute the recall at N of a list of ranked items at several N
	 *
	 * @param <T>
	 *            type of item IDs
	 *
	 * @param rankedList
	 *            a list of ranked item IDs, the highest-ranking item first
	 * @param groundTruth
	 *            a collection of positive/correct item IDs
	 * @param ns
	 *            the cutoff positions in the list
	 * @return {N: recall@N}: the recall at N for the given data at the
	 *         different positions N
	 */
	public static <T> Map<Integer, Double> RecallAt(List<T> rankedList, List<T> groundTruth, List<Integer> ns) {
		Map<Integer, Double> recall_at_n = new HashMap<>();

		for (int n : ns)
			recall_at_n.put(n, RecallAt(rankedList, groundTruth, n));

		return recall_at_n;
	}

	/**
	 * Compute the recall at N of a list of ranked items
	 *
	 * @param <T>
	 *            type of item IDs
	 *
	 * @param rankedList
	 *            a list of ranked item IDs, the highest-ranking item first
	 * @param groundTruth
	 *            a collection of positive/correct item IDs
	 * @param n
	 *            the cutoff position in the list
	 * @return the recall at N for the given data
	 */
	public static <T> Double RecallAt(List<T> rankedList, List<T> groundTruth, int n) {
		return HitsAt(rankedList, groundTruth, n) / (groundTruth.size() + 0.0);
	}

	/**
	 * Compute the number of hits until position N of a list of ranked items
	 *
	 * @param <T>
	 *            type of item IDs
	 *
	 * @param rankedList
	 *            a list of ranked item IDs, the highest-ranking item first
	 * @param groundTruth
	 *            a collection of positive/correct item IDs
	 * @param n
	 *            the cutoff position in the list
	 * @return the hits at N for the given data
	 */
	public static <T> int HitsAt(List<T> rankedList, List<T> groundTruth, int n) {
		int hits = 0;

		for (int i = 0, k = rankedList.size(); i < k; i++) {
			T item = rankedList.get(i);
			if (!groundTruth.contains(item))
				continue;

			// count relevant items ranked above the cutoff; stop at the first
			// relevant item at or below position n
			if (i < n)
				hits++;
			else
				break;
		}

		return hits;
	}

	/**
	 * Compute the normalized discounted cumulative gain (NDCG) of a list of
	 * ranked items
	 *
	 * @param <T>
	 *            type of item IDs
	 *
	 * @param rankedList
	 *            a list of ranked item IDs, the highest-ranking item first
	 * @param groundTruth
	 *            a collection of positive/correct item IDs
	 * @return the NDCG for the given data
	 */
	public static <T> double nDCG(List<T> rankedList, List<T> groundTruth) {
		double dcg = 0;
		double idcg = IDCG(groundTruth.size());

		for (int i = 0, n = rankedList.size(); i < n; i++) {
			T item_id = rankedList.get(i);

			if (!groundTruth.contains(item_id))
				continue;

			// compute DCG part: gain 1 discounted by log2(rank + 1)
			int rank = i + 1;
			dcg += 1 / Maths.log(rank + 1, 2);
		}

		return dcg / idcg;
	}

	/**
	 * Compute the ideal DCG given the number of positive items
	 *
	 * @param n
	 *            the number of positive items
	 * @return the ideal DCG
	 */
	public static double IDCG(int n) {
		double idcg = 0;

		for (int i = 0; i < n; i++)
			idcg += 1 / Maths.log(i + 2, 2);

		return idcg;
	}

	/**
	 * Compute the reciprocal rank of a list of ranked items
	 *
	 * @param <T>
	 *            type of item IDs
	 *
	 * @param rankedList
	 *            a list of ranked item IDs, the highest-ranking item first
	 * @param groundTruth
	 *            a collection of positive/correct item IDs
	 * @return the reciprocal rank of the first relevant item; 0 if none is
	 *         relevant
	 */
	public static <T> double RR(List<T> rankedList, List<T> groundTruth) {

		for (int i = 0, n = rankedList.size(); i < n; i++) {
			T item = rankedList.get(i);
			if (groundTruth.contains(item))
				return 1 / (i + 1.0);
		}

		return 0;
	}

	/**
	 * Compute the area under the ROC curve (AUC) of a list of ranked items
	 *
	 * @param <T>
	 *            type of item IDs
	 *
	 * @param rankedList
	 *            a list of ranked item IDs, the highest-ranking item first
	 * @param groundTruth
	 *            a collection of positive/correct item IDs
	 * @param num_dropped_items
	 *            the number of relevant items that were not ranked (considered
	 *            to be ranked below all ranked_items)
	 * @return the AUC for the given data; 0.5 when there are no evaluable pairs
	 */
	public static <T> double AUC(List<T> rankedList, List<T> groundTruth, int num_dropped_items) {
		// number of relevant items that actually appear in the ranking
		int num_rele_items = Lists.overlapSize(groundTruth, rankedList);
		int num_eval_items = rankedList.size() + num_dropped_items;
		int num_eval_pairs = (num_eval_items - num_rele_items) * num_rele_items;

		if (num_eval_pairs < 0) {
			// NOTE(review): hard exit on inconsistent inputs; callers cannot recover
			Logs.error("num_eval_pairs cannot be less than 0");
			System.exit(-1);
		}

		if (num_eval_pairs == 0)
			return 0.5;

		// count (irrelevant, relevant) pairs ordered correctly: each irrelevant
		// item ranked below `hits` relevant items contributes `hits` pairs
		int num_correct_pairs = 0;
		int hits = 0;
		for (T item_id : rankedList)
			if (!groundTruth.contains(item_id))
				num_correct_pairs += hits;
			else
				hits++;

		// relevant items missing from the ranking count against dropped items
		int num_miss_items = Lists.exceptSize(groundTruth, rankedList);
		num_correct_pairs += hits * (num_dropped_items - num_miss_items);

		return (num_correct_pairs + 0.0) / num_eval_pairs;
	}

	/**
	 * Asymmetric loss function: the asymmetric loss captures the fact that
	 * recommending bad movies as good movies is worse than recommending good
	 * movies as bad.
	 *
	 * @param rate
	 *            real rating
	 * @param pred
	 *            predicted rating
	 * @param minRate
	 *            minimum rating scale
	 * @param maxRate
	 *            maximum rating scale
	 * @return Asymmetric loss value
	 */
	/*
	 * Example of asymmetric loss matrix:
	 *
	 * {0, 0, 0, 7.5, 10, 12.5}, {0, 0, 0, 4, 6, 8}, {0, 0, 0, 1.5, 3, 4.5}, {3,
	 * 2, 1, 0, 0, 0}, {4, 3, 2, 0, 0, 0}, {5, 4, 3, 0, 0, 0}
	 */
	public static double ASYMMLoss(double rate, double pred, double minRate, double maxRate) {
		// median value splits the scale into "bad" (<= med) and "good" (> med)
		double med = (minRate + maxRate) / 2.0;
		double loss = 0;

		if (rate <= med && pred <= med) {
			// both bad: no loss
			loss = 0;
		} else if (rate > med && pred <= med) {
			// good movies recommended as bad
			loss = rate - pred;
		} else if (rate <= med && pred > med) {
			// bad movies recommended as good, more penalty
			loss = (pred - rate) * (1 + (med - rate + 1) * 0.5);
		} else {
			// both good: no loss
			loss = 0;
		}

		return loss;
	}
}
| 8,835 | 26.356037 | 109 | java |
librec | librec-master/librec/src/main/java/librec/util/Lists.java | // Copyright (C) 2014-2015 Guibing Guo
//
// This file is part of LibRec.
//
// LibRec is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// LibRec is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with LibRec. If not, see <http://www.gnu.org/licenses/>.
//
package librec.util;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import librec.util.FileIO.Converter;
/**
* This class is for the operations of arrays or collections
*
* @author Felix
*
*/
public class Lists {

	/**
	 * @return the proper initial size for a target given capacity, based on the default "load factor=0.7"
	 */
	public static int initSize(int capacity) {
		return (int) (Math.ceil(capacity / 0.7));
	}

	/** @return the proper initial size for holding all elements of {@code collection} */
	public static <E> int initSize(Collection<E> collection) {
		return initSize(collection.size());
	}

	/**
	 * Rearrange the elements of an int array in random order (in-place
	 * Fisher-Yates shuffle).
	 */
	public static void shaffle(int[] data) {
		int N = data.length;

		if (N <= 1)
			return;

		for (int i = 0; i < N; i++) {
			// swap position i with a uniform pick from [i, N)
			int j = Randoms.uniform(i, N);

			int swap = data[i];
			data[i] = data[j];
			data[j] = swap;
		}
	}

	/**
	 * Rearrange the elements of a double array in random order (in-place
	 * Fisher-Yates shuffle).
	 */
	public static void shaffle(double[] data) {
		int N = data.length;

		if (N <= 1)
			return;

		for (int i = 0; i < N; i++) {
			// swap position i with a uniform pick from [i, N)
			int j = Randoms.uniform(i, N);

			double swap = data[i];
			data[i] = data[j];
			data[j] = swap;
		}
	}

	/**
	 * Rearrange the elements of a list in random order (in-place Fisher-Yates
	 * shuffle).
	 */
	public static <T> void shaffle(List<T> data) {
		int N = data.size();

		if (N <= 1)
			return;

		for (int i = 0; i < N; i++) {
			// swap position i with a uniform pick from [i, N)
			int j = Randoms.uniform(i, N);

			T swap = data.get(i);
			data.set(i, data.get(j));
			data.set(j, swap);
		}
	}

	/**
	 * @return a new list holding the first n elements of list {@code data}
	 *         (fewer if {@code data} is shorter)
	 */
	public static <T> List<T> subset(List<T> data, int n) {
		List<T> ts = new ArrayList<>();
		for (int i = 0; i < data.size(); i++) {
			ts.add(data.get(i));

			if (ts.size() >= n)
				break;
		}

		return ts;
	}

	/**
	 * @return a new list of the intersection of two lists: list1 and list2
	 */
	public static <T> List<T> intersect(List<T> list1, List<T> list2) {
		List<T> ts = new ArrayList<>();
		for (T t : list1) {
			if (list2.contains(t))
				ts.add(t);
		}

		return ts;
	}

	/**
	 * @return the number of common items of two lists: list1 and list2
	 */
	public static <T> int overlapSize(List<T> list1, List<T> list2) {
		int res = 0;
		for (T t : list1) {
			if (list2.contains(t))
				res++;
		}

		return res;
	}

	/**
	 * Note: if you need to operate on the original list, it's better to use the method "retainAll" or "removeAll"
	 *
	 * @return a new list with the elements of list1 that are not in list2
	 */
	public static <T> List<T> except(List<T> list1, List<T> list2) {
		List<T> ts = new ArrayList<>();
		for (T t : list1) {
			if (!list2.contains(t))
				ts.add(t);
		}

		return ts;
	}

	/**
	 * @return the number of elements in the first list but not in the second list
	 */
	public static <T> int exceptSize(List<T> list1, List<T> list2) {
		int res = 0;
		for (T t : list1) {
			if (!list2.contains(t))
				res++;
		}

		return res;
	}

	/**
	 * @return whether list is empty: null or no elements insides
	 */
	public static <T> boolean isEmpty(List<T> ts) {
		if (ts == null || ts.size() < 1)
			return true;

		return false;
	}

	/**
	 * Turn a collection of data into a double array
	 *
	 * @param data
	 *            a collection of data
	 * @return a double array; null if {@code data} is null or empty
	 */
	public static double[] toArray(Collection<? extends Number> data) {
		if (data == null || data.size() < 1)
			return null;

		double da[] = new double[data.size()];
		int i = 0;
		for (Number d : data)
			da[i++] = d.doubleValue();

		return da;
	}

	/**
	 * Turn a double array into a {@code List<Double>} object
	 *
	 * @param data
	 *            a double array
	 * @return a {@code List<Double>} object; null if {@code data} is null or empty
	 */
	public static List<Double> toList(double[] data) {
		if (data == null || data.length < 1)
			return null;

		List<Double> da = new ArrayList<>();
		for (double d : data)
			da.add(d);

		return da;
	}

	/**
	 * Transform an array into a list, converting each element with {@code c}.
	 *
	 * @return a list of converted elements; null if {@code data} is null or empty
	 */
	public static <K, T> List<T> toList(K[] data, Converter<K, T> c) throws Exception {
		if (data == null || data.length < 1)
			return null;

		List<T> da = new ArrayList<>();
		for (K d : data)
			da.add(c.transform(d));

		return da;
	}

	/**
	 * Convert int array to int list
	 */
	public static List<Integer> toList(int[] data) {
		List<Integer> da = new ArrayList<>();
		for (Integer d : data)
			da.add(d);

		return da;
	}

	/**
	 * sort an {@code Map<K, V extends Comparable<? extends V>} map object
	 *
	 * <p>
	 * <strong>Remark: </strong> note that this method may be memory-consuming as it needs to make an ArrayList copy of
	 * input Map data. Instead, we suggest to store original data in List<Map.Entry<K,V>> and use sortList() method to
	 * avoid object copying.
	 * </p>
	 *
	 * @param data
	 *            map data
	 * @param inverse
	 *            descending if true; otherwise ascending
	 * @return a sorted list
	 *
	 */
	public static <K, V extends Comparable<? super V>> List<Map.Entry<K, V>> sortMap(Map<K, V> data,
			final boolean inverse) {
		// According to tests, LinkedList is slower than ArrayList
		List<Map.Entry<K, V>> pairs = new ArrayList<>(data.entrySet());
		sortList(pairs, inverse);

		return pairs;
	}

	/**
	 * sort a map object: {@code Map<K, V extends Comparable<? extends V>}
	 *
	 * @param data
	 *            map data
	 * @return an ascending sorted list
	 */
	public static <K, V extends Comparable<? super V>> List<Map.Entry<K, V>> sortMap(Map<K, V> data) {
		return sortMap(data, false);
	}

	/**
	 * sort a list of objects in-place: {@code List<Map.Entry<K, V extends Comparable<? extends V>>}
	 *
	 * @param data
	 *            map data
	 * @param inverse
	 *            descending if true; otherwise ascending
	 */
	public static <K, V extends Comparable<? super V>> void sortList(List<Map.Entry<K, V>> data, final boolean inverse) {
		Collections.sort(data, new Comparator<Map.Entry<K, V>>() {
			@Override
			public int compare(Entry<K, V> a, Entry<K, V> b) {
				// compare by value; negate for descending order
				int res = (a.getValue()).compareTo(b.getValue());
				return inverse ? -res : res;
			}
		});
	}

	/**
	 * sort a list of entries in-place, ascending by value
	 *
	 * @param data
	 *            map data
	 */
	public static <K, V extends Comparable<? super V>> void sortList(List<Map.Entry<K, V>> data) {
		sortList(data, false);
	}
}
| 6,992 | 21.129747 | 118 | java |
librec | librec-master/librec/src/main/java/librec/util/Logs.java | // Copyright (C) 2014-2015 Guibing Guo
//
// This file is part of LibRec.
//
// LibRec is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// LibRec is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with LibRec. If not, see <http://www.gnu.org/licenses/>.
//
package librec.util;
import org.apache.log4j.Level;
import org.apache.log4j.PropertyConfigurator;
import org.apache.log4j.xml.DOMConfigurator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Apply delegate mode
*
* @author guoguibing
*
*/
public class Logs {

	// single delegate logger shared by all static methods
	private final static Logger logger = LoggerFactory.getLogger(Logs.class);

	private static String conf = null;

	// default configuration: prefer log4j.properties, fall back to log4j.xml
	static {
		if ((conf = FileIO.getResource("log4j.properties")) != null)
			config(conf, false);
		else if ((conf = FileIO.getResource("log4j.xml")) != null)
			config(conf, true);
	}

	/**
	 * Configure log4j from a properties or XML file.
	 *
	 * @param config
	 *            path to the configuration file
	 * @param isXml
	 *            true if the file is XML; false for a properties file
	 * @return the shared logger instance
	 */
	public static Logger config(String config, boolean isXml) {
		if (isXml)
			DOMConfigurator.configure(config);
		else
			PropertyConfigurator.configure(config);

		return logger;
	}

	public static void debug(double data) {
		logger.debug(Strings.toString(data));
	}

	public static void debug(Object msg) {
		// consistency fix: null-safe like info(Object), instead of throwing NPE
		if (msg == null)
			logger.debug("");
		else
			logger.debug(msg.toString());
	}

	public static void debug(String msg) {
		logger.debug(msg);
	}

	public static void debug(String format, Object arg) {
		logger.debug(format, arg);
	}

	public static void debug(String format, Object... args) {
		logger.debug(format, args);
	}

	public static void debug() {
		debug("");
	}

	public static void error() {
		error("");
	}

	public static void warn() {
		warn("");
	}

	public static void info() {
		info("");
	}

	public static void info(double data) {
		logger.info(Strings.toString(data));
	}

	public static void info(Object msg) {
		if (msg == null)
			logger.info("");
		else
			logger.info(msg.toString());
	}

	public static void info(String format, Object arg) {
		logger.info(format, arg);
	}

	public static void info(String format, Object... args) {
		logger.info(format, args);
	}

	public static void error(double data) {
		logger.error(Strings.toString(data));
	}

	public static void error(Object msg) {
		// consistency fix: null-safe like info(Object), instead of throwing NPE
		if (msg == null)
			logger.error("");
		else
			logger.error(msg.toString());
	}

	public static void warn(String msg) {
		logger.warn(msg);
	}

	public static void warn(String format, Object arg) {
		logger.warn(format, arg);
	}

	public static void warn(String format, Object... args) {
		logger.warn(format, args);
	}

	public static void warn(double data) {
		logger.warn(Strings.toString(data));
	}

	public static void warn(Object msg) {
		// consistency fix: null-safe like info(Object), instead of throwing NPE
		if (msg == null)
			logger.warn("");
		else
			logger.warn(msg.toString());
	}

	public static void error(String msg) {
		logger.error(msg);
	}

	public static void error(String format, Object arg) {
		logger.error(format, arg);
	}

	public static void error(String format, Object... args) {
		logger.error(format, args);
	}

	/** Silence all logging output. */
	public static void off() {
		org.apache.log4j.Logger.getRootLogger().setLevel(Level.OFF);
	}

	/** Re-enable logging output at DEBUG level. */
	public static void on() {
		org.apache.log4j.Logger.getRootLogger().setLevel(Level.DEBUG);
	}
}
| 3,465 | 21.076433 | 74 | java |
librec | librec-master/librec/src/main/java/librec/intf/Recommender.java | // Copyright (C) 2014 Guibing Guo
//
// This file is part of LibRec.
//
// LibRec is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// LibRec is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with LibRec. If not, see <http://www.gnu.org/licenses/>.
//
package librec.intf;
import java.util.AbstractMap.SimpleImmutableEntry;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import librec.data.AddConfiguration;
import librec.data.Configuration;
import librec.data.DataDAO;
import librec.data.DataSplitter;
import librec.data.MatrixEntry;
import librec.data.SparseMatrix;
import librec.data.SparseVector;
import librec.data.SymmMatrix;
import librec.metric.IRatingMetric;
import librec.util.Dates;
import librec.util.Debug;
import librec.util.FileConfiger;
import librec.util.FileIO;
import librec.util.LineConfiger;
import librec.util.Lists;
import librec.util.Logs;
import librec.util.Measures;
import librec.util.Sims;
import librec.util.Stats;
import librec.metric.MetricCollection;
import librec.metric.ITimeMetric;
import com.google.common.base.Stopwatch;
import com.google.common.base.Strings;
import com.google.common.cache.LoadingCache;
/**
* General recommenders
*
* @author Guibing Guo
*/
@Configuration
public abstract class Recommender implements Runnable {
	/************************************ Static parameters for all recommenders ***********************************/
	// configuration file reader shared by all recommenders
	public static FileConfiger cf;
	// matrix of rating data
	public static SparseMatrix rateMatrix, timeMatrix;
	// default temporary file directory
	public static String tempDirPath;
	// params used for multiple runs
	public static Map<String, List<Float>> params = new HashMap<>();
	// Guava cache configuration (see CacheBuilderSpec syntax)
	protected static String cacheSpec;
	// number of cpu cores used for parallelization
	protected static int numCPUs;
	// whether to print verbose progress information
	protected static boolean verbose = true;
	// line configer for item ranking, evaluation
	protected static LineConfiger rankOptions, algoOptions;
	// is ranking/rating prediction
	public static boolean isRankingPred;
	// threshold to binarize ratings
	public static float binThold;
	// the ratio of validation data split from training data
	public static float validationRatio;
	// is diversity-based measures used
	protected static boolean isDiverseUsed;
	// early-stop criteria
	protected static String earlyStopMeasure = null;
	// is save model
	protected static boolean isSaveModel = false;
	// is split data by date
	protected static boolean isSplitByDate;
	// view of rating predictions ("all" or "cold-start")
	public static String view;
	// rate DAO object
	public static DataDAO rateDao;
	// number of users, items, ratings
	protected static int numUsers, numItems, numRates;
	// number of recommended items
	protected static int numRecs, numIgnore;
	// a list of rating scales
	protected static List<Double> ratingScale;
	// number of rating levels
	protected static int numLevels;
	// Maximum, minimum values of rating scales
	protected static double maxRate, minRate;
	// ratings' timestamps
	public static SparseMatrix testTimeMatrix;
	// minimum, maximum timestamp
	protected static long minTimestamp, maxTimestamp;
	// init mean and standard deviation
	protected static double initMean, initStd;
	// small value for initialization
	protected static double smallValue = 0.01;
	// number of nearest neighbors
	protected static int knn;
	// similarity measure (key "similarity"; e.g. PCC, COS, MSD)
	protected static String similarityMeasure;
	// shrinkage factor for similarity computation (key "num.shrinkage")
	protected static int similarityShrinkage;
	/**
	 * An indicator of initialization of static fields. This enables us to control when static fields are initialized,
	 * while a "static block" would always be executed. The latter could cause unexpected exceptions when multiple runs
	 * (with different configuration files) are conducted sequentially, because some static settings would not be
	 * overridden in such a "static block".
	 */
	public static boolean resetStatics = true;
	/************************************ Recommender-specific parameters ****************************************/
	// algorithm's name
	public String algoName;
	// current fold
	protected int fold;
	// fold information
	protected String foldInfo;
	// is output recommendation results
	protected boolean isResultsOut = true;
	// user-vector cache, item-vector cache
	protected LoadingCache<Integer, SparseVector> userCache, itemCache;
	// user-items cache, item-users cache
	protected LoadingCache<Integer, List<Integer>> userItemsCache, itemUsersCache;
	// rating matrix for training, validation and test
	protected SparseMatrix trainMatrix, validationMatrix, testMatrix;
	// upper symmetric matrix of item-item correlations
	protected SymmMatrix corrs;
	// performance measures
	//public Map<Measure, Double> measures;
	// RB Replace old Map with dedicated class for greater flexibility
	public MetricCollection measures;
	// global average of training rates
	protected double globalMean;
	/**
	 * Recommendation measures
	 *
	 * NOTE(review): superseded by MetricCollection — consider deleting this commented-out enum.
	 */
	//public enum Measure {
	/* prediction-based measures */
	// MAE, RMSE, NMAE, rMAE, rRMSE, MPE, Perplexity,
	/* ranking-based measures */
	// D5, D10, Pre5, Pre10, Rec5, Rec10, MAP, MRR, NDCG, AUC,
	/* execution time */
	// TrainTime, TestTime,
	/* loss value */
	// Loss
	//}
	/**
	 * Constructor for Recommender: performs one-time static initialization (rating scale, evaluation/ranking options),
	 * configures the metric collection, optionally splits off a validation set, and computes the global rating mean.
	 *
	 * @param trainMatrix
	 *            train matrix
	 * @param testMatrix
	 *            test matrix
	 * @param fold
	 *            fold index; values > 0 are reported in log messages and output file names
	 */
	public Recommender(SparseMatrix trainMatrix, SparseMatrix testMatrix, int fold) {
		// config recommender: both must have been set up by the driver before construction
		if (cf == null || rateMatrix == null) {
			Logs.error("Recommender is not well configed");
			System.exit(-1);
		}
		// static initialization (reset), only done once
		if (resetStatics) {
			// change the indicator
			resetStatics = false;
			ratingScale = rateDao.getRatingScale();
			minRate = ratingScale.get(0);
			maxRate = ratingScale.get(ratingScale.size() - 1);
			numLevels = ratingScale.size();
			numUsers = rateDao.numUsers();
			numItems = rateDao.numItems();
			// ratings' timestamps
			minTimestamp = rateDao.getMinTimestamp();
			maxTimestamp = rateDao.getMaxTimestamp();
			if (testTimeMatrix == null)
				testTimeMatrix = timeMatrix;
			initMean = 0.0;
			initStd = 0.1;
			cacheSpec = cf.getString("guava.cache.spec", "maximumSize=200,expireAfterAccess=2m");
			rankOptions = cf.getParamOptions("item.ranking");
			isRankingPred = rankOptions.isMainOn();
			isDiverseUsed = rankOptions.contains("-diverse");
			numRecs = rankOptions.getInt("-topN", -1);
			numIgnore = rankOptions.getInt("-ignore", -1);
			LineConfiger evalOptions = cf.getParamOptions("evaluation.setup");
			view = evalOptions.getString("--test-view", "all");
			validationRatio = evalOptions.getFloat("-v", 0.0f);
			isSplitByDate = evalOptions.contains("--by-date");
			earlyStopMeasure = evalOptions.getString("--early-stop");
			int numProcessors = Runtime.getRuntime().availableProcessors();
			numCPUs = evalOptions.getInt("-cpu", numProcessors);
			// output options
			LineConfiger outputOptions = cf.getParamOptions("output.setup");
			if (outputOptions != null) {
				verbose = outputOptions.isOn("-verbose", true);
				isSaveModel = outputOptions.contains("--save-model");
			}
			knn = cf.getInt("num.neighbors", 20);
			similarityMeasure = cf.getString("similarity", "PCC");
			similarityShrinkage = cf.getInt("num.shrinkage", 30);
		}
		// 2016/8/2 RB Added metric configuration
		// These are class names
		LineConfiger metricOptions = cf.getParamOptions("metric.options");
		List<String> metrics;
		boolean defaultMetrics = false;
		if ((metricOptions == null) || metricOptions.contains("--rating")) {
			metrics = Arrays.asList(MetricCollection.RatingMetrics);
			defaultMetrics = true;
		} else if (metricOptions.contains("--all")) {
			metrics = Arrays.asList(MetricCollection.AllMetrics);
			defaultMetrics = true;
		} else {
			metrics = metricOptions.getOptions("-metrics");
		}
		List<String> metricNames = new ArrayList<String>();
		for (String name : metrics) {
			// If it does not have a . separator for the class name, we assume it is one of the
			// built-in classes and append the librec class info.
			if (!name.contains(".")) {
				name = "librec.metric." + name;
			}
			metricNames.add(name);
		}
		// These are always included
		// NOTE(review): only added for custom metric lists; presumably the default arrays already
		// contain the time metrics — confirm against MetricCollection.RatingMetrics/AllMetrics.
		if (!defaultMetrics) {
			metricNames.add("librec.metric.TrainTime");
			metricNames.add("librec.metric.TestTime");
		}
		try {
			measures = new MetricCollection(metricNames);
		} catch (Exception e) {
			Logs.debug("Failed to initialize metrics: " + e);
			System.exit(-1);
		}
		// training, validation, test data
		if (validationRatio > 0 && validationRatio < 1) {
			DataSplitter ds = new DataSplitter(trainMatrix);
			double ratio = 1 - validationRatio;
			SparseMatrix[] trainSubsets = isSplitByDate ? ds.getRatioByRatingDate(ratio, timeMatrix) : ds
					.getRatioByRating(ratio);
			this.trainMatrix = trainSubsets[0];
			this.validationMatrix = trainSubsets[1];
		} else {
			this.trainMatrix = trainMatrix;
		}
		this.testMatrix = testMatrix;
		// fold info
		this.fold = fold;
		foldInfo = fold > 0 ? " fold [" + fold + "]" : "";
		// whether to write out results
		LineConfiger outputOptions = cf.getParamOptions("output.setup");
		if (outputOptions != null) {
			isResultsOut = outputOptions.isMainOn();
		}
		// global mean
		// NOTE(review): computed from the full trainMatrix parameter, i.e. including any rows that
		// were just moved into validationMatrix — confirm this is intended.
		numRates = trainMatrix.size();
		globalMean = trainMatrix.sum() / numRates;
		// class name as the default algorithm name
		setAlgoName(this.getClass().getSimpleName());
		// compute item-item correlations: only allocated when diversity measures are requested
		if (measures.hasRankingMetrics() && measures.hasDiversityMetrics()) {
			corrs = new SymmMatrix(numItems);
		}
	}
public void run() {
try {
execute();
} catch (Exception e) {
// capture error message
Logs.error(e.getMessage());
e.printStackTrace();
}
}
	/**
	 * execution method of a recommender: trains (or loads) the model, optionally validates, evaluates on the test
	 * data according to the configured metrics, and logs/saves the results.
	 */
	public void execute() throws Exception {
		// stopwatch measures wall-clock time; split into train and test portions below
		Stopwatch sw = Stopwatch.createStarted();
		if (Debug.ON) {
			// learn a recommender model
			initModel();
			// show algorithm's configuration
			printAlgoConfig();
			// build the model
			buildModel();
			// post-processing after building a model, e.g., release intermediate memory to avoid memory leak
			postModel();
		} else {
			/**
			 * load a learned model: this code will not be executed unless "Debug.OFF" mainly for the purpose of
			 * exemplifying how to use the saved models
			 */
			loadModel();
		}
		long trainTime = sw.elapsed(TimeUnit.MILLISECONDS);
		// validation (only when a validation split was configured in the constructor)
		if (validationRatio > 0 && validationRatio < 1) {
			validateModel();
			// validation counts towards training time
			trainTime = sw.elapsed(TimeUnit.MILLISECONDS);
		}
		measures.init(this);
		// evaluation
		if (verbose)
			Logs.debug("{}{} evaluate test data ... ", algoName, foldInfo);
		// TODO: to predict ratings only, or do item recommendations only
		if (measures.hasRankingMetrics() && (measures.hasRatingMetrics())) {
			evalRankings();
			evalRatings();
		} else if (measures.hasRatingMetrics()) {
			evalRatings();
		} else if (measures.hasRankingMetrics()) {
			evalRankings();
		} else {
			Logs.debug("No metrics found.");
		}
		String measurements = measures.getEvalResultString();
		sw.stop();
		long testTime = sw.elapsed(TimeUnit.MILLISECONDS) - trainTime;
		// collecting results
		// NOTE(review): assumes TrainTime/TestTime metrics are always present in the collection —
		// confirm against MetricCollection's default metric lists.
		ITimeMetric trainTimeMetric = measures.getTimeMetric("TrainTime");
		ITimeMetric testTimeMetric = measures.getTimeMetric("TestTime");
		trainTimeMetric.setTime(trainTime);
		testTimeMetric.setTime(testTime);
		// added metric names
		String evalInfo = "Metrics: " + measures.getMetricNamesString() + "\n";
		evalInfo += algoName + foldInfo + ": " + measurements + "\tTime: "
				+ trainTimeMetric.getValueAsString() + ", "
				+ testTimeMetric.getValueAsString();
		if (!isRankingPred)
			evalInfo += "\tView: " + view;
		if (fold > 0)
			Logs.debug(evalInfo);
		if (isSaveModel)
			saveModel();
	}
private void printAlgoConfig() {
String algoInfo = toString();
Class<? extends Recommender> cl = this.getClass();
// basic annotation
String algoConfig = cl.getAnnotation(Configuration.class).value();
// additional algorithm-specific configuration
if (cl.isAnnotationPresent(AddConfiguration.class)) {
AddConfiguration add = cl.getAnnotation(AddConfiguration.class);
String before = add.before();
if (!Strings.isNullOrEmpty(before))
algoConfig = before + ", " + algoConfig;
String after = add.after();
if (!Strings.isNullOrEmpty(after))
algoConfig += ", " + after;
}
if (!algoInfo.isEmpty()) {
if (!algoConfig.isEmpty())
Logs.debug("{}: [{}] = [{}]", algoName, algoConfig, algoInfo);
else
Logs.debug("{}: {}", algoName, algoInfo);
}
}
	/**
	 * validate model with held-out validation data; default implementation is a no-op, subclasses may override
	 */
	protected void validateModel() {
	}
	/**
	 * initialize recommender model; default implementation is a no-op, subclasses may override
	 */
	protected void initModel() throws Exception {
	}
	/**
	 * @param algoName
	 *            configuration key of the algorithm
	 * @return the algorithm-specific parameter options, or null if the key is absent from the configuration
	 */
	protected LineConfiger getModelParams(String algoName) {
		return cf.contains(algoName) ? cf.getParamOptions(algoName) : null;
	}
/**
* build user-user or item-item correlation matrix from training data
*
* @param isUser
* whether it is user-user correlation matrix
*
* @return a upper symmetric matrix with user-user or item-item coefficients
*
*/
protected SymmMatrix buildCorrs(boolean isUser) {
Logs.debug("Build {} similarity matrix ...", isUser ? "user" : "item");
int count = isUser ? numUsers : numItems;
SymmMatrix corrs = new SymmMatrix(count);
for (int i = 0; i < count; i++) {
SparseVector iv = isUser ? trainMatrix.row(i) : trainMatrix.column(i);
if (iv.getCount() == 0)
continue;
// user/item itself exclusive
for (int j = i + 1; j < count; j++) {
SparseVector jv = isUser ? trainMatrix.row(j) : trainMatrix.column(j);
double sim = correlation(iv, jv);
if (!Double.isNaN(sim))
corrs.set(i, j, sim);
}
}
return corrs;
}
	/**
	 * Compute the correlation between two vectors using the method specified by configuration key "similarity"
	 * (cached in the {@code similarityMeasure} field; defaults to PCC).
	 *
	 * @param iv
	 *            vector i
	 * @param jv
	 *            vector j
	 * @return the correlation between vectors i and j; NaN if not computable
	 */
	protected double correlation(SparseVector iv, SparseVector jv) {
		return correlation(iv, jv, similarityMeasure);
	}
/**
* Compute the correlation between two vectors for a specific method
*
* @param iv
* vector i
* @param jv
* vector j
* @param method
* similarity method
* @return the correlation between vectors i and j; return NaN if the correlation is not computable.
*/
protected double correlation(SparseVector iv, SparseVector jv, String method) {
// compute similarity
List<Double> is = new ArrayList<>();
List<Double> js = new ArrayList<>();
for (Integer idx : jv.getIndex()) {
if (iv.contains(idx)) {
is.add(iv.get(idx));
js.add(jv.get(idx));
}
}
double sim = 0;
switch (method.toLowerCase()) {
case "cos":
// for ratings along the overlappings
sim = Sims.cos(is, js);
break;
case "cos-binary":
// for ratings along all the vectors (including one-sided 0s)
sim = iv.inner(jv) / (Math.sqrt(iv.inner(iv)) * Math.sqrt(jv.inner(jv)));
break;
case "msd":
sim = Sims.msd(is, js);
break;
case "cpc":
sim = Sims.cpc(is, js, (minRate + maxRate) / 2.0);
break;
case "exjaccard":
sim = Sims.exJaccard(is, js);
break;
case "pcc":
default:
sim = Sims.pcc(is, js);
break;
}
// shrink to account for vector size
if (!Double.isNaN(sim)) {
int n = is.size();
int shrinkage = cf.getInt("num.shrinkage");
if (shrinkage > 0)
sim *= n / (n + shrinkage + 0.0);
}
return sim;
}
	/**
	 * Learning method: override this method to build a model, for a model-based method. Default implementation is
	 * useful for memory-based methods (a no-op).
	 *
	 */
	protected void buildModel() throws Exception {
	}
	/**
	 * After learning model: release some intermediate data to avoid memory leak. Default is a no-op.
	 */
	protected void postModel() throws Exception {
	}
	/**
	 * Serializing a learned model (i.e., variable data) to files. Default is a no-op; subclasses may override.
	 */
	protected void saveModel() throws Exception {
	}
	/**
	 * Deserializing a learned model (i.e., variable data) from files. Default is a no-op; subclasses may override.
	 */
	protected void loadModel() throws Exception {
	}
/**
* determine whether the rating of a user-item (u, j) is used to predicted
*
*/
protected boolean isTestable(int u, int j) {
switch (view) {
case "cold-start":
return trainMatrix.rowSize(u) < 5 ? true : false;
case "all":
default:
return true;
}
}
	/**
	 * Evaluate rating predictions on the test matrix: updates the collection's rating metrics entry by entry and
	 * optionally writes per-entry predictions to a text file under the temporary directory.
	 */
	protected void evalRatings() throws Exception {
		List<String> preds = null;
		String toFile = null;
		if (isResultsOut) {
			preds = new ArrayList<String>(1500);
			preds.add("# userId itemId rating prediction"); // optional: file header
			toFile = tempDirPath + algoName + "-rating-predictions" + foldInfo + ".txt"; // the output-file name
			FileIO.deleteFile(toFile); // delete possibly old files
		}
		// number of test entries actually evaluated (testable and with a non-NaN prediction)
		int numCount = 0;
		for (MatrixEntry me : testMatrix) {
			double rate = me.get();
			int u = me.row();
			int j = me.column();
			if (!isTestable(u, j))
				continue;
			double pred = predict(u, j, true);
			// 2016/8/2 RB Should the metrics handle this?
			if (Double.isNaN(pred))
				continue;
			measures.updateRatingMetrics(u, j, pred, rate, this);
			numCount++;
			// output predictions
			if (isResultsOut) {
				// restore back to the original user/item id
				preds.add(rateDao.getUserId(u) + " " + rateDao.getItemId(j) + " " + rate + " " + (float) pred);
				// flush in batches of 1000 lines to bound memory usage
				if (preds.size() >= 1000) {
					FileIO.writeList(toFile, preds, true);
					preds.clear();
				}
			}
		}
		if (isResultsOut && preds.size() > 0) {
			FileIO.writeList(toFile, preds, true);
			Logs.debug("{}{} has written rating predictions to {}", algoName, foldInfo, toFile);
		}
		measures.computeRatingMetrics(numCount);
	}
/**
* @return the evaluation results of ranking predictions
*/
protected void evalRankings() throws Exception {
int capacity = Lists.initSize(testMatrix.numRows());
// candidate items for all users: here only training items
// use HashSet instead of ArrayList to speedup removeAll() and contains() operations: HashSet: O(1); ArrayList: O(log n).
Set<Integer> candItems = new HashSet<>(trainMatrix.columns());
List<String> preds = null;
String toFile = null;
int numTopNRanks = numRecs < 0 ? 10 : numRecs;
if (isResultsOut) {
preds = new ArrayList<String>(1500);
preds.add("# userId: recommendations in (itemId, ranking score) pairs, where a correct recommendation is denoted by symbol *."); // optional: file header
toFile = tempDirPath
+ String.format("%s-top-%d-items%s.txt", new Object[] { algoName, numTopNRanks, foldInfo }); // the output-file name
FileIO.deleteFile(toFile); // delete possibly old files
}
if (verbose)
Logs.debug("{}{} has candidate items: {}", algoName, foldInfo, candItems.size());
// ignore items for all users: most popular items
if (numIgnore > 0) {
List<Map.Entry<Integer, Integer>> itemDegs = new ArrayList<>();
for (Integer j : candItems) {
itemDegs.add(new SimpleImmutableEntry<Integer, Integer>(j, trainMatrix.columnSize(j)));
}
Lists.sortList(itemDegs, true);
int k = 0;
for (Map.Entry<Integer, Integer> deg : itemDegs) {
// ignore these items from candidate items
candItems.remove(deg.getKey());
if (++k >= numIgnore)
break;
}
}
// for each test user
int numCount = 0;
for (int u = 0, um = testMatrix.numRows(); u < um; u++) {
if (verbose && ((u + 1) % 100 == 0))
Logs.debug("{}{} evaluates progress: {} / {}", algoName, foldInfo, u + 1, um);
// number of candidate items for all users
int numCands = candItems.size();
// get positive items from test matrix
List<Integer> testItems = testMatrix.getColumns(u);
List<Integer> correctItems = new ArrayList<>();
// intersect with the candidate items
for (Integer j : testItems) {
if (candItems.contains(j))
correctItems.add(j);
}
if (correctItems.size() == 0)
continue; // no testing data for user u
// remove rated items from candidate items
List<Integer> ratedItems = trainMatrix.getColumns(u);
// predict the ranking scores (unordered) of all candidate items
List<Map.Entry<Integer, Double>> itemScores = new ArrayList<>(Lists.initSize(candItems));
for (final Integer j : candItems) {
// item j is not rated
if (!ratedItems.contains(j)) {
final double rank = ranking(u, j);
if (!Double.isNaN(rank)) {
itemScores.add(new SimpleImmutableEntry<Integer, Double>(j, rank));
}
} else {
numCands--;
}
}
if (itemScores.size() == 0)
continue; // no recommendations available for user u
// order the ranking scores from highest to lowest: List to preserve orders
Lists.sortList(itemScores, true);
List<Map.Entry<Integer, Double>> recomd = (numRecs <= 0 || itemScores.size() <= numRecs) ? itemScores
: itemScores.subList(0, numRecs);
List<Integer> rankedItems = new ArrayList<>();
StringBuilder sb = new StringBuilder();
int count = 0;
for (Map.Entry<Integer, Double> kv : recomd) {
Integer item = kv.getKey();
rankedItems.add(item);
if (isResultsOut && count < numTopNRanks) {
// restore back to the original item id
sb.append("(").append(rateDao.getItemId(item));
if (testItems.contains(item))
sb.append("*"); // indicating correct recommendation
sb.append(", ").append(kv.getValue().floatValue()).append(")");
count++;
if (count < numTopNRanks)
sb.append(", ");
}
}
numCount++;
int numDropped = numCands - rankedItems.size();
measures.updateRankingMetrics(rankedItems, correctItems, numDropped, this);
// output predictions
if (isResultsOut) {
// restore back to the original user id
preds.add(rateDao.getUserId(u) + ": " + sb.toString());
if (preds.size() >= 1000) {
FileIO.writeList(toFile, preds, true);
preds.clear();
}
}
}
// write results out first
if (isResultsOut && preds.size() > 0) {
FileIO.writeList(toFile, preds, true);
Logs.debug("{}{} has written item recommendations to {}", algoName, foldInfo, toFile);
}
// measure the performance
measures.computeRankingMetrics(numCount);
}
/**
* predict a specific rating for user u on item j. It is useful for evalution which requires predictions are
* bounded.
*
* @param u
* user id
* @param j
* item id
* @param bound
* whether to bound the prediction
* @return prediction
*/
public double predict(int u, int j, boolean bound) throws Exception {
double pred = predict(u, j);
if (bound) {
if (pred > maxRate)
pred = maxRate;
if (pred < minRate)
pred = minRate;
}
return pred;
}
	/**
	 * predict a specific rating for user u on item j, note that the prediction is not bounded. It is useful for
	 * building models with no need to bound predictions.
	 *
	 * @param u
	 *            user id
	 * @param j
	 *            item id
	 * @return raw prediction without being bounded; the default is simply the global training mean
	 */
	public double predict(int u, int j) throws Exception {
		return globalMean;
	}
	/**
	 * Perplexity of rating r for user u on item j; default implementation returns 0, subclasses that support the
	 * Perplexity measure should override.
	 */
	public double perplexity(int u, int j, double r) throws Exception {
		return 0;
	}
	/**
	 * predict a ranking score for user u on item j: default case using the unbounded predicted rating values
	 *
	 * @param u
	 *            user id
	 *
	 * @param j
	 *            item id
	 * @return a ranking score for user u on item j (higher means recommended earlier)
	 */
	public double ranking(int u, int j) throws Exception {
		return predict(u, j, false);
	}
/**
*
* @param rankedItems
* the list of ranked items to be recommended
* @param cutoff
* cutoff in the list
* @param corrs
* correlations between items
* @return diversity at a specific cutoff position
*/
public double diverseAt(List<Integer> rankedItems, int cutoff) {
int num = 0;
double sum = 0.0;
for (int id = 0; id < cutoff; id++) {
int i = rankedItems.get(id);
SparseVector iv = trainMatrix.column(i);
for (int jd = id + 1; jd < cutoff; jd++) {
int j = rankedItems.get(jd);
double corr = corrs.get(i, j);
if (corr == 0) {
// if not found
corr = correlation(iv, trainMatrix.column(j));
if (!Double.isNaN(corr))
corrs.set(i, j, corr);
}
if (!Double.isNaN(corr)) {
sum += (1 - corr);
num++;
}
}
}
return 0.5 * (sum / num);
}
/**
* Below are a set of mathematical functions. As many recommenders often adopts them, for conveniency's sake, we put
* these functions in the base Recommender class, though they belong to Math class.
*
*/
/**
* logistic function g(x)
*/
protected double g(double x) {
return 1.0 / (1 + Math.exp(-x));
}
/**
* gradient value of logistic function g(x)
*/
protected double gd(double x) {
return g(x) * g(-x);
}
/**
* @param x
* input value
* @param mu
* mean of normal distribution
* @param sigma
* standard deviation of normation distribution
*
* @return a gaussian value with mean {@code mu} and standard deviation {@code sigma};
*/
protected double gaussian(double x, double mu, double sigma) {
return Math.exp(-0.5 * Math.pow(x - mu, 2) / (sigma * sigma));
}
/**
* normalize a rating to the region (0, 1)
*/
protected double normalize(double rate) {
return (rate - minRate) / (maxRate - minRate);
}
/**
* Check if ratings have been binarized; useful for methods that require binarized ratings;
*/
protected void checkBinary() {
if (binThold < 0) {
Logs.error("val.binary.threshold={}, ratings must be binarized first! Try set a non-negative value.",
binThold);
System.exit(-1);
}
}
/**
*
* denormalize a prediction to the region (minRate, maxRate)
*/
protected double denormalize(double pred) {
return minRate + pred * (maxRate - minRate);
}
	/**
	 * useful to print out specific recommender's settings; the default is an empty string, which suppresses the
	 * configuration line in {@code printAlgoConfig}
	 */
	@Override
	public String toString() {
		return "";
	}
	/**
	 * Set a user-specific name of an algorithm, and (re)load the algorithm's parameter options from the
	 * configuration under that name.
	 *
	 * @param algoName
	 *            the display/configuration name of the algorithm
	 */
	protected void setAlgoName(String algoName) {
		this.algoName = algoName;
		// get parameters of an algorithm
		algoOptions = getModelParams(algoName);
	}
}
| 27,517 | 26.739919 | 156 | java |
librec | librec-master/librec/src/main/java/librec/intf/TensorRecommender.java | // Copyright (C) 2014-2015 Guibing Guo
//
// This file is part of LibRec.
//
// LibRec is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// LibRec is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with LibRec. If not, see <http://www.gnu.org/licenses/>.
//
package librec.intf;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import librec.data.Configuration;
import librec.data.MatrixEntry;
import librec.data.SparseMatrix;
import librec.data.SparseTensor;
import librec.data.TensorEntry;
import librec.util.FileIO;
import librec.util.Logs;
import librec.util.Strings;
/**
* Interface for tensor recommenders
*
* @author Guo Guibing
*
*/
@Configuration("factors, lRate, maxLRate, reg, iters, boldDriver")
public class TensorRecommender extends IterativeRecommender {

	/* for all tensors: shared state, read once from the global rating DAO */
	protected static SparseTensor rateTensor;
	protected static int numDimensions, userDimension, itemDimension;
	protected static int[] dimensions;

	/* for a specific recommender */
	protected SparseTensor trainTensor, testTensor;

	static {
		rateTensor = rateDao.getRateTensor();
		numDimensions = rateTensor.numDimensions();
		dimensions = rateTensor.dimensions();

		userDimension = rateTensor.getUserDimension();
		itemDimension = rateTensor.getItemDimension();
	}

	/**
	 * Constructor: splits the global rating tensor into train/test tensors according to the (user, item) entries of
	 * the given test matrix.
	 *
	 * @param trainMatrix
	 *            train matrix
	 * @param testMatrix
	 *            test matrix; each of its (user, item) entries is moved from the train tensor to the test tensor
	 * @param fold
	 *            fold index
	 */
	public TensorRecommender(SparseMatrix trainMatrix, SparseMatrix testMatrix, int fold) throws Exception {
		super(trainMatrix, testMatrix, fold);

		// construct train and test data
		trainTensor = rateTensor.clone();
		testTensor = new SparseTensor(dimensions);
		testTensor.setUserDimension(userDimension);
		testTensor.setItemDimension(itemDimension);

		for (MatrixEntry me : testMatrix) {
			int u = me.row();
			int i = me.column();

			List<Integer> indices = rateTensor.getIndices(u, i);

			for (int index : indices) {
				int[] keys = rateTensor.keys(index);
				// move this tensor entry from the training tensor into the test tensor
				testTensor.set(rateTensor.value(index), keys);
				trainTensor.remove(keys);
			}
		}
	}

	/**
	 * Evaluate rating predictions over the test tensor: updates the collection's rating metrics entry by entry and
	 * optionally writes per-entry predictions to a text file.
	 */
	@Override
	protected void evalRatings() throws Exception {

		List<String> preds = null;
		String toFile = null;
		if (isResultsOut) {
			preds = new ArrayList<String>(1500);
			preds.add("# userId itemId rating prediction"); // optional: file header
			toFile = tempDirPath + algoName + "-rating-predictions" + foldInfo + ".txt"; // the output-file name
			FileIO.deleteFile(toFile); // delete possibly old files
		}

		// Fix: removed dead accumulators (sum_maes, sum_mses, sum_r_maes, sum_r_rmses, numPEs)
		// left over from before the MetricCollection refactoring — none were ever read.
		int numCount = 0;
		for (TensorEntry te : testTensor) {
			double rate = te.get();

			int u = te.key(userDimension);
			int j = te.key(itemDimension);

			if (!isTestable(u, j))
				continue;

			double pred = predict(te.keys(), true);
			if (Double.isNaN(pred))
				continue;

			measures.updateRatingMetrics(u, j, pred, rate, this);
			numCount++;

			// output predictions
			if (isResultsOut) {
				// restore back to the original user/item id
				preds.add(rateDao.getUserId(u) + " " + rateDao.getItemId(j) + " " + rate + " " + (float) pred);
				// flush in batches of 1000 lines to bound memory usage
				if (preds.size() >= 1000) {
					FileIO.writeList(toFile, preds, true);
					preds.clear();
				}
			}
		}

		if (isResultsOut && preds.size() > 0) {
			FileIO.writeList(toFile, preds, true);
			Logs.debug("{}{} has written rating predictions to {}", algoName, foldInfo, toFile);
		}

		measures.computeRatingMetrics(numCount);
	}

	/**
	 * predict the rating for the tensor entry identified by {@code keys}, optionally bounded into the rating scale
	 *
	 * @param keys
	 *            per-dimension indices of the tensor entry
	 * @param bound
	 *            whether to clamp the prediction into [minRate, maxRate]
	 */
	protected double predict(int[] keys, boolean bound) throws Exception {
		double pred = predict(keys);

		if (bound) {
			if (pred > maxRate)
				pred = maxRate;
			if (pred < minRate)
				pred = minRate;
		}

		return pred;
	}

	/**
	 * default tensor prediction: fall back to the (user, item) matrix-based prediction
	 */
	protected double predict(int[] keys) throws Exception {
		return predict(keys[userDimension], keys[itemDimension]);
	}

	@Override
	public String toString() {
		return Strings.toString(new Object[] { numFactors, initLRate, maxLRate, reg, numIters, isBoldDriver });
	}
}
| 4,326 | 26.56051 | 105 | java |
librec | librec-master/librec/src/main/java/librec/intf/GraphicRecommender.java | package librec.intf;
import librec.data.Configuration;
import librec.data.DenseMatrix;
import librec.data.DenseVector;
import librec.data.SparseMatrix;
import librec.util.LineConfiger;
import librec.util.Logs;
import librec.util.Strings;
import com.google.common.collect.Table;
/**
* Probabilistic Graphic Models
*
* @author guoguibing
*
*/
@Configuration("iters, burn.in, sample.lag")
public class GraphicRecommender extends Recommender {

	/**
	 * line configer for general probabilistic graphic models (key "pgm.setup"); may be null
	 */
	protected static LineConfiger pgmOptions;

	/**
	 * number of topics
	 */
	protected static int numFactors;

	/**
	 * Dirichlet hyper-parameters of user-topic distribution: typical value is 50/K
	 */
	protected static float initAlpha;

	/**
	 * Dirichlet hyper-parameters of topic-item distribution, typical value is 0.01
	 */
	protected static float initBeta;

	/**
	 * burn-in period
	 */
	protected static int burnIn;

	/**
	 * sample lag (if -1 only one sample taken)
	 */
	protected static int sampleLag;

	/**
	 * maximum number of iterations
	 */
	protected static int numIters;

	/**
	 * intervals for printing verbose information
	 */
	protected static int numIntervals;

	// indicator of initialization of the general recommender
	public static boolean isInitialized = false;

	/*********************************** Method-specific Parameters ************************/
	/**
	 * entry[u, i, k]: topic assignment as sparse structure
	 */
	protected Table<Integer, Integer, Integer> z;

	/**
	 * entry[i, k]: number of tokens assigned to topic k, given item i.
	 */
	protected DenseMatrix Nik;

	/**
	 * entry[k, i]: number of tokens assigned to topic k, given item i.
	 */
	protected DenseMatrix Nki;

	/**
	 * entry[u, k]: number of tokens assigned to topic k, given user u.
	 */
	protected DenseMatrix Nuk;

	/**
	 * entry[k]: number of tokens assigned to topic t.
	 */
	protected DenseVector Nk;

	/**
	 * entry[u]: number of tokens rated by user u.
	 */
	protected DenseVector Nu;

	/**
	 * entry[i]: number of tokens rating item i.
	 */
	protected DenseVector Ni;

	/**
	 * vector of hyperparameters for alpha and beta
	 */
	protected DenseVector alpha, beta;

	/**
	 * cumulative statistics of theta, phi
	 */
	protected DenseMatrix PukSum, PikSum, PkiSum;

	/**
	 * posterior probabilities of parameters
	 *
	 */
	protected DenseMatrix Puk, Pki, Pik;

	/**
	 * size of statistics
	 */
	protected int numStats = 0;

	/**
	 * objective loss
	 */
	protected double loss, lastLoss;

	public GraphicRecommender(SparseMatrix trainMatrix, SparseMatrix testMatrix, int fold) {
		super(trainMatrix, testMatrix, fold);

		// one-time static configuration, shared across folds
		if (!isInitialized) {
			isInitialized = true;

			numFactors = cf.getInt("num.factors", 10);
			numIters = cf.getInt("num.max.iter", 30);

			pgmOptions = cf.getParamOptions("pgm.setup");
			if (pgmOptions != null) {
				burnIn = pgmOptions.getInt("-burn-in");
				sampleLag = pgmOptions.getInt("-sample-lag");
				numIntervals = pgmOptions.getInt("-interval");

				initAlpha = pgmOptions.getFloat("-alpha", 1.0f / numFactors);
				initBeta = pgmOptions.getFloat("-beta", 1.0f / numFactors);

				assert burnIn > 0;
				assert sampleLag > 0;
			}
		}
	}

	/**
	 * Gibbs-sampling style training loop: alternates E-step and M-step, collects statistics after the burn-in
	 * period, and finally estimates the posterior parameters.
	 */
	@Override
	protected void buildModel() throws Exception {

		for (int iter = 1; iter <= numIters; iter++) {

			// E-step: infer parameters
			eStep();

			// M-step: update hyper-parameters
			mStep();

			// get statistics after burn-in.
			// Fix: guard sampleLag > 0 — when "pgm.setup" is absent the field stays 0 and the
			// modulo previously threw ArithmeticException (divide by zero).
			if ((iter > burnIn) && (sampleLag > 0) && (iter % sampleLag == 0)) {
				readoutParams();
				if (isConverged(iter))
					break;
			}

			// Fix: same divide-by-zero guard for the verbose progress interval.
			if (verbose && (numIntervals > 0) && (iter % numIntervals == 0))
				Logs.debug("{}{} runs at iter {}/{}", algoName, foldInfo, iter, numIters);
		}

		// retrieve posterior probability distributions
		estimateParams();
	}

	/**
	 * update the hyper-parameters; default is a no-op, subclasses may override
	 */
	protected void mStep() {
	}

	/**
	 * employing early stopping criteria; default never converges early
	 *
	 * @param iter
	 *            current iteration
	 */
	protected boolean isConverged(int iter) throws Exception {
		return false;
	}

	/**
	 * estimate the model parameters; default is a no-op, subclasses may override
	 */
	protected void estimateParams() {
	}

	/**
	 * parameters estimation: used in the training phase; default is a no-op
	 */
	protected void eStep() {
	}

	/**
	 * parameters inference: used if new user arrives in the test phase; default is a no-op
	 */
	protected void inference() {
	}

	/**
	 * read out parameters for each iteration; default is a no-op
	 */
	protected void readoutParams() {
	}

	@Override
	public String toString() {
		return Strings.toString(new Object[] { numIters, burnIn, sampleLag }, ", ");
	}
}
| 4,533 | 19.151111 | 90 | java |
librec | librec-master/librec/src/main/java/librec/intf/ContextRecommender.java | package librec.intf;
import java.util.Map;
import com.google.common.collect.Table;
import librec.data.ItemContext;
import librec.data.RatingContext;
import librec.data.SparseMatrix;
import librec.data.UserContext;
/**
* Generic recommenders where contextual information is used. The context can be user-, item- and rating-related.
*
* @author guoguibing
*
*/
public class ContextRecommender extends IterativeRecommender {

	// {user, user-context}
	protected static Map<Integer, UserContext> userContexts;
	// {item, item-context}
	protected static Map<Integer, ItemContext> itemContexts;
	// {user, item, rating-context}
	protected static Table<Integer, Integer, RatingContext> ratingContexts;

	// initialization
	static {
		// read context information here
		// NOTE(review): placeholder — no context data is loaded yet, so the maps above remain
		// null until a subclass or future code populates them.
	}

	/**
	 * Constructor: simply delegates to the iterative recommender; context data is expected to be provided via the
	 * static fields above.
	 *
	 * @param trainMatrix
	 *            train matrix
	 * @param testMatrix
	 *            test matrix
	 * @param fold
	 *            fold index
	 */
	public ContextRecommender(SparseMatrix trainMatrix, SparseMatrix testMatrix, int fold) {
		super(trainMatrix, testMatrix, fold);
	}

}
| 913 | 23.052632 | 113 | java |
librec | librec-master/librec/src/main/java/librec/intf/SocialRecommender.java | // Copyright (C) 2014 Guibing Guo
//
// This file is part of LibRec.
//
// LibRec is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// LibRec is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with LibRec. If not, see <http://www.gnu.org/licenses/>.
//
package librec.intf;
import java.util.List;
import librec.data.Configuration;
import librec.data.DataDAO;
import librec.data.SparseMatrix;
import librec.data.SparseVector;
import librec.util.Logs;
import librec.util.Strings;
import com.google.common.cache.LoadingCache;
/**
* Recommenders in which social information is used
*
* @author guoguibing
*
*/
@Configuration("factors, lRate, maxLRate, regB, regU, regI, regS, iters, boldDriver")
public abstract class SocialRecommender extends IterativeRecommender {

    // social data dao
    protected static DataDAO socialDao;

    // socialMatrix: social rate matrix, indicating a user is connecting to a number of other users
    // trSocialMatrix: inverse social matrix, indicating a user is connected by a number of other users
    protected static SparseMatrix socialMatrix;

    // social regularization
    protected static float regS;

    // indicator of static field initialization or reset
    public static boolean resetStatics = true;

    // shared social cache for all social recommenders
    protected LoadingCache<Integer, SparseVector> socialCache;
    protected LoadingCache<Integer, List<Integer>> userFriendsCache;

    // static initialization: load the social network once, shared by all social recommenders
    static {
        String socialPath = cf.getPath("dataset.social");
        Logs.debug("Social dataset: {}", Strings.last(socialPath, 38));

        socialDao = new DataDAO(socialPath, rateDao.getUserIds());

        try {
            socialMatrix = socialDao.readData()[0];
            numUsers = socialDao.numUsers();
            //socialCache = socialMatrix.rowCache(cacheSpec);
        } catch (Exception e) {
            e.printStackTrace();
            System.exit(-1);
        }
    }

    /**
     * Constructs a social recommender over the given train/test split; on the
     * first construction, reads the social regularization weight from the
     * "-s" option (falling back to the general regularization).
     *
     * @param trainMatrix user-item training ratings
     * @param testMatrix  user-item test ratings
     * @param fold        fold index (cross-validation bookkeeping)
     */
    public SocialRecommender(SparseMatrix trainMatrix, SparseMatrix testMatrix, int fold) {
        super(trainMatrix, testMatrix, fold);

        if (resetStatics) {
            resetStatics = false;
            regS = regOptions.getFloat("-s", reg);
        }
    }

    @Override
    public String toString() {
        return Strings.toString(new Object[] { numFactors, initLRate, maxLRate, regB, regU, regI, regS, numIters,
                isBoldDriver });
    }

    /**
     * Decides whether the pair (u, j) belongs to the evaluated test view.
     *
     * @param u user index
     * @param j item index
     * @return true if (u, j) should be evaluated under the configured view
     */
    @Override
    protected boolean isTestable(int u, int j) {
        switch (view) {
        case "cold-start":
            // cold-start users: fewer than 5 ratings in the training set
            return trainMatrix.rowSize(u) < 5;
        case "trust-degree":
            int min_deg = cf.getInt("min.trust.degree");
            int max_deg = cf.getInt("max.trust.degree");
            if (min_deg == -1)
                min_deg = 0;
            if (max_deg == -1)
                max_deg = Integer.MAX_VALUE;

            // degree = indegree (column size) + outdegree (row size)
            int in_deg = socialMatrix.columnSize(u);
            int out_deg = socialMatrix.rowSize(u);
            int deg = in_deg + out_deg;

            // evaluate only users whose total degree lies in [min_deg, max_deg]
            return deg >= min_deg && deg <= max_deg;
        case "all":
        default:
            return true;
        }
    }
}
| 3,351 | 26.933333 | 107 | java |
librec | librec-master/librec/src/main/java/librec/intf/IterativeRecommender.java | // Copyright (C) 2014 Guibing Guo
//
// This file is part of LibRec.
//
// LibRec is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// LibRec is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with LibRec. If not, see <http://www.gnu.org/licenses/>.
//
package librec.intf;
import librec.data.Configuration;
import librec.data.DenseMatrix;
import librec.data.DenseVector;
import librec.data.SparseMatrix;
import librec.util.FileIO;
import librec.util.LineConfiger;
import librec.util.Logs;
import librec.util.Strings;
/**
* Recommenders using iterative learning techniques
*
* @author guoguibing
*
*/
@Configuration("factors, lRate, maxLRate, regB, regU, regI, iters, boldDriver")
public abstract class IterativeRecommender extends Recommender {

    /************************************ Static parameters for all recommenders ***********************************/
    // init, maximum learning rate, momentum
    protected static float initLRate, maxLRate, momentum;
    // line configer for regularization parameters
    protected static LineConfiger regOptions;
    // user, item and bias regularization
    protected static float regU, regI, regB, reg;
    // number of factors
    protected static int numFactors;
    // number of iterations
    protected static int numIters;

    // whether to adjust learning rate automatically
    protected static boolean isBoldDriver;
    // decay of learning rate
    protected static float decay;

    // indicator of static field initialization (true until the first instance is built)
    public static boolean resetStatics = true;

    /************************************ Recommender-specific parameters ****************************************/
    // factorized user-factor matrix
    protected DenseMatrix P;
    // factorized item-factor matrix
    protected DenseMatrix Q;

    // user biases
    protected DenseVector userBias;
    // item biases
    protected DenseVector itemBias;

    // adaptive learn rate
    protected double lRate;
    // objective loss (current and previous iteration)
    protected double loss, last_loss = 0;
    // predictive measure used for early stopping (current and previous iteration)
    protected double measure, last_measure = 0;

    // whether to initialize models with a normal distribution (vs. uniform/default init)
    protected boolean initByNorm;

    /**
     * Constructs an iterative recommender; on the first construction, reads
     * the shared learning-rate and regularization options from the config.
     *
     * @param trainMatrix user-item training ratings
     * @param testMatrix  user-item test ratings
     * @param fold        fold index (cross-validation bookkeeping)
     */
    public IterativeRecommender(SparseMatrix trainMatrix, SparseMatrix testMatrix, int fold) {
        super(trainMatrix, testMatrix, fold);

        // initialization of the static options, done only once across all folds
        if (resetStatics) {
            resetStatics = false;

            LineConfiger lc = cf.getParamOptions("learn.rate");
            if (lc != null) {
                initLRate = Float.parseFloat(lc.getMainParam());
                maxLRate = lc.getFloat("-max", -1);
                isBoldDriver = lc.contains("-bold-driver");
                decay = lc.getFloat("-decay", -1);
                momentum = lc.getFloat("-momentum", 50);
            }

            regOptions = cf.getParamOptions("reg.lambda");
            if (regOptions != null) {
                reg = Float.parseFloat(regOptions.getMainParam());
                regU = regOptions.getFloat("-u", reg);
                regI = regOptions.getFloat("-i", reg);
                regB = regOptions.getFloat("-b", reg);
            }

            numFactors = cf.getInt("num.factors", 10);
            numIters = cf.getInt("num.max.iter", 100);
        }

        // method-specific settings
        lRate = initLRate;
        initByNorm = true;
    }

    /**
     * Default prediction method: inner product of the user and item factors.
     *
     * @param u user index
     * @param j item index
     * @return predicted rating for (u, j)
     */
    @Override
    public double predict(int u, int j) throws Exception {
        return DenseMatrix.rowMult(P, u, Q, j);
    }

    /**
     * Post each iteration, we do things:
     *
     * <ol>
     * <li>print debug information</li>
     * <li>check if converged</li>
     * <li>if not, adjust learning rate</li>
     * </ol>
     *
     * @param iter
     *            current iteration
     *
     * @return boolean: true if it is converged; false otherwise
     * 2016/8/2 RB I don't know if this is correct or not. The logic of the original is preserved but
     * I don't understand it that well.
     * If we are using ranking metrics, then use loss as the early stop measure.
     * If not, use whatever metric is specified. I am restricting it to rating metrics -- diversity and time
     * don't make sense, but I'm not sure what a different ranking metric can't be specific for early stop.
     *
     */
    protected boolean isConverged(int iter) throws Exception {
        float delta_loss = (float) (last_loss - loss);

        if (earlyStopMeasure != null) {
            // with ranking metrics, fall back to the loss as early-stop measure
            // (note: this permanently rewrites the shared earlyStopMeasure field)
            if (measures.hasRankingMetrics()){
                earlyStopMeasure = "Loss";
            }
            if (earlyStopMeasure.equals("Loss")) {
                measure = loss;
                last_measure = last_loss;
            } else {
                // temporarily suppress result output while computing the metric
                boolean flag = isResultsOut;
                isResultsOut = false; // to stop outputs
                measure = measures.getRatingMetric(earlyStopMeasure).getValue();
                isResultsOut = flag; // recover the flag
            }
        }

        float delta_measure = (float) (last_measure - measure);

        // print out debug info
        if (verbose) {
            String learnRate = lRate > 0 ? ", learn_rate = " + (float) lRate : "";

            String earlyStop = "";
            if (earlyStopMeasure != null && !earlyStopMeasure.equals("Loss")) {
                earlyStop = String.format(", %s = %.6f, delta_%s = %.6f", new Object[] { earlyStopMeasure,
                        (float) measure, earlyStopMeasure, delta_measure });
            }

            Logs.debug("{}{} iter {}: loss = {}, delta_loss = {}{}{}", new Object[] { algoName, foldInfo, iter,
                    (float) loss, delta_loss, earlyStop, learnRate });
        }

        // a diverged loss is unrecoverable: abort the whole run
        if (Double.isNaN(loss) || Double.isInfinite(loss)) {
            Logs.error("Loss = NaN or Infinity: current settings does not fit the recommender! Change the settings and try again!");
            System.exit(-1);
        }

        // check if converged: loss near zero, or the measure improved by a tiny amount
        boolean cond1 = Math.abs(loss) < 1e-5;
        boolean cond2 = (delta_measure > 0) && (delta_measure < 1e-5);
        boolean converged = cond1 || cond2;

        // if not converged, update learning rate
        if (!converged)
            updateLRate(iter);

        last_loss = loss;
        last_measure = measure;

        return converged;
    }

    /**
     * Update current learning rate after each epoch <br/>
     *
     * <ol>
     * <li>bold driver: Gemulla et al., Large-scale matrix factorization with distributed stochastic gradient descent,
     * KDD 2011.</li>
     * <li>constant decay: Niu et al, Hogwild!: A lock-free approach to parallelizing stochastic gradient descent, NIPS
     * 2011.</li>
     * <li>Leon Bottou, Stochastic Gradient Descent Tricks</li>
     * <li>more ways to adapt learning rate can refer to: http://www.willamette.edu/~gorr/classes/cs449/momrate.html</li>
     * </ol>
     *
     * @param iter
     *            the current iteration
     */
    protected void updateLRate(int iter) {

        // lRate <= 0 disables adaptive learning altogether
        if (lRate <= 0)
            return;

        if (isBoldDriver && iter > 1)
            // bold driver: grow 5% when the loss improved, halve otherwise
            lRate = Math.abs(last_loss) > Math.abs(loss) ? lRate * 1.05 : lRate * 0.5;
        else if (decay > 0 && decay < 1)
            lRate *= decay;

        // limit to max-learn-rate after update
        if (maxLRate > 0 && lRate > maxLRate)
            lRate = maxLRate;
    }

    /**
     * Allocates and initializes the user/item factor matrices P and Q.
     */
    @Override
    protected void initModel() throws Exception {

        P = new DenseMatrix(numUsers, numFactors);
        Q = new DenseMatrix(numItems, numFactors);

        // initialize model
        if (initByNorm) {
            P.init(initMean, initStd);
        Q.init(initMean, initStd);
        } else {
            P.init(); // P.init(smallValue);
            Q.init(); // Q.init(smallValue);
        }

    }

    /**
     * Serializes the train/test data and the learned factors/biases to a
     * per-algorithm temp folder, suffixed by the current fold.
     */
    protected void saveModel() throws Exception {
        // make a folder
        String dirPath = FileIO.makeDirectory(tempDirPath, algoName);

        // suffix info
        String suffix = foldInfo + ".bin";

        // writing training, test data
        FileIO.serialize(trainMatrix, dirPath + "trainMatrix" + suffix);
        FileIO.serialize(testMatrix, dirPath + "testMatrix" + suffix);

        // write matrices P, Q
        FileIO.serialize(P, dirPath + "userFactors" + suffix);
        FileIO.serialize(Q, dirPath + "itemFactors" + suffix);

        // write vectors
        if (userBias != null)
            FileIO.serialize(userBias, dirPath + "userBiases" + suffix);
        if (itemBias != null)
            FileIO.serialize(itemBias, dirPath + "itemBiases" + suffix);

        Logs.debug("Learned models are saved to folder \"{}\"", dirPath);
    }

    /**
     * Deserializes a previously saved model (counterpart of saveModel).
     */
    protected void loadModel() throws Exception {
        // make a folder
        String dirPath = FileIO.makeDirectory(tempDirPath, algoName);
        Logs.debug("A recommender model is loaded from {}", dirPath);

        // suffix info
        String suffix = foldInfo + ".bin";

        trainMatrix = (SparseMatrix) FileIO.deserialize(dirPath + "trainMatrix" + suffix);
        testMatrix = (SparseMatrix) FileIO.deserialize(dirPath + "testMatrix" + suffix);

        // load matrices P, Q
        P = (DenseMatrix) FileIO.deserialize(dirPath + "userFactors" + suffix);
        Q = (DenseMatrix) FileIO.deserialize(dirPath + "itemFactors" + suffix);

        // load bias vectors
        userBias = (DenseVector) FileIO.deserialize(dirPath + "userBiases" + suffix);
        itemBias = (DenseVector) FileIO.deserialize(dirPath + "itemBiases" + suffix);
    }

    @Override
    public String toString() {
        return Strings.toString(new Object[] { numFactors, initLRate, maxLRate, regB, regU, regI, numIters,
                isBoldDriver });
    }

}
| 9,013 | 29.350168 | 123 | java |
librec | librec-master/librec/src/main/java/librec/data/DiagMatrix.java | // Copyright (C) 2014 Guibing Guo
//
// This file is part of LibRec.
//
// LibRec is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// LibRec is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with LibRec. If not, see <http://www.gnu.org/licenses/>.
//
package librec.data;
import com.google.common.collect.HashBasedTable;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimap;
import com.google.common.collect.Table;
/**
* Data Structure: Diagonal Matrix
*
* @author guoguibing
*
*/
public class DiagMatrix extends SparseMatrix {
private static final long serialVersionUID = -9186836460633909994L;
public DiagMatrix(int rows, int cols, Table<Integer, Integer, Double> dataTable, Multimap<Integer, Integer> colMap) {
super(rows, cols, dataTable, colMap);
}
public DiagMatrix(DiagMatrix mat) {
super(mat);
}
public DiagMatrix clone() {
return new DiagMatrix(this);
}
public DiagMatrix scale(double val) {
DiagMatrix res = this.clone();
for (int i = 0; i < res.numRows; i++)
res.set(i, i, this.get(i, i) * val);
return res;
}
public DiagMatrix add(DiagMatrix that) {
DiagMatrix res = this.clone();
for (int i = 0; i < res.numRows; i++)
res.set(i, i, this.get(i, i) + that.get(i, i));
return res;
}
/**
* Each diagonal entry addes {@code val}
*
* @param val
* a value to be added
* @return a new diagonal matrix
*/
public DiagMatrix add(double val) {
DiagMatrix res = this.clone();
for (int i = 0; i < res.numRows; i++)
res.set(i, i, this.get(i, i) + val);
return res;
}
public DiagMatrix minus(DiagMatrix that) {
DiagMatrix res = this.clone();
for (int i = 0; i < res.numRows; i++)
res.set(i, i, this.get(i, i) - that.get(i, i));
return res;
}
/**
* Each diagonal entry abstracts {@code val}
*
* @param val
* a value to be abstracted
* @return a new diagonal matrix
*/
public DiagMatrix minus(double val) {
DiagMatrix res = this.clone();
for (int i = 0; i < res.numRows; i++)
res.set(i, i, this.get(i, i) - val);
return res;
}
public static DiagMatrix eye(int n) {
Table<Integer, Integer, Double> vals = HashBasedTable.create();
Multimap<Integer, Integer> colMap = HashMultimap.create();
for (int i = 0; i < n; i++) {
vals.put(i, i, 1.0);
colMap.put(i, i);
}
return new DiagMatrix(n, n, vals, colMap);
}
}
| 2,854 | 23.194915 | 118 | java |
librec | librec-master/librec/src/main/java/librec/data/Context.java | // Copyright (C) 2014 Guibing Guo
//
// This file is part of LibRec.
//
// LibRec is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// LibRec is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with LibRec. If not, see <http://www.gnu.org/licenses/>.
//
package librec.data;
/**
* Interface for contextual information.
*
* <p>
* Context information will be stored in the contextTable <UserId, ItemId, Context>, corresponding to dataTable <UserId,
* ItemId, Rating>. If only user context is used, contextTable is reduced to <UserId, -1, UserContext>; If only item
* context is used, contextTable is reduced to <-1, ItemId, ItemContext>. In this way, machine memory can be saved.
* </p>
*
* @author guoguibing
*
*/
public abstract class Context {

    // id of the user this context is attached to
    private int user;

    // id of the item this context is attached to
    private int item;

    /**
     * Create a context bound to a user/item pair. A missing side is encoded
     * by the caller (e.g. -1), as described on the class Javadoc.
     *
     * @param user the related user id
     * @param item the related item id
     */
    public Context(int user, int item) {
        this.user = user;
        this.item = item;
    }

    /**
     * @return the related user id
     */
    public int getUser() {
        return user;
    }

    /**
     * @return the related item id
     */
    public int getItem() {
        return item;
    }

    /**
     * @param user
     *            the new related user id
     */
    public void setUser(int user) {
        this.user = user;
    }

    /**
     * @param item
     *            the new related item id
     */
    public void setItem(int item) {
        this.item = item;
    }
}
| 1,763 | 22.52 | 120 | java |
librec | librec-master/librec/src/main/java/librec/data/DataSplitter.java | // Copyright (C) 2014 Guibing Guo
//
// This file is part of LibRec.
//
// LibRec is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// LibRec is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with LibRec. If not, see <http://www.gnu.org/licenses/>.
//
package librec.data;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import librec.util.Debug;
import librec.util.FileIO;
import librec.util.Lists;
import librec.util.Logs;
import librec.util.Randoms;
import librec.util.Sortor;
import librec.util.Systems;
import com.google.common.collect.HashBasedTable;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimap;
import com.google.common.collect.Table;
/**
* Class to split/sample rating matrix
*
* @author guoguibing
*
*/
public class DataSplitter {
// [row-id, col-id, rate]
private SparseMatrix rateMatrix;
// [row-id, col-id, fold-id]
private SparseMatrix assignMatrix;
// number of folds
private int numFold;
/**
 * Construct a data splitter to split a given matrix into kfolds
 *
 * @param rateMatrix
 *            data matrix
 * @param kfold
 *            number of folds to split the data into
 */
public DataSplitter(SparseMatrix rateMatrix, int kfold) {
    this.rateMatrix = rateMatrix;

    splitFolds(kfold); // eagerly assigns every rating to a fold
}
/**
 * Construct a data splitter with data source of a given rate matrix;
 * no fold assignment is made (use the getRatio*/getGivenN*/getLOO* methods).
 *
 * @param rateMatrix
 *            data source
 */
public DataSplitter(SparseMatrix rateMatrix) {
    this.rateMatrix = rateMatrix;
}
/**
 * Split ratings into k-fold, storing the fold id (1..numFold) of each rating
 * in {@link #assignMatrix}.
 *
 * @param kfold
 *            number of folds (capped at the number of ratings)
 */
private void splitFolds(int kfold) {
    assert kfold > 0;

    assignMatrix = new SparseMatrix(rateMatrix);

    int numRates = rateMatrix.getData().length;
    numFold = kfold > numRates ? numRates : kfold;

    // divide rating data into kfold sample of equal size:
    // fold labels are laid out in equal-sized runs, then shuffled by sorting
    // them against random keys (a parallel-array shuffle)
    double[] rdm = new double[numRates];
    int[] fold = new int[numRates];
    double indvCount = (numRates + 0.0) / numFold;
    for (int i = 0; i < numRates; i++) {
        rdm[i] = Randoms.uniform(); // Math.random();
        fold[i] = (int) (i / indvCount) + 1; // make sure that each fold has each size sample
    }

    Sortor.quickSort(rdm, fold, 0, numRates - 1, true);

    // walk the CRS storage so fold ids line up with the rating order
    int[] row_ptr = rateMatrix.getRowPointers();
    int[] col_idx = rateMatrix.getColumnIndices();

    int f = 0;
    for (int u = 0, um = rateMatrix.numRows(); u < um; u++) {
        for (int idx = row_ptr[u], end = row_ptr[u + 1]; idx < end; idx++) {
            int j = col_idx[idx];
            // if randomly put an int 1-5 to entry (u, j), we cannot make sure equal size for each fold
            assignMatrix.set(u, j, fold[f++]);
        }
    }
}
/**
 * Split ratings into two parts: (ratio) training, (1-ratio) test subsets.
 *
 * @param ratio
 *            the ratio of training data over all the ratings.
 * @return {trainMatrix, testMatrix}
 */
public SparseMatrix[] getRatioByRating(double ratio) {

    assert (ratio > 0 && ratio < 1);

    SparseMatrix trainMatrix = new SparseMatrix(rateMatrix);
    SparseMatrix testMatrix = new SparseMatrix(rateMatrix);

    for (int u = 0, um = rateMatrix.numRows(); u < um; u++) {

        SparseVector uv = rateMatrix.row(u);
        for (int j : uv.getIndex()) {
            double rdm = Math.random();

            // zeroed entries are dropped by reshape below, so each rating
            // survives in exactly one of the two matrices
            if (rdm < ratio)
                testMatrix.set(u, j, 0.0);
            else
                trainMatrix.set(u, j, 0.0);
        }
    }

    // remove zero entries
    SparseMatrix.reshape(trainMatrix);
    SparseMatrix.reshape(testMatrix);

    debugInfo(trainMatrix, testMatrix, -1);

    return new SparseMatrix[] { trainMatrix, testMatrix };
}
/**
 * Split the ratings (by date) into two parts: (ratio) training, (1-ratio) test subsets
 *
 * @param ratio
 *            the ratio of training data
 * @param timestamps
 *            the timestamps of all rating data
 * @return {trainMatrix, testMatrix}, training holding the earliest (ratio)
 *         fraction of all ratings
 */
public SparseMatrix[] getRatioByRatingDate(double ratio, SparseMatrix timestamps) {
    assert (ratio > 0 && ratio < 1);

    // sort timestamps from smaller to larger
    List<RatingContext> rcs = new ArrayList<>(timestamps.size());
    int u, i, j;
    long timestamp;
    for (MatrixEntry me : rateMatrix) {
        u = me.row();
        i = me.column();
        timestamp = (long) timestamps.get(u, i);
        rcs.add(new RatingContext(u, i, timestamp));
    }
    Collections.sort(rcs);

    SparseMatrix trainMatrix = new SparseMatrix(rateMatrix);
    SparseMatrix testMatrix = new SparseMatrix(rateMatrix);

    int trainSize = (int) (rcs.size() * ratio);
    for (i = 0; i < rcs.size(); i++) {
        RatingContext rc = rcs.get(i);
        u = rc.getUser();
        j = rc.getItem();

        // the trainSize oldest ratings stay in training (removed from test)
        if (i < trainSize)
            testMatrix.set(u, j, 0.0);
        else
            trainMatrix.set(u, j, 0.0);
    }

    // release memory
    rcs = null;

    // remove zero entries
    SparseMatrix.reshape(trainMatrix);
    SparseMatrix.reshape(testMatrix);

    debugInfo(trainMatrix, testMatrix, -1);

    return new SparseMatrix[] { trainMatrix, testMatrix };
}
/**
 * Split the ratings of each user (by date) into two parts: (ratio) training, (1-ratio) test subsets
 *
 * @param ratio
 *            the ratio of training data
 * @param timeMatrix
 *            the timestamps of all rating data
 * @return {trainMatrix, testMatrix}, training holding each user's earliest
 *         (ratio) fraction of ratings
 */
public SparseMatrix[] getRatioByUserDate(double ratio, SparseMatrix timeMatrix) {
    assert (ratio > 0 && ratio < 1);

    SparseMatrix trainMatrix = new SparseMatrix(rateMatrix);
    SparseMatrix testMatrix = new SparseMatrix(rateMatrix);

    for (int user = 0, um = rateMatrix.numRows; user < um; user++) {
        List<Integer> unsortedItems = rateMatrix.getColumns(user);

        // sort this user's ratings chronologically
        int size = unsortedItems.size();
        List<RatingContext> rcs = new ArrayList<>(size);
        for (int item : unsortedItems) {
            rcs.add(new RatingContext(user, item, (long) timeMatrix.get(user, item)));
        }
        Collections.sort(rcs);

        int trainSize = (int) (rcs.size() * ratio);
        for (int i = 0; i < rcs.size(); i++) {
            RatingContext rc = rcs.get(i);
            int u = rc.getUser();
            int j = rc.getItem();

            // the trainSize oldest ratings of this user stay in training
            if (i < trainSize)
                testMatrix.set(u, j, 0.0);
            else
                trainMatrix.set(u, j, 0.0);
        }
    }

    // remove zero entries
    SparseMatrix.reshape(trainMatrix);
    SparseMatrix.reshape(testMatrix);

    debugInfo(trainMatrix, testMatrix, -1);

    return new SparseMatrix[] { trainMatrix, testMatrix };
}
/**
 * Split the ratings of each item (by date) into two parts: (ratio) training, (1-ratio) test subsets
 *
 * @param ratio
 *            the ratio of training data
 * @param timestamps
 *            the timestamps of all rating data
 * @return {trainMatrix, testMatrix}, training holding each item's earliest
 *         (ratio) fraction of ratings
 */
public SparseMatrix[] getRatioByItemDate(double ratio, SparseMatrix timestamps) {
    assert (ratio > 0 && ratio < 1);

    SparseMatrix trainMatrix = new SparseMatrix(rateMatrix);
    SparseMatrix testMatrix = new SparseMatrix(rateMatrix);

    for (int item = 0, im = rateMatrix.numColumns; item < im; item++) {
        List<Integer> unsortedUsers = rateMatrix.getRows(item);

        // sort this item's ratings chronologically
        int size = unsortedUsers.size();
        List<RatingContext> rcs = new ArrayList<>(size);
        for (int user : unsortedUsers) {
            rcs.add(new RatingContext(user, item, (long) timestamps.get(user, item)));
        }
        Collections.sort(rcs);

        int trainSize = (int) (rcs.size() * ratio);
        for (int i = 0; i < rcs.size(); i++) {
            RatingContext rc = rcs.get(i);
            int u = rc.getUser();
            int j = rc.getItem();

            // the trainSize oldest ratings of this item stay in training
            if (i < trainSize)
                testMatrix.set(u, j, 0.0);
            else
                trainMatrix.set(u, j, 0.0);
        }
    }

    // remove zero entries
    SparseMatrix.reshape(trainMatrix);
    SparseMatrix.reshape(testMatrix);

    debugInfo(trainMatrix, testMatrix, -1);

    return new SparseMatrix[] { trainMatrix, testMatrix };
}
/**
 * Split ratings into: (train-ratio) training, (validation-ratio) validation, and test three subsets.
 *
 * @param trainRatio
 *            training ratio
 * @param validRatio
 *            validation ratio
 * @return {trainMatrix, validMatrix, testMatrix}; the test part receives the
 *         remaining (1 - trainRatio - validRatio) fraction
 */
public SparseMatrix[] getRatio(double trainRatio, double validRatio) {

    assert (trainRatio > 0 && validRatio > 0 && (trainRatio + validRatio) < 1);

    SparseMatrix trainMatrix = new SparseMatrix(rateMatrix);
    SparseMatrix validMatrix = new SparseMatrix(rateMatrix);
    SparseMatrix testMatrix = new SparseMatrix(rateMatrix);

    double sum = trainRatio + validRatio;

    for (int u = 0, um = rateMatrix.numRows(); u < um; u++) {
        SparseVector uv = rateMatrix.row(u);
        for (int j : uv.getIndex()) {
            double rdm = Math.random();

            // each rating is zeroed in the two matrices it does NOT belong to;
            // the zeros are removed by reshape below
            if (rdm < trainRatio) {
                // for training
                testMatrix.set(u, j, 0);
                validMatrix.set(u, j, 0);
            } else if (rdm < sum) {
                // for validation
                trainMatrix.set(u, j, 0);
                testMatrix.set(u, j, 0);
            } else {
                // for test
                trainMatrix.set(u, j, 0);
                validMatrix.set(u, j, 0);
            }
        }
    }

    // remove zero entries
    SparseMatrix.reshape(trainMatrix);
    SparseMatrix.reshape(validMatrix);
    SparseMatrix.reshape(testMatrix);

    return new SparseMatrix[] { trainMatrix, validMatrix, testMatrix };
}
/**
 * Split ratings into two parts where one rating per user is preserved as the test set and the remaining data as the
 * training set
 *
 * @param isByDate
 *            if true, hold out each user's most recent rating; otherwise a random one
 * @param timestamps
 *            the timestamps of all rating data (only used when isByDate is true)
 * @return {trainMatrix, testMatrix}
 * @throws Exception
 *             if building the test matrix fails
 */
public SparseMatrix[] getLOOByUser(boolean isByDate, SparseMatrix timestamps) throws Exception {

    SparseMatrix trainMatrix = new SparseMatrix(rateMatrix);

    // for building test matrix
    Table<Integer, Integer, Double> dataTable = HashBasedTable.create();
    Multimap<Integer, Integer> colMap = HashMultimap.create();

    for (int u = 0, um = rateMatrix.numRows(); u < um; u++) {

        List<Integer> items = rateMatrix.getColumns(u);
        int i = -1;

        if (!isByDate) {
            // by random
            int randIdx = (int) (items.size() * Math.random());
            i = items.get(randIdx);
        } else {
            // by date
            List<RatingContext> rcs = new ArrayList<>();
            for (int j : items) {
                rcs.add(new RatingContext(u, j, (long) timestamps.get(u, j)));
            }
            Collections.sort(rcs);

            i = rcs.get(rcs.size() - 1).getItem(); // most recent item
        }

        trainMatrix.set(u, i, 0); // remove from training
        dataTable.put(u, i, rateMatrix.get(u, i));
        colMap.put(i, u);
    }

    // remove zero entries
    SparseMatrix.reshape(trainMatrix);

    // build test matrix
    SparseMatrix testMatrix = new SparseMatrix(rateMatrix.numRows, rateMatrix.numColumns, dataTable, colMap);

    debugInfo(trainMatrix, testMatrix, -1);

    return new SparseMatrix[] { trainMatrix, testMatrix };
}
/**
 * Split ratings into two parts where one rating per item is preserved as the test set and the remaining data as the
 * training set
 *
 * @param isByDate
 *            if true, hold out each item's most recent rating; otherwise a random one
 * @param timestamps
 *            the timestamps of all rating data (only used when isByDate is true)
 * @return {trainMatrix, testMatrix}
 * @throws Exception
 *             if building the test matrix fails
 */
public SparseMatrix[] getLOOByItem(boolean isByDate, SparseMatrix timestamps) throws Exception {

    SparseMatrix trainMatrix = new SparseMatrix(rateMatrix);

    // for building test matrix
    Table<Integer, Integer, Double> dataTable = HashBasedTable.create();
    Multimap<Integer, Integer> colMap = HashMultimap.create();

    for (int i = 0, im = rateMatrix.numColumns(); i < im; i++) {

        List<Integer> users = rateMatrix.getRows(i);
        int u = -1;

        if (!isByDate) {
            // by random
            int randIdx = (int) (users.size() * Math.random());
            u = users.get(randIdx);
        } else {
            // by date
            List<RatingContext> rcs = new ArrayList<>();
            for (int v : users) {
                rcs.add(new RatingContext(v, i, (long) timestamps.get(v, i)));
            }
            Collections.sort(rcs);

            u = rcs.get(rcs.size() - 1).getUser(); // most recent rating user
        }

        trainMatrix.set(u, i, 0); // remove from training
        dataTable.put(u, i, rateMatrix.get(u, i));
        colMap.put(i, u);
    }

    // remove zero entries
    SparseMatrix.reshape(trainMatrix);

    // build test matrix
    SparseMatrix testMatrix = new SparseMatrix(rateMatrix.numRows, rateMatrix.numColumns, dataTable, colMap);

    debugInfo(trainMatrix, testMatrix, -1);

    return new SparseMatrix[] { trainMatrix, testMatrix };
}
/**
 * Split ratings into two parts: the training set consisting of user-item ratings where {@code numGiven} ratings are
 * preserved for each user, and the rest are used as the testing data
 *
 * @param numGiven
 *            number of ratings to keep (per user) in the training set; users
 *            with at most numGiven ratings contribute everything to training
 * @return {trainMatrix, testMatrix}
 * @throws Exception
 *             if sampling the kept indices fails
 */
public SparseMatrix[] getGivenNByUser(int numGiven) throws Exception {

    assert numGiven > 0;

    SparseMatrix trainMatrix = new SparseMatrix(rateMatrix);
    SparseMatrix testMatrix = new SparseMatrix(rateMatrix);

    for (int u = 0, um = rateMatrix.numRows(); u < um; u++) {
        List<Integer> items = rateMatrix.getColumns(u);
        int numRated = items.size();

        if (numRated > numGiven) {
            // a set of sampled indices of rated items
            int[] givenIndex = Randoms.nextIntArray(numGiven, numRated);

            // walk the (sorted) sampled indices in lock-step with the items
            for (int i = 0, j = 0; j < numRated; j++) {
                if (i < givenIndex.length && givenIndex[i] == j) {
                    // for training
                    testMatrix.set(u, items.get(j), 0.0);
                    i++;
                } else {
                    // for testing
                    trainMatrix.set(u, items.get(j), 0.0);
                }
            }
        } else {
            // all ratings are used for training
            for (int j : items)
                testMatrix.set(u, j, 0.0);
        }
    }

    // remove zero entries
    SparseMatrix.reshape(trainMatrix);
    SparseMatrix.reshape(testMatrix);

    debugInfo(trainMatrix, testMatrix, -1);

    return new SparseMatrix[] { trainMatrix, testMatrix };
}
/**
 * Split ratings into two parts: the training set consisting of user-item ratings where {@code numGiven} earliest
 * ratings are preserved for each user, and the rest are used as the testing data
 *
 * @param numGiven
 *            number of earliest ratings (per user) to keep in the training set
 * @param timestamps
 *            the timestamps of all rating data
 * @return {trainMatrix, testMatrix}
 * @throws Exception
 *             if the split fails
 */
public SparseMatrix[] getGivenNByUserDate(int numGiven, SparseMatrix timestamps) throws Exception {

    assert numGiven > 0;

    SparseMatrix trainMatrix = new SparseMatrix(rateMatrix);
    SparseMatrix testMatrix = new SparseMatrix(rateMatrix);

    for (int u = 0, um = rateMatrix.numRows(); u < um; u++) {
        List<Integer> items = rateMatrix.getColumns(u);

        // sort this user's ratings chronologically
        int capacity = Lists.initSize(items.size());
        List<RatingContext> rcs = new ArrayList<>(capacity);
        for (int j : items) {
            rcs.add(new RatingContext(u, j, (long) timestamps.get(u, j)));
        }
        Collections.sort(rcs);

        for (int i = 0; i < rcs.size(); i++) {
            RatingContext rc = rcs.get(i);
            int j = rc.getItem();

            // the numGiven oldest ratings of this user stay in training
            if (i < numGiven)
                testMatrix.set(u, j, 0.0);
            else
                trainMatrix.set(u, j, 0.0);
        }
    }

    // remove zero entries
    SparseMatrix.reshape(trainMatrix);
    SparseMatrix.reshape(testMatrix);

    debugInfo(trainMatrix, testMatrix, -1);

    return new SparseMatrix[] { trainMatrix, testMatrix };
}
/**
 * Split ratings into two parts: the training set consisting of user-item ratings where {@code numGiven} earliest
 * ratings are preserved for each item, and the rest are used as the testing data
 *
 * @param numGiven
 *            number of earliest ratings (per item) to keep in the training set
 * @param timestamps
 *            the timestamps of all rating data
 * @return {trainMatrix, testMatrix}
 * @throws Exception
 *             if the split fails
 */
public SparseMatrix[] getGivenNByItemDate(int numGiven, SparseMatrix timestamps) throws Exception {

    assert numGiven > 0;

    SparseMatrix trainMatrix = new SparseMatrix(rateMatrix);
    SparseMatrix testMatrix = new SparseMatrix(rateMatrix);

    // iterate over items, i.e. the COLUMNS of the rating matrix
    // (bug fix: the bound was numRows(), which mis-iterates the items
    // whenever the matrix is not square; cf. getGivenNByItem)
    for (int j = 0, jm = rateMatrix.numColumns(); j < jm; j++) {
        List<Integer> users = rateMatrix.getRows(j);

        // sort this item's ratings chronologically
        int capacity = Lists.initSize(users.size());
        List<RatingContext> rcs = new ArrayList<>(capacity);
        for (int u : users) {
            rcs.add(new RatingContext(u, j, (long) timestamps.get(u, j)));
        }
        Collections.sort(rcs);

        for (int i = 0; i < rcs.size(); i++) {
            RatingContext rc = rcs.get(i);
            int u = rc.getUser();

            // the numGiven oldest ratings of this item stay in training
            if (i < numGiven)
                testMatrix.set(u, j, 0.0);
            else
                trainMatrix.set(u, j, 0.0);
        }
    }

    // remove zero entries
    SparseMatrix.reshape(trainMatrix);
    SparseMatrix.reshape(testMatrix);

    debugInfo(trainMatrix, testMatrix, -1);

    return new SparseMatrix[] { trainMatrix, testMatrix };
}
/**
 * Split ratings into two parts: the training set consisting of user-item ratings where {@code numGiven} ratings are
 * preserved for each item, and the rest are used as the testing data
 *
 * @param numGiven
 *            number of ratings to keep (per item) in the training set; items
 *            with at most numGiven ratings contribute everything to training
 * @return {trainMatrix, testMatrix}
 * @throws Exception
 *             if sampling the kept indices fails
 */
public SparseMatrix[] getGivenNByItem(int numGiven) throws Exception {

    assert numGiven > 0;

    SparseMatrix trainMatrix = new SparseMatrix(rateMatrix);
    SparseMatrix testMatrix = new SparseMatrix(rateMatrix);

    for (int j = 0, jm = rateMatrix.numColumns(); j < jm; j++) {
        List<Integer> users = rateMatrix.getRows(j);
        int numRated = users.size();

        if (numRated > numGiven) {
            // a set of sampled indices of rated items
            int[] givenIndex = Randoms.nextIntArray(numGiven, numRated);

            // walk the (sorted) sampled indices in lock-step with the users
            for (int i = 0, k = 0; k < numRated; k++) {
                if (i < givenIndex.length && givenIndex[i] == k) {
                    // for training
                    testMatrix.set(users.get(k), j, 0.0);
                    i++;
                } else {
                    // for testing
                    trainMatrix.set(users.get(k), j, 0.0);
                }
            }
        } else {
            // all ratings are used for training
            for (int u : users)
                testMatrix.set(u, j, 0.0);
        }
    }

    // remove zero entries
    SparseMatrix.reshape(trainMatrix);
    SparseMatrix.reshape(testMatrix);

    debugInfo(trainMatrix, testMatrix, -1);

    return new SparseMatrix[] { trainMatrix, testMatrix };
}
/**
 * Split ratings into two parts: the training set consisting of user-item ratings where {@code ratio} percentage of
 * ratings are preserved for each user, and the rest are used as the testing data
 *
 * @param ratio
 *            fraction (0, 1) of each user's ratings kept for training
 * @return {trainMatrix, testMatrix}
 * @throws Exception
 *             if the split fails
 */
public SparseMatrix[] getRatioByUser(double ratio) throws Exception {

    assert ratio > 0 && ratio < 1;

    SparseMatrix trainMatrix = new SparseMatrix(rateMatrix);
    SparseMatrix testMatrix = new SparseMatrix(rateMatrix);

    for (int u = 0, um = rateMatrix.numRows(); u < um; u++) {
        List<Integer> items = rateMatrix.getColumns(u);

        for (int j : items) {
            double rand = Math.random();

            // zeroing an entry in one matrix leaves the rating in the other
            // (zeros are stripped by reshape below)
            if (rand < ratio)
                testMatrix.set(u, j, 0.0); // for training
            else
                trainMatrix.set(u, j, 0.0); // for testing
        }
    }

    // remove zero entries
    SparseMatrix.reshape(trainMatrix);
    SparseMatrix.reshape(testMatrix);

    debugInfo(trainMatrix, testMatrix, -1);

    return new SparseMatrix[] { trainMatrix, testMatrix };
}
/**
 * Splits the ratings item by item: each rating of an item is independently
 * assigned to the training set with probability {@code ratio} and to the
 * test set otherwise.
 *
 * @param ratio
 *            fraction of each item's ratings kept for training, in (0, 1)
 * @return rating matrices {training data, test data}
 */
public SparseMatrix[] getRatioByItem(double ratio) throws Exception {
	assert ratio > 0 && ratio < 1;

	SparseMatrix trainMatrix = new SparseMatrix(rateMatrix);
	SparseMatrix testMatrix = new SparseMatrix(rateMatrix);

	int numItems = rateMatrix.numColumns();
	for (int i = 0; i < numItems; i++) {
		for (int u : rateMatrix.getRows(i)) {
			if (Math.random() < ratio) {
				// assigned to training: remove from the test copy
				testMatrix.set(u, i, 0.0);
			} else {
				// assigned to testing: remove from the train copy
				trainMatrix.set(u, i, 0.0);
			}
		}
	}

	// drop the zeroed entries from both matrices
	SparseMatrix.reshape(trainMatrix);
	SparseMatrix.reshape(testMatrix);

	debugInfo(trainMatrix, testMatrix, -1);

	return new SparseMatrix[] { trainMatrix, testMatrix };
}
/**
 * Writes a random sub-sample of the rating matrix (restricted to randomly
 * chosen users and items) to "sample.txt" on the desktop, one
 * "userId itemId rating" triple per line with 1-based ids.
 *
 * @param numUsers
 *            number of users to sample; non-positive or larger than the
 *            total uses all users
 * @param numItems
 *            number of items to sample; non-positive or larger than the
 *            total uses all items
 */
public void getSample(int numUsers, int numItems) throws Exception {
	int rows = rateMatrix.numRows();
	int cols = rateMatrix.numColumns();

	// clamp the requested sizes to the available dimensions
	int users = (numUsers <= 0 || numUsers > rows) ? rows : numUsers;
	int items = (numItems <= 0 || numItems > cols) ? cols : numItems;

	int[] userIds = Randoms.nextIntArray(users, rows);
	int[] itemIds = Randoms.nextIntArray(items, cols);

	String path = FileIO.desktop + "sample.txt";
	FileIO.deleteFile(path);

	List<String> buffer = new ArrayList<>(2000);
	int written = 0;
	for (int userId : userIds) {
		for (int itemId : itemIds) {
			double rate = rateMatrix.get(userId, itemId);
			if (rate <= 0)
				continue;

			// external ids are 1-based
			buffer.add((userId + 1) + " " + (itemId + 1) + " " + (float) rate);
			written++;

			// flush periodically to bound memory usage
			if (buffer.size() >= 1500) {
				FileIO.writeList(path, buffer, null, true);
				buffer.clear();
			}
		}
	}

	if (!buffer.isEmpty())
		FileIO.writeList(path, buffer, null, true);

	Logs.debug("Sample [size: {}] has been created!", written);
}
/**
 * Builds a train/test split for a particular evaluation view of the data.
 * Only the "cold-start" view is implemented: users with fewer than 5 ratings
 * have all their ratings placed in the test set; all other users' ratings go
 * to the training set.
 *
 * @param view
 *            name of the data view (matched case-insensitively); only
 *            "cold-start" is recognized
 * @return rating matrices {training data, test data}, or {@code null} for an
 *         unknown view
 */
public SparseMatrix[] getDataView(String view) {
	SparseMatrix trainMatrix = new SparseMatrix(rateMatrix);
	SparseMatrix testMatrix = new SparseMatrix(rateMatrix);

	switch (view.toLowerCase()) {
	case "cold-start":
		for (int u = 0, um = rateMatrix.numRows; u < um; u++) {
			SparseVector uv = rateMatrix.row(u);
			// NOTE(review): the cold-start threshold of 5 ratings is hard-coded
			if (uv.getCount() < 5) {
				// cold-start user: all of this user's ratings go to testing
				for (int i : uv.getIndex())
					trainMatrix.set(u, i, 0.0);
			} else {
				// warm user: all of this user's ratings go to training
				for (int i : uv.getIndex())
					testMatrix.set(u, i, 0.0);
			}
		}
		break;
	default:
		return null;
	}

	// remove zero entries
	SparseMatrix.reshape(trainMatrix);
	SparseMatrix.reshape(testMatrix);

	return new SparseMatrix[] { trainMatrix, testMatrix };
}
/**
 * Return the k-th fold as test set (testMatrix), making all the others as
 * train set in rateMatrix.
 *
 * @param k
 *            the index of the desired fold, in [1, numFold]
 * @return rating matrices {k-th train data, k-th test data}, or {@code null}
 *         if {@code k} is out of range
 */
public SparseMatrix[] getKthFold(int k) {
	if (k > numFold || k < 1)
		return null;

	SparseMatrix trainMatrix = new SparseMatrix(rateMatrix);
	SparseMatrix testMatrix = new SparseMatrix(rateMatrix);

	for (int u = 0, um = rateMatrix.numRows(); u < um; u++) {
		SparseVector items = rateMatrix.row(u);

		for (int j : items.getIndex()) {
			// assignMatrix holds the fold id assigned to each rating entry
			if (assignMatrix.get(u, j) == k)
				trainMatrix.set(u, j, 0.0); // keep test data and remove train data
			else
				testMatrix.set(u, j, 0.0); // keep train data and remove test data
		}
	}

	// remove zero entries
	SparseMatrix.reshape(trainMatrix);
	SparseMatrix.reshape(testMatrix);

	debugInfo(trainMatrix, testMatrix, k);

	return new SparseMatrix[] { trainMatrix, testMatrix };
}
/**
 * Logs the sizes of a train/test split. When {@code Debug.OFF} is toggled
 * on, additionally dumps both matrices to text files on the desktop.
 *
 * @param fold
 *            fold number to include in the log line; non-positive values
 *            omit the fold prefix
 */
private void debugInfo(SparseMatrix trainMatrix, SparseMatrix testMatrix, int fold) {
	String prefix = fold > 0 ? String.format("Fold [%d]: ", fold) : "";
	Logs.debug("{}training amount: {}, test amount: {}", prefix, trainMatrix.size(), testMatrix.size());

	if (Debug.OFF) {
		try {
			String dir = Systems.getDesktop();
			FileIO.writeString(dir + "training.txt", trainMatrix.toString());
			FileIO.writeString(dir + "test.txt", testMatrix.toString());
		} catch (Exception e) {
			e.printStackTrace();
		}
	}
}
}
| 22,529 | 26.276029 | 117 | java |
librec | librec-master/librec/src/main/java/librec/data/DataDAO.java | // Copyright (C) 2014 Guibing Guo
//
// This file is part of LibRec.
//
// LibRec is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// LibRec is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with LibRec. If not, see <http://www.gnu.org/licenses/>.
//
package librec.data;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import librec.util.Dates;
import librec.util.FileIO;
import librec.util.Logs;
import librec.util.Stats;
import librec.util.Strings;
import com.google.common.collect.BiMap;
import com.google.common.collect.HashBasedTable;
import com.google.common.collect.HashBiMap;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.HashMultiset;
import com.google.common.collect.Multimap;
import com.google.common.collect.Multiset;
import com.google.common.collect.Table;
/**
* A data access object (DAO) to a data file
*
* @author guoguibing
*
*/
public class DataDAO {
// name of data file
private String dataName;
// directory of data file
private String dataDir;
// path to data file
private String dataPath;
// store rate data as {user, item, rate} matrix
private SparseMatrix rateMatrix;
// store time data as {user, item, timestamp} matrix
private SparseMatrix timeMatrix;
// store rate data as a sparse tensor
private SparseTensor rateTensor;
// is item type as user
private boolean isItemAsUser;
// is first head line
private boolean isHeadline = false;
// data scales
private List<Double> ratingScale;
// scale distribution
private Multiset<Double> scaleDist;
// number of rates
private int numRatings;
// user/item {raw id, inner id} map
private BiMap<String, Integer> userIds, itemIds;
private BiMap<String, Integer>[] featureIds;
// inverse views of userIds, itemIds
private BiMap<Integer, String> idUsers, idItems;
// time unit may depend on data sets, e.g. in MovieLens, it is unix seconds
private TimeUnit timeUnit;
// minimum/maximum rating timestamp
private long minTimestamp, maxTimestamp;
/**
 * Constructs a DAO bound to a data file, optionally sharing pre-built
 * user/item id mappings. Passing {@code null} for a mapping creates a fresh
 * one. When the user and item mappings are the same instance, "items" are
 * treated as users (useful for social-relation data).
 *
 * @param path
 *            path to data file
 * @param userIds
 *            user {raw id, inner id} map, or {@code null}
 * @param itemIds
 *            item {raw id, inner id} map, or {@code null}
 */
public DataDAO(String path, BiMap<String, Integer> userIds, BiMap<String, Integer> itemIds) {
	dataPath = path;

	this.userIds = (userIds == null) ? HashBiMap.<String, Integer> create() : userIds;
	this.itemIds = (itemIds == null) ? HashBiMap.<String, Integer> create() : itemIds;

	scaleDist = HashMultiset.create();
	isItemAsUser = (this.userIds == this.itemIds);

	// by default timestamps are interpreted as unix seconds (e.g. MovieLens)
	timeUnit = TimeUnit.SECONDS;
}

/**
 * Constructs a DAO with fresh user/item id mappings.
 *
 * @param path
 *            path to data file
 */
public DataDAO(String path) {
	this(path, null, null);
}

/**
 * Constructs a DAO where items share the user id mapping (item-as-user mode).
 *
 * @param path
 *            path to data file
 */
public DataDAO(String path, BiMap<String, Integer> userIds) {
	this(path, userIds, userIds);
}
/**
 * Reads ratings with the default column layout {0: user, 1: item, 2: rate}
 * and no binarization (i.e., the rating-prediction task).
 *
 * @return a sparse matrix storing all the relevant data
 */
public SparseMatrix[] readData() throws Exception {
	return readData(new int[] { 0, 1, 2 }, -1);
}

/**
 * Reads ratings with the default column layout, binarizing each rating
 * against the given threshold (item-recommendation task).
 *
 * @param binThold
 *            ratings greater than this threshold become 1, others 0; a
 *            negative value disables binarization
 * @return a sparse matrix storing all the relevant data
 */
public SparseMatrix[] readData(double binThold) throws Exception {
	return readData(new int[] { 0, 1, 2 }, binThold);
}
/**
 * Reads rating data from the data file. Duplicated lines are not handled
 * specially: a later occurrence of the same user-item pair overwrites the
 * earlier rating in the matrix (each occurrence is still counted in the
 * rating-scale distribution).
 *
 * @param cols
 *            the indexes of the relevant columns in the data file: {user,
 *            item, [rating, timestamp] (optional)}
 * @param binThold
 *            the threshold to binarize a rating. If a rating is greater than
 *            the threshold, the value will be 1; otherwise 0. To disable
 *            this feature, i.e., keep the original rating value, set the
 *            threshold to a negative value
 * @return sparse matrices {rating matrix, timestamp matrix or null}
 */
public SparseMatrix[] readData(int[] cols, double binThold) throws Exception {

	Logs.info(String.format("Dataset: %s", Strings.last(dataPath, 38)));

	// Table {row-id, col-id, rate}
	Table<Integer, Integer, Double> dataTable = HashBasedTable.create();
	// Table {row-id, col-id, timestamp}
	Table<Integer, Integer, Long> timeTable = null;
	// Map {col-id, multiple row-id}: used to fast build a rating matrix
	Multimap<Integer, Integer> colMap = HashMultimap.create();

	BufferedReader br = FileIO.getReader(dataPath);
	String line = null;
	minTimestamp = Long.MAX_VALUE;
	maxTimestamp = Long.MIN_VALUE;

	// ensure the reader is closed even if parsing throws
	try {
		while ((line = br.readLine()) != null) {
			if (isHeadline()) {
				setHeadline(false);
				continue;
			}

			String[] data = line.trim().split("[ \t,]+");

			String user = data[cols[0]];
			String item = data[cols[1]];
			// NOTE(review): "data.length >= 3" assumes a small rating column
			// index; a rating column beyond index 2 with short lines would
			// silently default to 1.0 — confirm against the supported layouts
			Double rate = (cols.length >= 3 && data.length >= 3) ? Double.valueOf(data[cols[2]]) : 1.0;

			// binarize the rating for the item recommendation task
			if (binThold >= 0)
				rate = rate > binThold ? 1.0 : 0.0;

			scaleDist.add(rate);

			// inner ids start from 0
			int row = userIds.containsKey(user) ? userIds.get(user) : userIds.size();
			userIds.put(user, row);

			int col = itemIds.containsKey(item) ? itemIds.get(item) : itemIds.size();
			itemIds.put(item, col);

			dataTable.put(row, col, rate);
			colMap.put(col, row);

			// record the rating's issuing time, if present
			if (cols.length >= 4 && data.length >= 4) {
				if (timeTable == null)
					timeTable = HashBasedTable.create();

				// convert to milliseconds
				long mms = 0L;
				try {
					mms = Long.parseLong(data[cols[3]]); // cannot parse "9.7323480e+008"
				} catch (NumberFormatException e) {
					mms = (long) Double.parseDouble(data[cols[3]]);
				}
				long timestamp = timeUnit.toMillis(mms);

				if (minTimestamp > timestamp)
					minTimestamp = timestamp;
				if (maxTimestamp < timestamp)
					maxTimestamp = timestamp;

				timeTable.put(row, col, timestamp);
			}
		}
	} finally {
		br.close();
	}

	numRatings = scaleDist.size();
	ratingScale = new ArrayList<>(scaleDist.elementSet());
	Collections.sort(ratingScale);

	int numRows = numUsers(), numCols = numItems();

	// if the minimum rating is 0.0, shift all ratings up by one scale step so
	// that 0 can be reserved to denote "missing"
	double minRate = ratingScale.get(0).doubleValue();
	double epsilon = minRate == 0.0 ? ratingScale.get(1).doubleValue() - minRate : 0;
	if (epsilon > 0) {
		// shift the scale values
		for (int i = 0, im = ratingScale.size(); i < im; i++) {
			double val = ratingScale.get(i);
			ratingScale.set(i, val + epsilon);
		}
		// shift the stored ratings: iterating the existing cells is
		// O(#ratings) instead of the previous O(#users * #items) dense scan;
		// overwriting the value of an existing key is not a structural
		// modification of the backing hash maps, so iteration stays valid
		for (Table.Cell<Integer, Integer, Double> cell : dataTable.cellSet()) {
			dataTable.put(cell.getRowKey(), cell.getColumnKey(), cell.getValue() + epsilon);
		}
	}

	String dateRange = "";
	if (cols.length >= 4)
		dateRange = String.format(", Timestamps = {%s, %s}", Dates.toString(minTimestamp),
				Dates.toString(maxTimestamp));

	Logs.debug("With Specs: {Users, {}} = {{}, {}, {}}, Scale = {{}}{}", (isItemAsUser ? "Users, Links"
			: "Items, Ratings"), numRows, numCols, numRatings, Strings.toString(ratingScale), dateRange);

	// build the rating matrix
	rateMatrix = new SparseMatrix(numRows, numCols, dataTable, colMap);
	if (timeTable != null)
		timeMatrix = new SparseMatrix(numRows, numCols, timeTable, colMap);

	// release the memory of the data tables
	dataTable = null;
	timeTable = null;

	return new SparseMatrix[] { rateMatrix, timeMatrix };
}
/**
 * Reads data from the data file into a sparse tensor. Duplicated lines are
 * not treated specially. Columns other than the user, item and rating
 * columns are treated as context-feature dimensions.
 *
 * @param cols
 *            the indexes of the relevant columns in the data file: {user,
 *            item, rating}; other columns are treated as features
 * @param binThold
 *            the threshold to binarize a rating: ratings greater than the
 *            threshold become 1, others 0; a negative value disables
 *            binarization and keeps the original rating value
 * @return a sparse tensor storing all the relevant data
 */
@SuppressWarnings("unchecked")
public SparseMatrix[] readTensor(int[] cols, double binThold) throws Exception {
	if (cols.length < 3)
		throw new Exception("Column length cannot be smaller than 3. Usage: user, item, rating columns.");

	Logs.info(String.format("Dataset: %s", Strings.last(dataPath, 38)));

	int[] dims = null;
	int numDims = 0;
	// per-dimension entry lists (one entry per rating) and distinct-id sets
	List<Integer>[] ndLists = null;
	Set<Integer>[] ndSets = null;
	List<Double> vals = new ArrayList<Double>();

	BufferedReader br = FileIO.getReader(dataPath);
	String line = null;
	while ((line = br.readLine()) != null) {
		if (isHeadline()) {
			setHeadline(false);
			continue;
		}

		String[] data = line.trim().split("[ \t,]+");

		// initialization: dimensionality is derived from the first data line
		// (all columns except the rating column become tensor dimensions)
		if (dims == null) {
			numDims = data.length - 1;
			dims = new int[numDims];

			ndLists = (List<Integer>[]) new List<?>[numDims];
			ndSets = (Set<Integer>[]) new Set<?>[numDims];
			for (int d = 0; d < numDims; d++) {
				ndLists[d] = new ArrayList<Integer>();
				ndSets[d] = new HashSet<Integer>();
			}

			int featureDims = numDims - 2; // feature dimension should exclude user and item
			featureIds = new BiMap[featureDims];
			for (int d = 0; d < featureDims; d++) {
				featureIds[d] = HashBiMap.create();
			}
		}

		// set data
		for (int d = 0; d < data.length; d++) {
			String val = data[d];
			int feature = -1;

			if (d == cols[0]) {
				// user: map the raw id to a 0-based inner id
				feature = userIds.containsKey(val) ? userIds.get(val) : userIds.size();
				userIds.put(val, feature);

			} else if (d == cols[1]) {
				// item: map the raw id to a 0-based inner id
				feature = itemIds.containsKey(val) ? itemIds.get(val) : itemIds.size();
				itemIds.put(val, feature);

			} else if (d == cols[2]) {
				// rating value
				double rate = Double.parseDouble(val);

				// binarize the rating for item recommendation task
				if (binThold >= 0)
					rate = rate > binThold ? 1.0 : 0.0;

				vals.add(rate);
				scaleDist.add(rate);
				continue;

			} else {
				// other: val as feature value
				// NOTE(review): "d - 3" assumes cols == {0, 1, 2}, i.e. that
				// features start at column 3 — confirm before calling this
				// with a non-default column layout
				int featureDim = d - 3;
				feature = (int) (featureIds[featureDim].containsKey(val) ? featureIds[featureDim].get(val) : featureIds[featureDim].size());
				featureIds[featureDim].put(val, feature);
			}

			// tensor dimension of this column: columns after the rating
			// column shift down by one since the rating is not a dimension
			int dim = d > cols[2] ? d - 1 : d;
			ndLists[dim].add(feature);
			ndSets[dim].add(feature);
		}
	}
	br.close();

	numRatings = scaleDist.size();
	ratingScale = new ArrayList<>(scaleDist.elementSet());
	Collections.sort(ratingScale);

	// if min-rate = 0.0, shift upper a scale so 0 can denote "missing"
	double minRate = ratingScale.get(0).doubleValue();
	double epsilon = minRate == 0.0 ? ratingScale.get(1).doubleValue() - minRate : 0;

	if (epsilon > 0) {
		// shift upper a scale
		for (int i = 0, im = ratingScale.size(); i < im; i++) {
			double val = ratingScale.get(i);
			ratingScale.set(i, val + epsilon);
		}
		// update rating values
		for (int i = 0; i < vals.size(); i++) {
			vals.set(i, vals.get(i) + epsilon);
		}
	}

	// get dimensions: the size of each dimension is its distinct-id count
	int numRows = numUsers(), numCols = numItems();
	for (int d = 0; d < numDims; d++) {
		dims[d] = ndSets[d].size();
	}

	// debug info
	Logs.debug("With Specs: {Users, Items, Ratings, Features} = {{}, {}, {}, {}}, Scale = {{}}", numRows, numCols,
			numRatings, (numDims - 2), Strings.toString(ratingScale));

	rateTensor = new SparseTensor(dims, ndLists, vals);
	rateTensor.setUserDimension(cols[0]);
	rateTensor.setItemDimension(cols[1]);

	return new SparseMatrix[] { rateTensor.rateMatrix(), null };
}
/**
 * Exports the rating data to another file, one "user<sep>item<sep>rate"
 * triple per line with 1-based ids. Lines are buffered and flushed in
 * batches to limit memory use.
 *
 * @param toPath
 *            the data file to write to
 * @param sep
 *            the separator between fields of the written data file
 */
public void writeData(String toPath, String sep) throws Exception {
	FileIO.deleteFile(toPath);

	List<String> buffer = new ArrayList<>(1500);
	for (MatrixEntry me : rateMatrix) {
		Object[] fields = new Object[] { me.row() + 1, me.column() + 1, (float) me.get() };
		buffer.add(Strings.toString(fields, sep));

		if (buffer.size() >= 1000) {
			FileIO.writeList(toPath, buffer, null, true);
			buffer.clear();
		}
	}

	if (!buffer.isEmpty())
		FileIO.writeList(toPath, buffer, null, true);

	Logs.debug("Data has been exported to {}", toPath);
}

/**
 * Exports the rating data using the default separator " ".
 */
public void writeData(String toPath) throws Exception {
	writeData(toPath, " ");
}
/**
 * Write rate matrix to a data file with format ".arff" which can be used by
 * the PREA toolkit. Each user becomes one sparse ARFF row of {itemId rating}
 * pairs; output is buffered and flushed roughly every 500 users.
 *
 * @param relation
 *            relation name of dataset
 * @param toPath
 *            data file path
 */
public void writeArff(String relation, String toPath) throws Exception {
	FileIO.deleteFile(toPath);

	// try-with-resources guarantees the writer is closed even when an
	// exception occurs mid-write (the previous code leaked it on failure)
	try (BufferedWriter bw = FileIO.getWriter(toPath)) {
		bw.write("@RELATION " + relation + "\n\n");
		bw.write("@ATTRIBUTE UserId NUMERIC\n\n");
		bw.write("@DATA\n");

		StringBuilder sb = new StringBuilder();
		int count = 0;
		for (int u = 0, um = numUsers(); u < um; u++) {
			sb.append("{0 " + (u + 1));
			for (int j = 0, jm = numItems(); j < jm; j++) {
				double rate = rateMatrix.get(u, j);
				if (rate != 0)
					sb.append(", " + (j + 1) + " " + rate);
				if (j == jm - 1)
					sb.append("}\n");
			}

			// flush periodically to keep the in-memory buffer small
			if (count++ >= 500) {
				bw.write(sb.toString());
				count = 0;
				sb = new StringBuilder();
			}
		}
		if (count > 0)
			bw.write(sb.toString());
	}

	Logs.debug("Data has been exported to {}", toPath);
}
/**
 * Prints a summary of the loaded dataset to the log: user/item/rating
 * counts, density, scale distribution, overall rating statistics, and
 * per-user / per-item rating-count statistics. Loads the data first if it
 * has not been read yet.
 */
public void printSpecs() throws Exception {
	if (rateMatrix == null)
		readData();

	List<String> sps = new ArrayList<>();

	int users = numUsers();
	int items = numItems();
	int numRates = rateMatrix.size();

	sps.add(String.format("Dataset: %s", dataPath));
	sps.add("User amount: " + users + ", " + FileIO.formatSize(users));
	if (!isItemAsUser)
		sps.add("Item amount: " + items + ", " + FileIO.formatSize(items));
	sps.add("Rate amount: " + numRates + ", " + FileIO.formatSize(numRates));
	sps.add(String.format("Data density: %.4f%%", (numRates + 0.0) / users / items * 100));
	sps.add("Scale distribution: " + scaleDist.toString());

	// overall statistics of the rating values
	double[] data = rateMatrix.getData();
	float mean = (float) (Stats.sum(data) / numRates);
	float std = (float) Stats.sd(data);
	float mode = (float) Stats.mode(data);
	float median = (float) Stats.median(data);

	sps.add("");
	sps.add(String.format("Average value of all ratings: %f", mean));
	sps.add(String.format("Standard deviation of all ratings: %f", std));
	sps.add(String.format("Mode of all rating values: %f", mode));
	sps.add(String.format("Median of all rating values: %f", median));

	// per-user rating counts; users with no ratings are excluded
	List<Integer> userCnts = new ArrayList<>();
	int userMax = 0, userMin = Integer.MAX_VALUE;
	for (int u = 0, um = numUsers(); u < um; u++) {
		int size = rateMatrix.rowSize(u);
		if (size > 0) {
			userCnts.add(size);
			if (size > userMax)
				userMax = size;
			if (size < userMin)
				userMin = size;
		}
	}

	sps.add("");
	sps.add(String.format("Max number of ratings per user: %d", userMax));
	sps.add(String.format("Min number of ratings per user: %d", userMin));
	sps.add(String.format("Average number of ratings per user: %f", (float) Stats.mean(userCnts)));
	sps.add(String.format("Standard deviation of number of ratings per user: %f", (float) Stats.sd(userCnts)));

	// per-item rating counts; skipped in item-as-user mode
	if (!isItemAsUser) {
		List<Integer> itemCnts = new ArrayList<>();
		int itemMax = 0, itemMin = Integer.MAX_VALUE;
		for (int j = 0, jm = numItems(); j < jm; j++) {
			int size = rateMatrix.columnSize(j);
			if (size > 0) {
				itemCnts.add(size);
				if (size > itemMax)
					itemMax = size;
				if (size < itemMin)
					itemMin = size;
			}
		}

		sps.add("");
		sps.add(String.format("Max number of ratings per item: %d", itemMax));
		sps.add(String.format("Min number of ratings per item: %d", itemMin));
		sps.add(String.format("Average number of ratings per item: %f", (float) Stats.mean(itemCnts)));
		sps.add(String.format("Standard deviation of number of ratings per item: %f", (float) Stats.sd(itemCnts)));
	}

	Logs.info(Strings.toSection(sps));
}
/**
 * Prints (or writes to desktop files) two rating-count distributions:
 * <ul>
 * <li>#users (y) -- #ratings (x) issued by each user</li>
 * <li>#items (y) -- #ratings (x) received by each item</li>
 * </ul>
 *
 * @param isWriteOut
 *            if true, write the distributions to "user-distr.txt" and
 *            "item-distr.txt" on the desktop; otherwise log them
 */
public void printDistr(boolean isWriteOut) throws Exception {
	if (rateMatrix == null)
		readData();

	// how many users issued a given number of ratings
	Multiset<Integer> numURates = HashMultiset.create();
	// how many items received a given number of ratings
	Multiset<Integer> numIRates = HashMultiset.create();

	for (int r = 0, rm = rateMatrix.numRows; r < rm; r++)
		numURates.add(rateMatrix.rowSize(r));

	for (int c = 0, cm = rateMatrix.numColumns; c < cm; c++)
		numIRates.add(rateMatrix.columnSize(c));

	String ustrs = Strings.toString(numURates);
	String istrs = Strings.toString(numIRates);

	if (isWriteOut) {
		FileIO.writeString(FileIO.desktop + "user-distr.txt", ustrs);
		FileIO.writeString(FileIO.desktop + "item-distr.txt", istrs);
	} else {
		Logs.debug("#ratings (x) ~ #users (y): \n" + ustrs);
		Logs.debug("#ratings (x) ~ #items (y): \n" + istrs);
	}

	Logs.debug("Done!");
}
/**
 * @return number of distinct users seen so far
 */
public int numUsers() {
	return userIds.size();
}

/**
 * @return number of distinct items seen so far
 */
public int numItems() {
	return itemIds.size();
}

/**
 * @return number of ratings read from the data file
 */
public int numRatings() {
	return numRatings;
}

/**
 * @return the time span of the ratings, in whole days
 */
public int numDays() {
	return (int) TimeUnit.MILLISECONDS.toDays(maxTimestamp - minTimestamp);
}

/**
 * @param rawId
 *            raw user id as String
 * @return inner user id as int
 */
public int getUserId(String rawId) {
	return userIds.get(rawId);
}

/**
 * Maps an inner user id back to its raw string id; the inverse view is
 * created lazily on first use and cached.
 *
 * @param innerId
 *            inner user id as int
 * @return raw user id as String
 */
public String getUserId(int innerId) {
	if (idUsers == null)
		idUsers = userIds.inverse();

	return idUsers.get(innerId);
}

/**
 * @param rawId
 *            raw item id as String
 * @return inner item id as int
 */
public int getItemId(String rawId) {
	return itemIds.get(rawId);
}

/**
 * Maps an inner item id back to its raw string id; the inverse view is
 * created lazily on first use and cached.
 *
 * @param innerId
 *            inner item id as int
 * @return raw item id as String
 */
public String getItemId(int innerId) {
	if (idItems == null)
		idItems = itemIds.inverse();

	return idItems.get(innerId);
}

/**
 * @return the path to the dataset file
 */
public String getDataPath() {
	return dataPath;
}

/**
 * @return the rate matrix
 */
public SparseMatrix getRateMatrix() {
	return rateMatrix;
}

/**
 * @return whether "items" are users, useful for social relations
 */
public boolean isItemAsUser() {
	return isItemAsUser;
}

/**
 * @return rating scales
 */
public List<Double> getRatingScale() {
	return ratingScale;
}

/**
 * @return user {raw id, inner id} mappings
 */
public BiMap<String, Integer> getUserIds() {
	return userIds;
}

/**
 * @return item {raw id, inner id} mappings
 */
public BiMap<String, Integer> getItemIds() {
	return itemIds;
}

/**
 * @return name of the data file without its file-type extension, computed
 *         lazily from the data path and cached
 */
public String getDataName() {
	if (dataName == null) {
		dataName = dataPath.substring(dataPath.lastIndexOf(File.separator) + 1, dataPath.lastIndexOf("."));
	}

	return dataName;
}

/**
 * @return directory of the data file, computed lazily and cached
 */
public String getDataDirectory() {
	if (dataDir == null) {
		// NOTE(review): splitting on File.separator misses '/'-separated
		// paths on Windows — confirm the expected path style
		int pos = dataPath.lastIndexOf(File.separator);
		dataDir = pos > 0 ? dataPath.substring(0, pos + 1) : "." + File.separator;
	}

	return dataDir;
}

/**
 * Sets the time unit used to interpret raw timestamps in the data file.
 */
public void setTimeUnit(TimeUnit timeUnit) {
	this.timeUnit = timeUnit;
}

/**
 * @return the minimum timestamp (milliseconds) seen while reading data
 */
public long getMinTimestamp() {
	return minTimestamp;
}

/**
 * @return the maximum timestamp (milliseconds) seen while reading data
 */
public long getMaxTimestamp() {
	return maxTimestamp;
}

/**
 * @return the rating tensor built by {@link #readTensor(int[], double)}
 */
public SparseTensor getRateTensor() {
	return rateTensor;
}

/**
 * @return whether the next line read is treated as a header line
 */
public boolean isHeadline() {
	return isHeadline;
}

/**
 * Marks whether the next line read should be skipped as a header line.
 */
public void setHeadline(boolean isHeadline) {
	this.isHeadline = isHeadline;
}
}
| 21,179 | 25.810127 | 129 | java |
librec | librec-master/librec/src/main/java/librec/data/SparseVector.java | // Copyright (C) 2014 Guibing Guo
//
// This file is part of LibRec.
//
// LibRec is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// LibRec is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with LibRec. If not, see <http://www.gnu.org/licenses/>.
//
package librec.data;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import librec.util.Stats;
/**
 * Data Structure: Sparse Vector whose implementation is modified from the M4J
 * library. Entries are kept as parallel {index, value} arrays sorted by index;
 * only the first {@code count} slots of each array are meaningful.
 *
 * @author guoguibing
 *
 */
public class SparseVector implements Iterable<VectorEntry>, Serializable {

	private static final long serialVersionUID = 1151609203685872657L;

	// logical length of the vector: valid indices are [0, capacity)
	protected int capacity;

	// values of the stored entries, packed in the first "count" slots
	protected double[] data;

	// sorted indices matching "data"; slots at position >= count are unused
	protected int[] index;

	// number of entries currently stored
	protected int count;

	/**
	 * Construct a sparse vector with its maximum capacity
	 *
	 * @param capcity
	 *            maximum size of the sparse vector
	 */
	public SparseVector(int capcity) {
		this.capacity = capcity;
		data = new double[0];

		count = 0;
		index = new int[0];
	}

	/**
	 * Construct a sparse vector with its maximum capacity, filled from a dense
	 * data array: array[i] is stored at index i, zeros are skipped.
	 *
	 * @param capcity
	 *            maximum size of the sparse vector
	 * @param array
	 *            dense input data
	 */
	public SparseVector(int capcity, double[] array) {
		this(capcity);

		for (int i = 0; i < array.length; i++)
			if (array[i] != 0)
				this.set(i, array[i]);
	}

	/**
	 * Construct a sparse vector by deeply copying another vector.
	 */
	public SparseVector(SparseVector sv) {
		this(sv.capacity);

		// FIX: the previous implementation passed the packed value array
		// sv.data to the dense-array constructor, which re-interpreted the
		// k-th stored value as the value at index k. Copy the packed
		// index/value pairs directly instead.
		count = sv.count;
		data = Arrays.copyOf(sv.data, sv.count);
		index = Arrays.copyOf(sv.index, sv.count);
	}

	/**
	 * Check if the vector contains an entry at a specific index
	 *
	 * @param idx
	 *            the index to search
	 */
	public boolean contains(int idx) {
		// FIX: restrict the binary search to the first "count" slots; the
		// tail of "index" beyond count holds unused values, so searching the
		// whole array (as before) can return a wrong result.
		return Arrays.binarySearch(index, 0, count, idx) >= 0;
	}

	/**
	 * @return a copy of internal data (to prevent changes outside)
	 */
	public double[] getData() {
		return Arrays.copyOf(data, count);
	}

	/**
	 * @return a copy of indices (to prevent changes outside)
	 */
	public int[] getIndex() {
		return Arrays.copyOf(index, count);
	}

	/**
	 * @return a list of indices (to prevent changes outside)
	 */
	public List<Integer> getIndexList() {
		List<Integer> res = new ArrayList<>((int) (count * 1.5));
		for (int i = 0; i < count; i++)
			res.add(index[i]);

		return res;
	}

	/**
	 * @return number of entries in the sparse structure (including explicitly
	 *         stored zeros)
	 */
	public int getCount() {
		return count;
	}

	/**
	 * Set a value to entry [idx]
	 */
	public void set(int idx, double val) {
		check(idx);

		int i = getIndex(idx);
		data[i] = val;
	}

	/**
	 * Add a value to entry [idx]
	 */
	public void add(int idx, double val) {
		check(idx);

		int i = getIndex(idx);
		data[i] += val;
	}

	/**
	 * Retrieve a value at entry [idx]; 0 if the entry is not stored
	 */
	public double get(int idx) {
		check(idx);

		int i = Arrays.binarySearch(index, 0, count, idx);

		return i >= 0 ? data[i] : 0;
	}

	/**
	 * @return inner product with a given sparse vector
	 */
	public double inner(SparseVector vec) {
		double res = 0;
		for (int idx : this.getIndex()) {
			if (vec.contains(idx))
				res += get(idx) * vec.get(idx);
		}

		return res;
	}

	/**
	 * @return inner product with a given dense vector
	 */
	public double inner(DenseVector vec) {
		double res = 0;
		for (int idx : this.getIndex())
			res += get(idx) * vec.get(idx);

		return res;
	}

	/**
	 * @return sum of vector entries
	 */
	public double sum() {
		return Stats.sum(data);
	}

	/**
	 * @return mean of vector entries
	 */
	public double mean() {
		return sum() / count;
	}

	/**
	 * @return the cardinality of the sparse vector, i.e., the number of
	 *         stored entries whose value is non-zero
	 */
	public int size() {
		int num = 0;
		for (VectorEntry ve : this)
			if (ve.get() != 0)
				num++;

		return num;
	}

	/**
	 * Checks that the index is within [0, capacity)
	 */
	protected void check(int idx) {
		if (idx < 0)
			throw new IndexOutOfBoundsException("index is negative (" + idx + ")");
		if (idx >= capacity)
			throw new IndexOutOfBoundsException("index >= size (" + idx + " >= " + capacity + ")");
	}

	/**
	 * Tries to find the index. If it is not found, a reallocation is done, and
	 * a new index is returned.
	 */
	private int getIndex(int idx) {
		// Try to find column index
		int i = Arrays.binarySearch(index, 0, count, idx);

		// Found
		if (i >= 0 && index[i] == idx)
			return i;

		int[] newIndex = index;
		double[] newData = data;

		// get insert position
		i = -(i + 1);

		// Check available memory
		if (++count > data.length) {

			// If zero-length, use new length of 1, else double the bandwidth
			int newLength = data.length != 0 ? data.length << 1 : 1;

			// Copy existing data into new arrays
			newIndex = new int[newLength];
			newData = new double[newLength];
			System.arraycopy(index, 0, newIndex, 0, i);
			System.arraycopy(data, 0, newData, 0, i);
		}

		// All ok, make room for insertion by shifting the tail right by one
		System.arraycopy(index, i, newIndex, i + 1, count - i - 1);
		System.arraycopy(data, i, newData, i + 1, count - i - 1);

		// Put in new structure
		newIndex[i] = idx;
		newData[i] = 0.;

		// Update pointers
		index = newIndex;
		data = newData;

		// Return insertion index
		return i;
	}

	public Iterator<VectorEntry> iterator() {
		return new SparseVecIterator();
	}

	/**
	 * Iterator over a sparse vector
	 */
	private class SparseVecIterator implements Iterator<VectorEntry> {

		private int cursor;

		private final SparseVecEntry entry = new SparseVecEntry();

		public boolean hasNext() {
			return cursor < count;
		}

		public VectorEntry next() {
			entry.update(cursor);
			cursor++;

			return entry;
		}

		// "removal" only zeroes the value; the slot itself remains stored
		public void remove() {
			entry.set(0);
		}
	}

	/**
	 * Entry of a sparse vector
	 */
	private class SparseVecEntry implements VectorEntry {

		private int cursor;

		public void update(int cursor) {
			this.cursor = cursor;
		}

		public int index() {
			return index[cursor];
		}

		public double get() {
			return data[cursor];
		}

		public void set(double value) {
			data[cursor] = value;
		}
	}

	@Override
	public String toString() {
		StringBuilder sb = new StringBuilder();
		sb.append(String.format("%d\t%d\n", new Object[] { capacity, count }));

		for (VectorEntry ve : this)
			if (ve.get() != 0)
				sb.append(String.format("%d\t%f\n", new Object[] { ve.index(), ve.get() }));

		return sb.toString();
	}

	/**
	 * @return a map of {index, data} of the sparse vector, omitting zero
	 *         values
	 */
	public Map<Integer, Double> toMap() {
		Map<Integer, Double> map = new HashMap<>();

		for (int i = 0; i < count; i++) {
			int idx = index[i];
			double val = data[i];
			if (val != 0)
				map.put(idx, val);
		}

		return map;
	}
}
| 7,243 | 18.846575 | 90 | java |
librec | librec-master/librec/src/main/java/librec/data/SVD.java | // Copyright (C) 2014-2015 Guibing Guo
//
// This file is part of LibRec.
//
// LibRec is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// LibRec is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with LibRec. If not, see <http://www.gnu.org/licenses/>.
//
package librec.data;
import librec.util.Maths;
/**
 * <strong>Singular Value Decomposition: adapted from the JAMA implementations</strong><br>
 * 
 * <p>
 * For an m-by-n matrix A with m >= n, the singular value decomposition is an m-by-n orthogonal matrix U, an n-by-n
 * diagonal matrix S, and an n-by-n orthogonal matrix V so that A = U*S*V'. Note that this implementation requires m>=n.
 * Otherwise, you'd better use the transpose of a matrix.
 * </p>
 * <p>
 * The singular values, sigma[k] = S[k][k], are ordered so that sigma[0] >= sigma[1] >= ... >= sigma[n-1].
 * </p>
 * <p>
 * NOTE(review): the non-short-circuit operators {@code &}/{@code |} below are
 * inherited from the original JAMA code and are kept deliberately.
 * </p>
 */
public class SVD {

	/**
	 * Arrays for internal storage of U and V.
	 */
	private double[][] U, V;

	/**
	 * Array for internal storage of singular values.
	 */
	private double[] sigma;

	/**
	 * Row and column dimensions.
	 */
	private int m, n;

	/**
	 * Construct the singular value decomposition Structure to access U, S and V.
	 * The input matrix is cloned first, so {@code mat} itself is left unmodified.
	 * 
	 * @param mat
	 *            Rectangular matrix
	 */
	public SVD(DenseMatrix mat) {

		// Derived from LINPACK code.
		// Initialize.
		DenseMatrix matClone = mat.clone();
		double[][] A = matClone.data;
		m = matClone.numRows;
		n = matClone.numColumns;

		/*
		 * Apparently the failing cases are only a proper subset of (m<n), so let's not throw error.
		 */
		int nu = Math.min(m, n);
		sigma = new double[Math.min(m + 1, n)];
		U = new double[m][nu];
		V = new double[n][n];

		// e holds the super-diagonal during bidiagonalization; work is scratch space
		double[] e = new double[n];
		double[] work = new double[m];

		// Reduce A to bidiagonal form, storing the diagonal elements
		// in s and the super-diagonal elements in e.
		int nct = Math.min(m - 1, n);
		int nrt = Math.max(0, Math.min(n - 2, m));
		for (int k = 0; k < Math.max(nct, nrt); k++) {
			if (k < nct) {

				// Compute the transformation for the k-th column and
				// place the k-th diagonal in s[k].
				// Compute 2-norm of k-th column without under/overflow.
				sigma[k] = 0;
				for (int i = k; i < m; i++) {
					sigma[k] = Maths.hypot(sigma[k], A[i][k]);
				}
				if (sigma[k] != 0.0) {
					if (A[k][k] < 0.0) {
						sigma[k] = -sigma[k];
					}
					for (int i = k; i < m; i++) {
						A[i][k] /= sigma[k];
					}
					A[k][k] += 1.0;
				}
				sigma[k] = -sigma[k];
			}
			for (int j = k + 1; j < n; j++) {
				if ((k < nct) & (sigma[k] != 0.0)) {

					// Apply the transformation.
					double t = 0;
					for (int i = k; i < m; i++) {
						t += A[i][k] * A[i][j];
					}
					t = -t / A[k][k];
					for (int i = k; i < m; i++) {
						A[i][j] += t * A[i][k];
					}
				}

				// Place the k-th row of A into e for the
				// subsequent calculation of the row transformation.
				e[j] = A[k][j];
			}
			if (k < nct) {

				// Place the transformation in U for subsequent back
				// multiplication.
				for (int i = k; i < m; i++) {
					U[i][k] = A[i][k];
				}
			}
			if (k < nrt) {

				// Compute the k-th row transformation and place the
				// k-th super-diagonal in e[k].
				// Compute 2-norm without under/overflow.
				e[k] = 0;
				for (int i = k + 1; i < n; i++) {
					e[k] = Maths.hypot(e[k], e[i]);
				}
				if (e[k] != 0.0) {
					if (e[k + 1] < 0.0) {
						e[k] = -e[k];
					}
					for (int i = k + 1; i < n; i++) {
						e[i] /= e[k];
					}
					e[k + 1] += 1.0;
				}
				e[k] = -e[k];
				if ((k + 1 < m) & (e[k] != 0.0)) {

					// Apply the transformation.
					for (int i = k + 1; i < m; i++) {
						work[i] = 0.0;
					}
					for (int j = k + 1; j < n; j++) {
						for (int i = k + 1; i < m; i++) {
							work[i] += e[j] * A[i][j];
						}
					}
					for (int j = k + 1; j < n; j++) {
						double t = -e[j] / e[k + 1];
						for (int i = k + 1; i < m; i++) {
							A[i][j] += t * work[i];
						}
					}
				}

				// Place the transformation in V for subsequent
				// back multiplication.
				for (int i = k + 1; i < n; i++) {
					V[i][k] = e[i];
				}
			}
		}

		// Set up the final bidiagonal matrix or order p.
		int p = Math.min(n, m + 1);
		if (nct < n) {
			sigma[nct] = A[nct][nct];
		}
		if (m < p) {
			sigma[p - 1] = 0.0;
		}
		if (nrt + 1 < p) {
			e[nrt] = A[nrt][p - 1];
		}
		e[p - 1] = 0.0;

		// Generate U
		for (int j = nct; j < nu; j++) {
			for (int i = 0; i < m; i++) {
				U[i][j] = 0.0;
			}
			U[j][j] = 1.0;
		}
		for (int k = nct - 1; k >= 0; k--) {
			if (sigma[k] != 0.0) {
				for (int j = k + 1; j < nu; j++) {
					double t = 0;
					for (int i = k; i < m; i++) {
						t += U[i][k] * U[i][j];
					}
					t = -t / U[k][k];
					for (int i = k; i < m; i++) {
						U[i][j] += t * U[i][k];
					}
				}
				for (int i = k; i < m; i++) {
					U[i][k] = -U[i][k];
				}
				U[k][k] = 1.0 + U[k][k];
				for (int i = 0; i < k - 1; i++) {
					U[i][k] = 0.0;
				}
			} else {
				for (int i = 0; i < m; i++) {
					U[i][k] = 0.0;
				}
				U[k][k] = 1.0;
			}
		}

		// Generate V
		for (int k = n - 1; k >= 0; k--) {
			if ((k < nrt) & (e[k] != 0.0)) {
				for (int j = k + 1; j < nu; j++) {
					double t = 0;
					for (int i = k + 1; i < n; i++) {
						t += V[i][k] * V[i][j];
					}
					t = -t / V[k + 1][k];
					for (int i = k + 1; i < n; i++) {
						V[i][j] += t * V[i][k];
					}
				}
			}
			for (int i = 0; i < n; i++) {
				V[i][k] = 0.0;
			}
			V[k][k] = 1.0;
		}

		// Main iteration loop for the singular values.
		int pp = p - 1;
		// iteration counter of the QR stage; reset on each convergence.
		// NOTE(review): as in JAMA, there is no max-iteration guard.
		int iter = 0;
		double eps = Math.pow(2.0, -52.0);
		double tiny = Math.pow(2.0, -966.0);
		while (p > 0) {
			int k, kase;

			// Here is where a test for too many iterations would go.

			// This section of the program inspects for
			// negligible elements in the s and e arrays. On
			// completion the variables kase and k are set as follows.

			// kase = 1 if s(p) and e[k-1] are negligible and k<p
			// kase = 2 if s(k) is negligible and k<p
			// kase = 3 if e[k-1] is negligible, k<p, and
			// s(k), ..., s(p) are not negligible (qr step).
			// kase = 4 if e(p-1) is negligible (convergence).

			for (k = p - 2; k >= -1; k--) {
				if (k == -1) {
					break;
				}
				if (Math.abs(e[k]) <= tiny + eps * (Math.abs(sigma[k]) + Math.abs(sigma[k + 1]))) {
					e[k] = 0.0;
					break;
				}
			}
			if (k == p - 2) {
				kase = 4;
			} else {
				int ks;
				for (ks = p - 1; ks >= k; ks--) {
					if (ks == k) {
						break;
					}
					double t = (ks != p ? Math.abs(e[ks]) : 0.) + (ks != k + 1 ? Math.abs(e[ks - 1]) : 0.);
					if (Math.abs(sigma[ks]) <= tiny + eps * t) {
						sigma[ks] = 0.0;
						break;
					}
				}
				if (ks == k) {
					kase = 3;
				} else if (ks == p - 1) {
					kase = 1;
				} else {
					kase = 2;
					k = ks;
				}
			}
			k++;

			// Perform the task indicated by kase.
			switch (kase) {

			// Deflate negligible s(p).
			case 1: {
				double f = e[p - 2];
				e[p - 2] = 0.0;
				for (int j = p - 2; j >= k; j--) {
					double t = Maths.hypot(sigma[j], f);
					double cs = sigma[j] / t;
					double sn = f / t;
					sigma[j] = t;
					if (j != k) {
						f = -sn * e[j - 1];
						e[j - 1] = cs * e[j - 1];
					}
					for (int i = 0; i < n; i++) {
						t = cs * V[i][j] + sn * V[i][p - 1];
						V[i][p - 1] = -sn * V[i][j] + cs * V[i][p - 1];
						V[i][j] = t;
					}
				}
			}
				break;

			// Split at negligible s(k).
			case 2: {
				double f = e[k - 1];
				e[k - 1] = 0.0;
				for (int j = k; j < p; j++) {
					double t = Maths.hypot(sigma[j], f);
					double cs = sigma[j] / t;
					double sn = f / t;
					sigma[j] = t;
					f = -sn * e[j];
					e[j] = cs * e[j];
					for (int i = 0; i < m; i++) {
						t = cs * U[i][j] + sn * U[i][k - 1];
						U[i][k - 1] = -sn * U[i][j] + cs * U[i][k - 1];
						U[i][j] = t;
					}
				}
			}
				break;

			// Perform one qr step.
			case 3: {

				// Calculate the shift.
				double scale = Math.max(Math.max(
						Math.max(Math.max(Math.abs(sigma[p - 1]), Math.abs(sigma[p - 2])), Math.abs(e[p - 2])),
						Math.abs(sigma[k])), Math.abs(e[k]));
				double sp = sigma[p - 1] / scale;
				double spm1 = sigma[p - 2] / scale;
				double epm1 = e[p - 2] / scale;
				double sk = sigma[k] / scale;
				double ek = e[k] / scale;
				double b = ((spm1 + sp) * (spm1 - sp) + epm1 * epm1) / 2.0;
				double c = (sp * epm1) * (sp * epm1);
				double shift = 0.0;
				if ((b != 0.0) | (c != 0.0)) {
					shift = Math.sqrt(b * b + c);
					if (b < 0.0) {
						shift = -shift;
					}
					shift = c / (b + shift);
				}
				double f = (sk + sp) * (sk - sp) + shift;
				double g = sk * ek;

				// Chase zeros.
				for (int j = k; j < p - 1; j++) {
					double t = Maths.hypot(f, g);
					double cs = f / t;
					double sn = g / t;
					if (j != k) {
						e[j - 1] = t;
					}
					f = cs * sigma[j] + sn * e[j];
					e[j] = cs * e[j] - sn * sigma[j];
					g = sn * sigma[j + 1];
					sigma[j + 1] = cs * sigma[j + 1];
					for (int i = 0; i < n; i++) {
						t = cs * V[i][j] + sn * V[i][j + 1];
						V[i][j + 1] = -sn * V[i][j] + cs * V[i][j + 1];
						V[i][j] = t;
					}
					t = Maths.hypot(f, g);
					cs = f / t;
					sn = g / t;
					sigma[j] = t;
					f = cs * e[j] + sn * sigma[j + 1];
					sigma[j + 1] = -sn * e[j] + cs * sigma[j + 1];
					g = sn * e[j + 1];
					e[j + 1] = cs * e[j + 1];
					if (j < m - 1) {
						for (int i = 0; i < m; i++) {
							t = cs * U[i][j] + sn * U[i][j + 1];
							U[i][j + 1] = -sn * U[i][j] + cs * U[i][j + 1];
							U[i][j] = t;
						}
					}
				}
				e[p - 2] = f;
				iter = iter + 1;
			}
				break;

			// Convergence.
			case 4: {

				// Make the singular values positive.
				if (sigma[k] <= 0.0) {
					sigma[k] = (sigma[k] < 0.0 ? -sigma[k] : 0.0);
					for (int i = 0; i <= pp; i++) {
						V[i][k] = -V[i][k];
					}
				}

				// Order the singular values.
				while (k < pp) {
					if (sigma[k] >= sigma[k + 1]) {
						break;
					}
					double t = sigma[k];
					sigma[k] = sigma[k + 1];
					sigma[k + 1] = t;
					if (k < n - 1) {
						for (int i = 0; i < n; i++) {
							t = V[i][k + 1];
							V[i][k + 1] = V[i][k];
							V[i][k] = t;
						}
					}
					if (k < m - 1) {
						for (int i = 0; i < m; i++) {
							t = U[i][k + 1];
							U[i][k + 1] = U[i][k];
							U[i][k] = t;
						}
					}
					k++;
				}
				iter = 0;
				p--;
			}
				break;
			}
		}
	}

	/**
	 * Return the left singular vectors
	 * 
	 * @return U
	 */
	public DenseMatrix getU() {
		return new DenseMatrix(U, m, Math.min(m + 1, n));
	}

	/**
	 * Return the right singular vectors
	 * 
	 * @return V
	 */
	public DenseMatrix getV() {
		return new DenseMatrix(V, n, n);
	}

	/**
	 * Return the one-dimensional array of singular values
	 * 
	 * @return diagonal of S.
	 */
	public double[] getSingularValues() {
		return sigma;
	}

	/**
	 * Return the diagonal matrix of singular values
	 * 
	 * @return S
	 */
	public DenseMatrix getS() {
		DenseMatrix res = new DenseMatrix(n, n);
		for (int i = 0; i < n; i++) {
			res.set(i, i, sigma[i]);
		}

		return res;
	}

	/**
	 * Two norm
	 * 
	 * @return max(S)
	 */
	public double norm2() {
		return sigma[0];
	}

	/**
	 * Two norm condition number
	 * 
	 * @return max(S)/min(S)
	 */
	public double cond() {
		return sigma[0] / sigma[Math.min(m, n) - 1];
	}

	/**
	 * Effective numerical matrix rank
	 * 
	 * @return Number of nonnegligible singular values.
	 */
	public int rank() {
		double eps = Math.pow(2.0, -52.0);
		// tolerance scales with the matrix size and the largest singular value
		double tol = Math.max(m, n) * sigma[0] * eps;
		int r = 0;
		for (int i = 0; i < sigma.length; i++) {
			if (sigma[i] > tol) {
				r++;
			}
		}
		return r;
	}
}
| 12,101 | 21.369686 | 120 | java |
librec | librec-master/librec/src/main/java/librec/data/TensorEntry.java | // Copyright (C) 2014 Guibing Guo
//
// This file is part of LibRec.
//
// LibRec is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// LibRec is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with LibRec. If not, see <http://www.gnu.org/licenses/>.
//
package librec.data;
/**
 * An entry of a tensor: a cursor-style view positioned at one cell of the
 * underlying tensor, exposing its keys (one index per dimension) and value.
 */
public interface TensorEntry {

	/**
	 * @param d
	 *            the dimension of interest
	 * @return the index of dimension d
	 */
	int key(int d);

	/**
	 * @return entry keys, one index per dimension
	 */
	int[] keys();

	/**
	 * @return the value at the current index
	 */
	double get();

	/**
	 * remove current entry
	 */
	void remove();

	/**
	 * Sets the value at the current index
	 *
	 * @param value
	 *            the value to set
	 */
	void set(double value);
}
| 1,103 | 20.230769 | 71 | java |
librec | librec-master/librec/src/main/java/librec/data/DataConvertor.java | // Copyright (C) 2014 Guibing Guo
//
// This file is part of LibRec.
//
// LibRec is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// LibRec is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with LibRec. If not, see <http://www.gnu.org/licenses/>.
//
package librec.data;
import java.io.BufferedReader;
import java.util.ArrayList;
import java.util.List;
import librec.util.FileIO;
/**
 * A data convertor class to convert a data file from one source format to a target format (i.e., our supporting format)
 * of {@code UserId ItemId Rating}, separated by " \t,"<br>
 * 
 * @author guoguibing
 * 
 */
public class DataConvertor {

	// pathes to source, target files
	private String sourcePath, targetPath;

	// number of converted lines buffered in memory before being flushed to disk
	private static final int BUFFER_LINES = 1000;

	/**
	 * @param sourcePath
	 *            path to the source file
	 * @param targetPath
	 *            path to the target file; any existing file at this path is deleted
	 */
	public DataConvertor(String sourcePath, String targetPath) throws Exception {
		this.sourcePath = sourcePath;
		this.targetPath = targetPath;

		// clean the target file
		FileIO.deleteFile(targetPath);
	}

	/**
	 * Convert a data file separated by {@code sep} to a data file separated by {@code toSep}
	 * 
	 * @param sep
	 *            separator of the source file; NOTE: treated as a regular expression by replaceAll
	 * @param toSep
	 *            separator of the target file
	 */
	public void cvtSeparator(String sep, String toSep) throws Exception {
		// try-with-resources guarantees the reader is closed even if writing fails
		try (BufferedReader br = FileIO.getReader(sourcePath)) {
			String line = null;
			List<String> lines = new ArrayList<>();

			while ((line = br.readLine()) != null) {
				lines.add(line.replaceAll(sep, toSep));

				if (lines.size() >= BUFFER_LINES) {
					FileIO.writeList(targetPath, lines, true);
					lines.clear();
				}
			}

			// flush the tail of the buffer
			if (lines.size() > 0)
				FileIO.writeList(targetPath, lines, true);
		}
	}

	/**
	 * <p>
	 * Source File Format:<br>
	 * First Line: {@code UserID Sep #Ratings}<br>
	 * Other Lines: {@code ItemID Sep2 Rating}<br>
	 * </p>
	 * 
	 * @param sep1
	 *            the separator of the first line; NOTE: treated as a regular expression by split
	 * @param sep2
	 *            the separator of the other lines; NOTE: treated as a regular expression by split
	 */
	public void cvtFirstLines(String sep1, String sep2) throws Exception {
		try (BufferedReader br = FileIO.getReader(sourcePath)) {
			String line = null, userId = null;
			List<String> lines = new ArrayList<>();

			while ((line = br.readLine()) != null) {
				String[] vals = line.split(sep1);
				if (vals.length > 1) {
					// first line: remember the user id for the rating lines that follow
					userId = vals[0];
				} else if ((vals = line.split(sep2)).length > 1) {
					// other lines: for the train data set
					lines.add(userId + " " + vals[0] + " " + vals[1]);
				} else {
					// other lines: for the test data set (item id only)
					lines.add(userId + " " + vals[0]);
				}

				if (lines.size() >= BUFFER_LINES) {
					FileIO.writeList(targetPath, lines, true);
					lines.clear();
				}
			}

			// flush the tail of the buffer
			if (lines.size() > 0)
				FileIO.writeList(targetPath, lines, true);
		}
	}

	public void setSourcePath(String sourcePath) {
		this.sourcePath = sourcePath;
	}

	public void setTargetPath(String targetPath) {
		this.targetPath = targetPath;
	}
}
| 3,552 | 24.561151 | 120 | java |
librec | librec-master/librec/src/main/java/librec/data/SymmMatrix.java | // Copyright (C) 2014 Guibing Guo
//
// This file is part of LibRec.
//
// LibRec is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// LibRec is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with LibRec. If not, see <http://www.gnu.org/licenses/>.
//
package librec.data;
import java.io.Serializable;
import com.google.common.collect.HashBasedTable;
import com.google.common.collect.Table;
/**
 * Data Structure, Lower Symmetric Matrix
 * 
 * <p>
 * Only the lower triangle (row >= col) is physically stored; accessors accept
 * indices in either order.
 * </p>
 * 
 * @author guoguibing
 * 
 */
public class SymmMatrix implements Serializable {

	private static final long serialVersionUID = -6138247653152029007L;

	// matrix dimension
	protected int dim;

	// matrix data; only cells with row >= col are stored
	Table<Integer, Integer, Double> data;

	/**
	 * Construct a symmetric matrix
	 * 
	 * @param dim
	 *            dimension of the (square) matrix
	 */
	public SymmMatrix(int dim) {
		this.dim = dim;

		data = HashBasedTable.create(); // do not specify the size here as a sparse matrix
	}

	/**
	 * Construct a symmetric matrix by deeply copying data from a given matrix
	 * 
	 * @param mat
	 *            matrix to copy data from
	 */
	public SymmMatrix(SymmMatrix mat) {
		dim = mat.dim;
		data = HashBasedTable.create(mat.data);
	}

	/**
	 * Make a deep copy of current matrix
	 */
	@Override
	public SymmMatrix clone() {
		return new SymmMatrix(this);
	}

	/**
	 * Get a value at entry (row, col); symmetric, so (col, row) is consulted as
	 * a fallback
	 * 
	 * @return the stored value, or 0.0 if the entry is absent
	 */
	public double get(int row, int col) {

		if (data.contains(row, col))
			return data.get(row, col);
		else if (data.contains(col, row))
			return data.get(col, row);

		return 0.0;
	}

	/**
	 * set a value to entry (row, col); stored in the lower triangle
	 */
	public void set(int row, int col, double val) {
		if (row >= col)
			data.put(row, col, val);
		else
			data.put(col, row, val);
	}

	/**
	 * add a value to entry (row, col); both branches read the same stored cell
	 * since get() is order-insensitive
	 */
	public void add(int row, int col, double val) {
		if (row >= col)
			data.put(row, col, val + get(row, col));
		else
			data.put(col, row, val + get(col, row));
	}

	/**
	 * Retrieve a complete row of similar items
	 * 
	 * @param row
	 *            row id
	 * @return a sparse vector holding the non-zero entries of the row
	 */
	public SparseVector row(int row) {
		SparseVector res = new SparseVector(dim);
		for (int col = 0; col < dim; col++) {
			double val = get(row, col);
			if (val != 0)
				res.set(col, val);
		}

		return res;
	}

	@Override
	public String toString() {
		return "Dimension: " + dim + " x " + dim + "\n" + data.toString();
	}
}
| 2,628 | 21.470085 | 84 | java |
librec | librec-master/librec/src/main/java/librec/data/SparseMatrix.java | // Copyright (C) 2014 Guibing Guo
//
// This file is part of LibRec.
//
// LibRec is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// LibRec is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with LibRec. If not, see <http://www.gnu.org/licenses/>.
//
package librec.data;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import librec.util.Stats;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import com.google.common.collect.HashBasedTable;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimap;
import com.google.common.collect.Table;
import com.google.common.collect.Table.Cell;
/**
* Data Structure: Sparse Matrix whose implementation is modified from M4J library
*
* <ul>
* <li><a href="http://netlib.org/linalg/html_templates/node91.html">Compressed Row Storage (CRS)</a></li>
* <li><a href="http://netlib.org/linalg/html_templates/node92.html">Compressed Col Storage (CCS)</a></li>
* </ul>
*
* @author guoguibing
*
*/
public class SparseMatrix implements Iterable<MatrixEntry>, Serializable {
private static final long serialVersionUID = 8024536511172609539L;
// matrix dimension
protected int numRows, numColumns;
// Compressed Row Storage (CRS)
protected double[] rowData;
protected int[] rowPtr, colInd;
// Compressed Col Storage (CCS)
protected double[] colData;
protected int[] colPtr, rowInd;
	/**
	 * Construct a sparse matrix with both CRS and CCS structures
	 * 
	 * @param rows
	 *            number of rows
	 * @param cols
	 *            number of columns
	 * @param dataTable
	 *            (row, column, value) cells of the matrix
	 * @param colMap
	 *            column-to-rows multimap used to speed up building the CCS structure
	 */
	public SparseMatrix(int rows, int cols, Table<Integer, Integer, ? extends Number> dataTable,
			Multimap<Integer, Integer> colMap) {
		numRows = rows;
		numColumns = cols;

		construct(dataTable, colMap);
	}

	/**
	 * Construct a sparse matrix with only CRS structures
	 * (the CCS structure is still built, via the slower dataTable.column lookup)
	 */
	public SparseMatrix(int rows, int cols, Table<Integer, Integer, ? extends Number> dataTable) {
		this(rows, cols, dataTable, null);
	}

	/**
	 * Define a sparse matrix without data, only use for {@code transpose} method
	 * 
	 */
	private SparseMatrix(int rows, int cols) {
		numRows = rows;
		numColumns = cols;
	}
	/**
	 * Construct a sparse matrix from another sparse matrix
	 * 
	 * @param mat
	 *            the original sparse matrix whose CRS and CCS structures are deeply copied
	 */
	public SparseMatrix(SparseMatrix mat) {
		numRows = mat.numRows;
		numColumns = mat.numColumns;

		copyCRS(mat.rowData, mat.rowPtr, mat.colInd);

		copyCCS(mat.colData, mat.colPtr, mat.rowInd);
	}
private void copyCRS(double[] data, int[] ptr, int[] idx) {
rowData = new double[data.length];
for (int i = 0; i < rowData.length; i++)
rowData[i] = data[i];
rowPtr = new int[ptr.length];
for (int i = 0; i < rowPtr.length; i++)
rowPtr[i] = ptr[i];
colInd = new int[idx.length];
for (int i = 0; i < colInd.length; i++)
colInd[i] = idx[i];
}
private void copyCCS(double[] data, int[] ptr, int[] idx) {
colData = new double[data.length];
for (int i = 0; i < colData.length; i++)
colData[i] = data[i];
colPtr = new int[ptr.length];
for (int i = 0; i < colPtr.length; i++)
colPtr[i] = ptr[i];
rowInd = new int[idx.length];
for (int i = 0; i < rowInd.length; i++)
rowInd[i] = idx[i];
}
	/**
	 * Make a deep clone of current matrix (both CRS and CCS structures are copied)
	 */
	public SparseMatrix clone() {
		return new SparseMatrix(this);
	}
/**
* @return the transpose of current matrix
*/
public SparseMatrix transpose() {
SparseMatrix tr = new SparseMatrix(numColumns, numRows);
tr.copyCRS(this.rowData, this.rowPtr, this.colInd);
tr.copyCCS(this.colData, this.colPtr, this.rowInd);
return tr;
}
	/**
	 * @return the row pointers of CRS structure
	 */
	public int[] getRowPointers() {
		return rowPtr;
	}

	/**
	 * @return the column indices of CRS structure (one per stored value)
	 */
	public int[] getColumnIndices() {
		return colInd;
	}
/**
* @return the cardinary of current matrix
*/
public int size() {
int size = 0;
for (MatrixEntry me : this)
if (me.get() != 0)
size++;
return size;
}
/**
* @return the data table of this matrix as (row, column, value) cells
*/
public Table<Integer, Integer, Double> getDataTable() {
Table<Integer, Integer, Double> res = HashBasedTable.create();
for (MatrixEntry me : this) {
if (me.get() != 0)
res.put(me.row(), me.column(), me.get());
}
return res;
}
	/**
	 * Construct a sparse matrix: builds the CRS and CCS index structures from
	 * the cells of {@code dataTable}, then writes the values into both copies.
	 * 
	 * @param dataTable
	 *            data table of (row, column, value) cells
	 * @param columnStructure
	 *            optional column-to-rows multimap; when non-null it is used to
	 *            enumerate the rows of each column faster than dataTable.column
	 */
	private void construct(Table<Integer, Integer, ? extends Number> dataTable,
			Multimap<Integer, Integer> columnStructure) {
		int nnz = dataTable.size();

		// CRS
		rowPtr = new int[numRows + 1];
		colInd = new int[nnz];
		rowData = new double[nnz];

		int j = 0;
		for (int i = 1; i <= numRows; ++i) {
			Set<Integer> cols = dataTable.row(i - 1).keySet();
			rowPtr[i] = rowPtr[i - 1] + cols.size();

			for (int col : cols) {
				colInd[j++] = col;
				if (col < 0 || col >= numColumns)
					throw new IllegalArgumentException("colInd[" + j + "]=" + col
							+ ", which is not a valid column index");
			}

			// keep each row's column indices sorted so that binary search works
			Arrays.sort(colInd, rowPtr[i - 1], rowPtr[i]);
		}

		// CCS
		colPtr = new int[numColumns + 1];
		rowInd = new int[nnz];
		colData = new double[nnz];

		j = 0;
		for (int i = 1; i <= numColumns; ++i) {
			// dataTable.col(i-1) is more time-consuming than columnStructure.get(i-1)
			Collection<Integer> rows = columnStructure != null ? columnStructure.get(i - 1) : dataTable.column(i - 1)
					.keySet();
			colPtr[i] = colPtr[i - 1] + rows.size();

			for (int row : rows) {
				rowInd[j++] = row;
				if (row < 0 || row >= numRows)
					throw new IllegalArgumentException("rowInd[" + j + "]=" + row + ", which is not a valid row index");
			}

			// keep each column's row indices sorted as well
			Arrays.sort(rowInd, colPtr[i - 1], colPtr[i]);
		}

		// set data
		for (Cell<Integer, Integer, ? extends Number> en : dataTable.cellSet()) {
			int row = en.getRowKey();
			int col = en.getColumnKey();
			double val = en.getValue().doubleValue();

			set(row, col, val);
		}
	}
	/**
	 * @return number of rows
	 */
	public int numRows() {
		return numRows;
	}

	/**
	 * @return number of columns
	 */
	public int numColumns() {
		return numColumns;
	}

	/**
	 * @return reference to the CRS value array of current matrix (not a copy;
	 *         modifications write through)
	 */
	public double[] getData() {
		return rowData;
	}
/**
* Set a value to entry [row, column]
*
* @param row
* row id
* @param column
* column id
* @param val
* value to set
*/
public void set(int row, int column, double val) {
int index = getCRSIndex(row, column);
rowData[index] = val;
index = getCCSIndex(row, column);
colData[index] = val;
}
/**
* Add a value to entry [row, column]
*
* @param row
* row id
* @param column
* column id
* @param val
* value to add
*/
public void add(int row, int column, double val) {
int index = getCRSIndex(row, column);
rowData[index] += val;
index = getCCSIndex(row, column);
colData[index] += val;
}
/**
* Retrieve value at entry [row, column]
*
* @param row
* row id
* @param column
* column id
* @return value at entry [row, column]
*/
public double get(int row, int column) {
int index = Arrays.binarySearch(colInd, rowPtr[row], rowPtr[row + 1], column);
if (index >= 0)
return rowData[index];
else
return 0;
}
/**
* get a row sparse vector of a matrix
*
* @param row
* row id
* @return a sparse vector of {index, value}
*
*/
public SparseVector row(int row) {
SparseVector sv = new SparseVector(numColumns);
if (row < numRows) {
for (int j = rowPtr[row]; j < rowPtr[row + 1]; j++) {
int col = colInd[j];
double val = get(row, col);
if (val != 0.0)
sv.set(col, val);
}
} // return an empty vector if the row does not exist in training matrix
return sv;
}
/**
* get columns of a specific row where (row, column) entries are non-zero
*
* @param row
* row id
* @return a list of column index
*/
public List<Integer> getColumns(int row) {
List<Integer> res = new ArrayList<>();
if (row < numRows) {
for (int j = rowPtr[row]; j < rowPtr[row + 1]; j++) {
int col = colInd[j];
double val = get(row, col);
if (val != 0.0)
res.add(col);
}
}
return res;
}
	/**
	 * create a row cache of a matrix in {row, row-specific vector}
	 * 
	 * @param cacheSpec
	 *            cache specification (Guava CacheBuilderSpec string)
	 * @return a matrix row cache in {row, row-specific vector}
	 */
	public LoadingCache<Integer, SparseVector> rowCache(String cacheSpec) {
		LoadingCache<Integer, SparseVector> cache = CacheBuilder.from(cacheSpec).build(
				new CacheLoader<Integer, SparseVector>() {

					@Override
					public SparseVector load(Integer rowId) throws Exception {
						return row(rowId);
					}
				});

		return cache;
	}

	/**
	 * create a row cache of a matrix in {row, row-specific columns}
	 * 
	 * @param cacheSpec
	 *            cache specification (Guava CacheBuilderSpec string)
	 * @return a matrix row cache in {row, row-specific columns}
	 */
	public LoadingCache<Integer, List<Integer>> rowColumnsCache(String cacheSpec) {
		LoadingCache<Integer, List<Integer>> cache = CacheBuilder.from(cacheSpec).build(
				new CacheLoader<Integer, List<Integer>>() {

					@Override
					public List<Integer> load(Integer rowId) throws Exception {
						return getColumns(rowId);
					}
				});

		return cache;
	}

	/**
	 * create a column cache of a matrix
	 * 
	 * @param cacheSpec
	 *            cache specification (Guava CacheBuilderSpec string)
	 * @return a matrix column cache
	 */
	public LoadingCache<Integer, SparseVector> columnCache(String cacheSpec) {
		LoadingCache<Integer, SparseVector> cache = CacheBuilder.from(cacheSpec).build(
				new CacheLoader<Integer, SparseVector>() {

					@Override
					public SparseVector load(Integer columnId) throws Exception {
						return column(columnId);
					}
				});

		return cache;
	}

	/**
	 * create a column cache of a matrix in {column, column-specific rows}
	 * 
	 * @param cacheSpec
	 *            cache specification (Guava CacheBuilderSpec string)
	 * @return a matrix column cache in {column, column-specific rows}
	 */
	public LoadingCache<Integer, List<Integer>> columnRowsCache(String cacheSpec) {
		LoadingCache<Integer, List<Integer>> cache = CacheBuilder.from(cacheSpec).build(
				new CacheLoader<Integer, List<Integer>>() {

					@Override
					public List<Integer> load(Integer colId) throws Exception {
						return getRows(colId);
					}
				});

		return cache;
	}
/**
* get a row sparse vector of a matrix
*
* @param row
* row id
* @param except
* row id to be excluded
* @return a sparse vector of {index, value}
*
*/
public SparseVector row(int row, int except) {
SparseVector sv = new SparseVector(numColumns);
for (int j = rowPtr[row]; j < rowPtr[row + 1]; j++) {
int col = colInd[j];
if (col != except) {
double val = get(row, col);
if (val != 0.0)
sv.set(col, val);
}
}
return sv;
}
/**
* query the size of a specific row
*
* @param row
* row id
* @return the size of non-zero elements of a row
*/
public int rowSize(int row) {
int size = 0;
for (int j = rowPtr[row]; j < rowPtr[row + 1]; j++) {
int col = colInd[j];
if (get(row, col) != 0.0)
size++;
}
return size;
}
/**
* @return a list of rows which have at least one non-empty entry
*/
public List<Integer> rows() {
List<Integer> list = new ArrayList<>();
for (int row = 0; row < numRows; row++) {
for (int j = rowPtr[row]; j < rowPtr[row + 1]; j++) {
int col = colInd[j];
if (get(row, col) != 0.0) {
list.add(row);
break;
}
}
}
return list;
}
/**
* get a col sparse vector of a matrix
*
* @param col
* col id
* @return a sparse vector of {index, value}
*
*/
public SparseVector column(int col) {
SparseVector sv = new SparseVector(numRows);
if (col < numColumns) {
for (int j = colPtr[col]; j < colPtr[col + 1]; j++) {
int row = rowInd[j];
double val = get(row, col);
if (val != 0.0)
sv.set(row, val);
}
} // return an empty vector if the column does not exist in training
// matrix
return sv;
}
	/**
	 * query the size of a specific col
	 * 
	 * @param col
	 *            col id
	 * @return the size of non-zero elements of a column
	 */
	public int columnSize(int col) {

		int size = 0;

		for (int j = colPtr[col]; j < colPtr[col + 1]; j++) {
			int row = rowInd[j];
			double val = get(row, col);
			if (val != 0.0)
				size++;
		}

		return size;
	}

	/**
	 * get rows of a specific column where (row, column) entries are non-zero
	 * 
	 * @param col
	 *            column id
	 * @return a list of row index
	 */
	public List<Integer> getRows(int col) {

		List<Integer> res = new ArrayList<>();

		if (col < numColumns) {
			for (int j = colPtr[col]; j < colPtr[col + 1]; j++) {
				int row = rowInd[j];
				double val = get(row, col);
				if (val != 0.0)
					res.add(row);
			}
		}

		return res;
	}

	/**
	 * @return a list of columns which have at least one non-empty entry
	 */
	public List<Integer> columns() {
		List<Integer> list = new ArrayList<>();

		for (int col = 0; col < numColumns; col++) {
			for (int j = colPtr[col]; j < colPtr[col + 1]; j++) {
				int row = rowInd[j];
				double val = get(row, col);
				if (val != 0.0) {
					list.add(col);
					break;
				}
			}
		}

		return list;
	}
	/**
	 * @return sum of matrix data (sums the raw CRS value array, so explicitly
	 *         stored zeros contribute nothing but are iterated over)
	 */
	public double sum() {
		return Stats.sum(rowData);
	}

	/**
	 * @return mean of matrix data; the denominator is size(), which counts only
	 *         non-zero entries — NOTE(review): confirm this is the intended
	 *         average when explicit zeros are stored
	 */
	public double mean() {
		return sum() / size();
	}
	/**
	 * Normalize the matrix entries to (0, 1) by (x-min)/(max-min); zero-valued
	 * entries are left untouched
	 * 
	 * @param min
	 *            minimum value
	 * @param max
	 *            maximum value
	 */
	public void normalize(double min, double max) {
		assert max > min;

		for (MatrixEntry me : this) {
			double entry = me.get();
			if (entry != 0)
				me.set((entry - min) / (max - min));
		}
	}

	/**
	 * Normalize the matrix entries to (0, 1) by (x/max), i.e., with min fixed at 0
	 * 
	 * @param max
	 *            maximum value
	 */
	public void normalize(double max) {
		normalize(0, max);
	}
	/**
	 * Standardize the matrix entries by row- or column-wise z-scores (z=(x-u)/sigma);
	 * only the non-zero entries of each row/column are rewritten.
	 * 
	 * NOTE(review): if a row/column has zero standard deviation, the division
	 * yields NaN/Infinity — confirm the callers never feed constant rows.
	 * 
	 * @param isByRow
	 *            standardize by row if true; otherwise by column
	 */
	public void standardize(boolean isByRow) {

		int iters = isByRow ? numRows : numColumns;
		for (int iter = 0; iter < iters; iter++) {
			SparseVector vec = isByRow ? row(iter) : column(iter);

			if (vec.getCount() > 0) {

				double[] data = vec.getData();
				double mu = Stats.mean(data);
				double sigma = Stats.sd(data, mu);

				for (VectorEntry ve : vec) {
					int idx = ve.index();
					double val = ve.get();
					double z = (val - mu) / sigma;

					if (isByRow)
						this.set(iter, idx, z);
					else
						this.set(idx, iter, z);
				}
			}
		}
	}
	/**
	 * remove zero entries of the given matrix: rebuilds compacted CRS and CCS
	 * structures containing only non-zero values, then writes them back into
	 * the given matrix object (the dimensions are unchanged).
	 */
	public static void reshape(SparseMatrix mat) {
		SparseMatrix res = new SparseMatrix(mat.numRows, mat.numColumns);
		// nnz counts only non-zero entries, so the new arrays may be shorter
		int nnz = mat.size();

		// Compressed Row Storage (CRS)
		res.rowData = new double[nnz];
		res.colInd = new int[nnz];
		res.rowPtr = new int[mat.numRows + 1];

		// handle row data
		int index = 0;
		for (int i = 1; i < mat.rowPtr.length; i++) {
			for (int j = mat.rowPtr[i - 1]; j < mat.rowPtr[i]; j++) {
				// row i-1, row 0 always starts with 0
				double val = mat.rowData[j];
				int col = mat.colInd[j];
				if (val != 0) {
					res.rowData[index] = val;
					res.colInd[index] = col;

					index++;
				}
			}
			res.rowPtr[i] = index;
		}

		// Compressed Col Storage (CCS)
		res.colData = new double[nnz];
		res.rowInd = new int[nnz];
		res.colPtr = new int[mat.numColumns + 1];

		// handle column data
		index = 0;
		for (int j = 1; j < mat.colPtr.length; j++) {
			for (int i = mat.colPtr[j - 1]; i < mat.colPtr[j]; i++) {
				// column j-1, index i
				double val = mat.colData[i];
				int row = mat.rowInd[i];
				if (val != 0) {
					res.colData[index] = val;
					res.rowInd[index] = row;

					index++;
				}
			}
			res.colPtr[j] = index;
		}

		// write back to the given matrix, note that here mat is just a reference copy of the original matrix
		mat.rowData = res.rowData;
		mat.colInd = res.colInd;
		mat.rowPtr = res.rowPtr;

		mat.colData = res.colData;
		mat.rowInd = res.rowInd;
		mat.colPtr = res.colPtr;
	}
/**
 * Creates a new matrix of shape (rows, cols) holding the non-zero entries of
 * the current matrix, re-indexed by their row-major linear position: an entry
 * at linear index {@code row * numColumns + col} moves to
 * {@code (index / cols, index % cols)}.
 *
 * NOTE(review): assumes {@code rows * cols >= numRows * numColumns}; a
 * smaller target shape would yield out-of-range row indices -- confirm with
 * callers.
 *
 * @param rows
 *            number of rows of the target matrix
 * @param cols
 *            number of columns of the target matrix
 * @return a new matrix with shape (rows, cols) with data from the current matrix
 */
public SparseMatrix reshape(int rows, int cols) {
	Table<Integer, Integer, Double> data = HashBasedTable.create();
	Multimap<Integer, Integer> colMap = HashMultimap.create();
	int rowIndex, colIndex;
	for (int i = 1; i < rowPtr.length; i++) {
		for (int j = rowPtr[i - 1]; j < rowPtr[i]; j++) {
			int row = i - 1;
			int col = colInd[j];
			double val = rowData[j]; // (row, col, val)
			if (val != 0) {
				// map the old row-major position into the new shape
				int oldIndex = row * numColumns + col;
				rowIndex = oldIndex / cols;
				colIndex = oldIndex % cols;
				data.put(rowIndex, colIndex, val);
				colMap.put(colIndex, rowIndex);
			}
		}
	}
	return new SparseMatrix(rows, cols, data, colMap);
}
/**
 * Serializes this matrix as "numRows \t numColumns \t nnz" on the first
 * line, followed by one "row \t column \t value" line per non-zero entry.
 */
@Override
public String toString() {
	StringBuilder out = new StringBuilder();
	out.append(String.format("%d\t%d\t%d\n", numRows, numColumns, size()));
	for (MatrixEntry entry : this) {
		double val = entry.get();
		if (val != 0) {
			out.append(String.format("%d\t%d\t%f\n", entry.row(), entry.column(), val));
		}
	}
	return out.toString();
}
/**
 * Renders the full (dense) contents of this matrix, one tab-separated row
 * per line, preceded by a dimension header.
 *
 * @return a human-readable matrix-layout string
 */
public String matString() {
	StringBuilder out = new StringBuilder();
	out.append("Dimension: ").append(numRows).append(" x ").append(numColumns).append("\n");
	for (int row = 0; row < numRows; row++) {
		for (int col = 0; col < numColumns; col++) {
			// tab-separate values within a row (no trailing tab)
			if (col > 0)
				out.append("\t");
			out.append(get(row, col));
		}
		out.append("\n");
	}
	return out.toString();
}
/**
 * Locates the storage position of entry (row, col) inside the CRS arrays
 * via binary search over the column indices of the given row.
 *
 * @throws IndexOutOfBoundsException
 *             if the entry is not part of the sparse structure
 */
private int getCRSIndex(int row, int col) {
	int pos = Arrays.binarySearch(colInd, rowPtr[row], rowPtr[row + 1], col);
	if (pos < 0 || colInd[pos] != col)
		throw new IndexOutOfBoundsException("Entry (" + (row + 1) + ", " + (col + 1)
				+ ") is not in the matrix structure");
	return pos;
}
/**
 * Locates the storage position of entry (row, col) inside the CCS arrays
 * via binary search over the row indices of the given column.
 *
 * @throws IndexOutOfBoundsException
 *             if the entry is not part of the sparse structure
 */
private int getCCSIndex(int row, int col) {
	int pos = Arrays.binarySearch(rowInd, colPtr[col], colPtr[col + 1], row);
	if (pos < 0 || rowInd[pos] != row)
		throw new IndexOutOfBoundsException("Entry (" + (row + 1) + ", " + (col + 1)
				+ ") is not in the matrix structure");
	return pos;
}
/**
 * @return an iterator over the structural entries of this matrix in row-major
 *         (CRS) order; the returned entries write back to the row-data array
 *         only (see {@code SparseMatrixEntry#set})
 */
public Iterator<MatrixEntry> iterator() {
	return new MatrixIterator();
}
/**
 * Entry of a compressed row matrix: a reusable cursor view into the CRS
 * arrays (rowData/colInd) of the enclosing matrix. A single instance is
 * recycled by {@code MatrixIterator}, so callers must not retain an entry
 * across iteration steps.
 */
private class SparseMatrixEntry implements MatrixEntry {
	// current row index and position within rowData/colInd
	private int row, cursor;

	/**
	 * Re-points this view at the given row and storage position.
	 */
	public void update(int row, int cursor) {
		this.row = row;
		this.cursor = cursor;
	}

	public int row() {
		return row;
	}

	public int column() {
		return colInd[cursor];
	}

	public double get() {
		return rowData[cursor];
	}

	// NOTE: writes through to the CRS copy (rowData) only; the CCS copy
	// (colData) is not updated by this method.
	public void set(double value) {
		rowData[cursor] = value;
	}
}
/**
 * Iterates the structural entries in row-major (CRS) order, skipping rows
 * whose rowPtr span is empty. One {@code SparseMatrixEntry} instance is
 * reused for every {@link #next()} call.
 */
private class MatrixIterator implements Iterator<MatrixEntry> {
	// current row and position inside rowData; cursor == rowData.length means exhausted
	private int row, cursor;

	private SparseMatrixEntry entry = new SparseMatrixEntry();

	public MatrixIterator() {
		// Find first non-empty row
		nextNonEmptyRow();
	}

	/**
	 * Locates the first non-empty row, starting at the current. After the new row has been found, the cursor is
	 * also updated
	 */
	private void nextNonEmptyRow() {
		while (row < numRows && rowPtr[row] == rowPtr[row + 1])
			row++;
		// if every remaining row is empty, row == numRows and cursor == nnz
		cursor = rowPtr[row];
	}

	public boolean hasNext() {
		return cursor < rowData.length;
	}

	public MatrixEntry next() {
		entry.update(row, cursor);
		// Next position is in the same row
		if (cursor < rowPtr[row + 1] - 1)
			cursor++;
		// Next position is at the following (non-empty) row
		else {
			row++;
			nextNonEmptyRow();
		}
		return entry;
	}

	// zeroes the current entry's value in rowData rather than removing it
	// from the sparse structure
	public void remove() {
		entry.set(0);
	}
}
}
| 20,819 | 21.483801 | 110 | java |
librec | librec-master/librec/src/main/java/librec/data/UserContext.java | // Copyright (C) 2014 Guibing Guo
//
// This file is part of LibRec.
//
// LibRec is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// LibRec is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with LibRec. If not, see <http://www.gnu.org/licenses/>.
//
package librec.data;
import java.util.HashMap;
import java.util.Map;
/**
 * User-related Contextual Information
 *
 * @author guoguibing
 */
public class UserContext extends Context {

	/* lazily-created map from related user id to social tie strength */
	private Map<Integer, Double> socialMap;

	/**
	 * @param user
	 *            user id
	 * @param item
	 *            fixed as -1
	 */
	private UserContext(int user, int item) {
		super(user, -1);
	}

	public UserContext(int user) {
		this(user, -1);
	}

	/**
	 * Records a social relation of this user, creating the underlying map on
	 * first use.
	 *
	 * @param user
	 *            id of the socially related user
	 * @param val
	 *            strength of the social tie
	 */
	public void addSocial(int user, double val) {
		if (socialMap == null)
			socialMap = new HashMap<>();
		socialMap.put(user, val);
	}

	/**
	 * Looks up the strength of the social relation with the given user.
	 *
	 * @param user
	 *            user id
	 * @return the tie strength, or {@code Double.NaN} when no relation is stored
	 */
	public double getSocial(int user) {
		if (socialMap != null && socialMap.containsKey(user))
			return socialMap.get(user);
		return Double.NaN;
	}

	/**
	 * @return the socialMap; may be {@code null} if no relation was ever added
	 */
	public Map<Integer, Double> getSocialMap() {
		return socialMap;
	}
}
| 1,880 | 20.62069 | 71 | java |
librec | librec-master/librec/src/main/java/librec/data/DenseMatrix.java | // Copyright (C) 2014 Guibing Guo
//
// This file is part of LibRec.
//
// LibRec is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// LibRec is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with LibRec. If not, see <http://www.gnu.org/licenses/>.
//
package librec.data;
import java.io.Serializable;
import java.util.Arrays;
import librec.util.Randoms;
import librec.util.Strings;
/**
* Data Structure: dense matrix <br>
*
* A big reason that we do not adopt original DenseMatrix from M4J libraray is because the latter using one-dimensional
* array to store data, which will often cause OutOfMemory exception due to the limit of maximum length of a
* one-dimensional Java array.
*
* @author guoguibing
*
*/
public class DenseMatrix implements Serializable {
private static final long serialVersionUID = -2069621030647530185L;
// dimension
protected int numRows, numColumns;
// read data
protected double[][] data;
/**
* Construct a dense matrix with specified dimensions
*
* @param numRows
* number of rows
* @param numColumns
* number of columns
*/
public DenseMatrix(int numRows, int numColumns) {
this.numRows = numRows;
this.numColumns = numColumns;
data = new double[numRows][numColumns];
}
/**
* Construct a dense matrix by copying data from a given 2D array
*
* @param array
* data array
*/
public DenseMatrix(double[][] array) {
this(array.length, array[0].length);
for (int i = 0; i < numRows; i++)
for (int j = 0; j < numColumns; j++)
data[i][j] = array[i][j];
}
/**
* Construct a dense matrix by a shallow copy of a data array
*/
public DenseMatrix(double[][] array, int numRows, int numColumns) {
this.numRows = numRows;
this.numColumns = numColumns;
this.data = array;
}
/**
* Construct a dense matrix by copying data from a given matrix
*
* @param mat
* input matrix
*/
public DenseMatrix(DenseMatrix mat) {
this(mat.data);
}
/**
* Make a deep copy of current matrix
*/
public DenseMatrix clone() {
return new DenseMatrix(this);
}
/**
 * Builds a square identity matrix: ones on the main diagonal, zeros
 * elsewhere.
 *
 * @param dim
 *            dimension (number of rows and columns)
 * @return a dim x dim identity matrix
 */
public static DenseMatrix eye(int dim) {
	DenseMatrix identity = new DenseMatrix(dim, dim);
	for (int d = 0; d < dim; d++)
		identity.set(d, d, 1.0);
	return identity;
}
/**
* Initialize a dense matrix with small Guassian values <br/>
*
* <strong>NOTE:</strong> small initial values make it easier to train a model; otherwise a very small learning rate
* may be needed (especially when the number of factors is large) which can cause bad performance.
*/
public void init(double mean, double sigma) {
for (int i = 0; i < numRows; i++)
for (int j = 0; j < numColumns; j++)
data[i][j] = Randoms.gaussian(mean, sigma);
}
/**
* initialize a dense matrix with small random values in (0, range)
*/
public void init(double range) {
for (int i = 0; i < numRows; i++)
for (int j = 0; j < numColumns; j++)
data[i][j] = Randoms.uniform(0, range);
}
/**
* initialize a dense matrix with small random values in (0, 1)
*/
public void init() {
init(1.0);
}
/**
* @return number of rows
*/
public int numRows() {
return numRows;
}
/**
* @return number of columns
*/
public int numColumns() {
return numColumns;
}
/**
* @param rowId
* row id
* @return a copy of row data as a dense vector
*/
public DenseVector row(int rowId) {
return row(rowId, true);
}
/**
*
* @param rowId
* row id
* @param deep
* whether to copy data or only shallow copy for executing speedup purpose
* @return a vector of a specific row
*/
public DenseVector row(int rowId, boolean deep) {
return new DenseVector(data[rowId], deep);
}
/**
* @param column
* column id
* @return a copy of column data as a dense vector
*/
public DenseVector column(int column) {
DenseVector vec = new DenseVector(numRows);
for (int i = 0; i < numRows; i++)
vec.set(i, data[i][column]);
return vec;
}
/**
* Compute mean of a column of the current matrix
*
* @param column
* column id
* @return mean of a column of the current matrix
*/
public double columnMean(int column) {
double sum = 0.0;
for (int i = 0; i < numRows; i++)
sum += data[i][column];
return sum / numRows;
}
/**
* @return the matrix norm-2
*/
public double norm() {
double res = 0;
for (int i = 0; i < numRows; i++)
for (int j = 0; j < numColumns; j++)
res += data[i][j] * data[i][j];
return Math.sqrt(res);
}
/**
* row x row of two matrix
*
* @param m
* the first matrix
* @param mrow
* row of the first matrix
* @param n
* the second matrix
* @param nrow
* row of the second matrix
* @return inner product of two row vectors
*/
public static double rowMult(DenseMatrix m, int mrow, DenseMatrix n, int nrow) {
assert m.numColumns == n.numColumns;
double res = 0;
for (int j = 0, k = m.numColumns; j < k; j++)
res += m.get(mrow, j) * n.get(nrow, j);
return res;
}
/**
* column x column of two matrix
*
* @param m
* the first matrix
* @param mcol
* column of the first matrix
* @param n
* the second matrix
* @param ncol
* column of the second matrix
* @return inner product of two column vectors
*/
public static double colMult(DenseMatrix m, int mcol, DenseMatrix n, int ncol) {
assert m.numRows == n.numRows;
double res = 0;
for (int j = 0, k = m.numRows; j < k; j++)
res += m.get(j, mcol) * n.get(j, ncol);
return res;
}
/**
* dot product of row x col between two matrices
*
* @param m
* the first matrix
* @param mrow
* row id of the first matrix
* @param n
* the second matrix
* @param ncol
* column id of the second matrix
* @return dot product of row of the first matrix and column of the second matrix
*/
public static double product(DenseMatrix m, int mrow, DenseMatrix n, int ncol) {
assert m.numColumns == n.numRows;
double res = 0;
for (int j = 0; j < m.numColumns; j++)
res += m.get(mrow, j) * n.get(j, ncol);
return res;
}
/**
* @return Kronecker product of two arbitrary matrices
*/
public static DenseMatrix kroneckerProduct(DenseMatrix M, DenseMatrix N) {
DenseMatrix res = new DenseMatrix(M.numRows * N.numRows, M.numColumns * N.numColumns);
for (int i = 0; i < M.numRows; i++) {
for (int j = 0; j < M.numColumns; j++) {
double Mij = M.get(i, j);
// Mij*N
for (int ni = 0; ni < N.numRows; ni++) {
for (int nj = 0; nj < N.numColumns; nj++) {
int row = i * N.numRows + ni;
int col = j * N.numColumns + nj;
res.set(row, col, Mij * N.get(ni, nj));
}
}
}
}
return res;
}
/**
 * Computes the Khatri-Rao (column-wise Kronecker) product of two matrices.
 *
 * @param M
 *            the first matrix
 * @param N
 *            the second matrix, with the same number of columns as M
 * @return a matrix of shape (M.numRows * N.numRows) x M.numColumns
 * @throws Exception
 *             if the two matrices differ in their number of columns
 */
public static DenseMatrix khatriRaoProduct(DenseMatrix M, DenseMatrix N) throws Exception {
	if (M.numColumns != N.numColumns)
		throw new Exception("The number of columns of two matrices is not equal!");
	DenseMatrix res = new DenseMatrix(M.numRows * N.numRows, M.numColumns);
	for (int col = 0; col < M.numColumns; col++) {
		for (int mi = 0; mi < M.numRows; mi++) {
			double scale = M.get(mi, col);
			// each row of M contributes a block of N.numRows scaled rows
			for (int ni = 0; ni < N.numRows; ni++) {
				res.set(mi * N.numRows + ni, col, scale * N.get(ni, col));
			}
		}
	}
	return res;
}
/**
* @return Hadamard product of two matrices
*/
public static DenseMatrix hadamardProduct(DenseMatrix M, DenseMatrix N) throws Exception {
if (M.numRows != N.numRows || M.numColumns != N.numColumns)
throw new Exception("The dimensions of two matrices are not consistent!");
DenseMatrix res = new DenseMatrix(M.numRows, M.numColumns);
for (int i = 0; i < M.numRows; i++) {
for (int j = 0; j < M.numColumns; j++) {
res.set(i, j, M.get(i, j) * N.get(i, j));
}
}
return res;
}
/**
 * Computes the Gram matrix {@code A^T A} of the current matrix A.
 *
 * Improvements over the original: the misleading comment ("row i and row k"
 * -- the loop actually takes inner products of COLUMNS i and k) is corrected,
 * and since {@code A^T A} is symmetric only the upper triangle is computed
 * and then mirrored, roughly halving the work while producing an identical
 * result.
 *
 * @return a symmetric (numColumns x numColumns) matrix
 */
public DenseMatrix transMult() {
	DenseMatrix res = new DenseMatrix(numColumns, numColumns);
	for (int i = 0; i < numColumns; i++) {
		for (int k = i; k < numColumns; k++) {
			// inner product of column i and column k
			double val = 0;
			for (int j = 0; j < numRows; j++)
				val += get(j, i) * get(j, k);
			res.set(i, k, val);
			if (i != k)
				res.set(k, i, val); // mirror into the lower triangle
		}
	}
	return res;
}
/**
 * Multiplies this matrix by a dense matrix on the right.
 *
 * @param mat
 *            a dense matrix with {@code mat.numRows == this.numColumns}
 * @return the (this.numRows x mat.numColumns) product matrix
 */
public DenseMatrix mult(DenseMatrix mat) {
	assert this.numColumns == mat.numRows;
	DenseMatrix res = new DenseMatrix(this.numRows, mat.numColumns);
	for (int i = 0; i < res.numRows; i++) {
		double[] rowData = data[i]; // hoist the row reference out of the inner loops
		for (int j = 0; j < res.numColumns; j++) {
			double sum = 0;
			for (int k = 0; k < this.numColumns; k++)
				sum += rowData[k] * mat.data[k][j];
			res.set(i, j, sum);
		}
	}
	return res;
}
/**
* Matrix multiplication with a sparse matrix
*
* @param mat
* a sparse matrix
* @return a dense matrix with results of matrix multiplication
*/
public DenseMatrix mult(SparseMatrix mat) {
assert this.numColumns == mat.numRows;
DenseMatrix res = new DenseMatrix(this.numRows, mat.numColumns);
for (int j = 0; j < res.numColumns; j++) {
SparseVector col = mat.column(j); // only one-time computation
for (int i = 0; i < res.numRows; i++) {
double product = 0;
for (VectorEntry ve : col)
product += data[i][ve.index()] * ve.get();
res.set(i, j, product);
}
}
return res;
}
/**
* Do {@code matrix x vector} between current matrix and a given vector
*
* @return a dense vector with the results of {@code matrix x vector}
*/
public DenseVector mult(DenseVector vec) {
assert this.numColumns == vec.size;
DenseVector res = new DenseVector(this.numRows);
for (int i = 0; i < this.numRows; i++)
res.set(i, row(i, false).inner(vec));
return res;
}
public DenseVector mult(SparseVector vec) {
DenseVector res = new DenseVector(this.numRows);
for (int i = 0; i < this.numRows; i++) {
double product = 0;
for (VectorEntry ve : vec)
product += data[i][ve.index()] * ve.get();
res.set(i, product);
}
return res;
}
/**
* Matrix multiplication of a sparse matrix by a dense matrix
*
* @param sm
* a sparse matrix
* @param dm
* a dense matrix
* @return a dense matrix with the results of matrix multiplication
*/
public static DenseMatrix mult(SparseMatrix sm, DenseMatrix dm) {
assert sm.numColumns == dm.numRows;
DenseMatrix res = new DenseMatrix(sm.numRows, dm.numColumns);
for (int i = 0; i < res.numRows; i++) {
SparseVector row = sm.row(i);
for (int j = 0; j < res.numColumns; j++) {
double product = 0;
for (int k : row.getIndex())
product += row.get(k) * dm.data[k][j];
res.set(i, j, product);
}
}
return res;
}
/**
* Get the value at entry [row, column]
*/
public double get(int row, int column) {
return data[row][column];
}
/**
* Set a value to entry [row, column]
*/
public void set(int row, int column, double val) {
data[row][column] = val;
}
/**
* Set a value to all entries
*/
public void setAll(double val) {
for (int row = 0; row < numRows; row++) {
for (int col = 0; col < numColumns; col++) {
data[row][col] = val;
}
}
}
/**
* @return the sum of data entries in a row
*/
public double sumOfRow(int row) {
double res = 0;
for (int col = 0; col < numColumns; col++)
res += data[row][col];
return res;
}
/**
* @return the sum of data entries in a column
*/
public double sumOfColumn(int col) {
double res = 0;
for (int row = 0; row < numRows; row++)
res += data[row][col];
return res;
}
/**
* @return the sum of all data entries
*/
public double sum() {
double res = 0;
for (int row = 0; row < numRows; row++) {
for (int col = 0; col < numColumns; col++) {
res += data[row][col];
}
}
return res;
}
/**
* Add a value to entry [row, column]
*/
public void add(int row, int column, double val) {
data[row][column] += val;
}
/**
* @return a new matrix by scaling the current matrix
*/
public DenseMatrix scale(double val) {
DenseMatrix mat = new DenseMatrix(numRows, numColumns);
for (int i = 0; i < numRows; i++)
for (int j = 0; j < numColumns; j++)
mat.data[i][j] = this.data[i][j] * val;
return mat;
}
/**
* Do {@code A + B} matrix operation
*
* @return a matrix with results of {@code C = A + B}
*/
public DenseMatrix add(DenseMatrix mat) {
assert numRows == mat.numRows;
assert numColumns == mat.numColumns;
DenseMatrix res = new DenseMatrix(numRows, numColumns);
for (int i = 0; i < numRows; i++)
for (int j = 0; j < numColumns; j++)
res.data[i][j] = data[i][j] + mat.data[i][j];
return res;
}
/**
* Do {@code A + B} matrix operation
*
* @return a matrix with results of {@code C = A + B}
*/
public DenseMatrix add(SparseMatrix mat) {
assert numRows == mat.numRows;
assert numColumns == mat.numColumns;
DenseMatrix res = this.clone();
for (MatrixEntry me : mat)
res.add(me.row(), me.column(), me.get());
return res;
}
/**
* Do {@code A + c} matrix operation, where {@code c} is a constant. Each entries will be added by {@code c}
*
* @return a new matrix with results of {@code C = A + c}
*/
public DenseMatrix add(double val) {
DenseMatrix res = new DenseMatrix(numRows, numColumns);
for (int i = 0; i < numRows; i++)
for (int j = 0; j < numColumns; j++)
res.data[i][j] = data[i][j] + val;
return res;
}
/**
* Do {@code A + B} matrix operation
*
* @return a matrix with results of {@code C = A + B}
*/
public DenseMatrix minus(DenseMatrix mat) {
assert numRows == mat.numRows;
assert numColumns == mat.numColumns;
DenseMatrix res = new DenseMatrix(numRows, numColumns);
for (int i = 0; i < numRows; i++)
for (int j = 0; j < numColumns; j++)
res.data[i][j] = data[i][j] - mat.data[i][j];
return res;
}
/**
* Do {@code A + B} matrix operation
*
* @return a matrix with results of {@code C = A + B}
*/
public DenseMatrix minus(SparseMatrix mat) {
assert numRows == mat.numRows;
assert numColumns == mat.numColumns;
DenseMatrix res = this.clone();
for (MatrixEntry me : mat)
res.add(me.row(), me.column(), -me.get());
return res;
}
/**
* Do {@code A + c} matrix operation, where {@code c} is a constant. Each entries will be added by {@code c}
*
* @return a new matrix with results of {@code C = A + c}
*/
public DenseMatrix minus(double val) {
DenseMatrix res = new DenseMatrix(numRows, numColumns);
for (int i = 0; i < numRows; i++)
for (int j = 0; j < numColumns; j++)
res.data[i][j] = data[i][j] - val;
return res;
}
/**
 * Computes the Cholesky decomposition of the current (square) matrix.
 *
 * NOTE: the lower-triangular factor L is built internally, but the method
 * returns {@code L.transpose()}, i.e. the UPPER-triangular factor.
 *
 * @return the upper-triangular Cholesky factor, or {@code null} if the
 *         matrix is not positive definite (a negative value under the square
 *         root produces NaN on the diagonal)
 * @throws RuntimeException
 *             if the matrix is not square
 */
public DenseMatrix cholesky() {
	if (this.numRows != this.numColumns)
		throw new RuntimeException("Matrix is not square");
	int n = numRows;
	DenseMatrix L = new DenseMatrix(n, n);
	for (int i = 0; i < n; i++) {
		for (int j = 0; j <= i; j++) {
			double sum = 0.0;
			for (int k = 0; k < j; k++)
				sum += L.get(i, k) * L.get(j, k);
			// diagonal: sqrt(a_ii - sum); off-diagonal: normalized by L[j][j]
			double val = i == j ? Math.sqrt(data[i][i] - sum) : (data[i][j] - sum) / L.get(j, j);
			L.set(i, j, val);
		}
		if (Double.isNaN(L.get(i, i)))
			return null;
	}
	return L.transpose();
}
/**
* @return a transposed matrix of current matrix
*/
public DenseMatrix transpose() {
DenseMatrix mat = new DenseMatrix(numColumns, numRows);
for (int i = 0; i < mat.numRows; i++)
for (int j = 0; j < mat.numColumns; j++)
mat.set(i, j, this.data[j][i]);
return mat;
}
/**
* @return a covariance matrix of the current matrix
*/
public DenseMatrix cov() {
DenseMatrix mat = new DenseMatrix(numColumns, numColumns);
for (int i = 0; i < numColumns; i++) {
DenseVector xi = this.column(i);
xi = xi.minus(xi.mean());
mat.set(i, i, xi.inner(xi) / (xi.size - 1));
for (int j = i + 1; j < numColumns; j++) {
DenseVector yi = this.column(j);
double val = xi.inner(yi.minus(yi.mean())) / (xi.size - 1);
mat.set(i, j, val);
mat.set(j, i, val);
}
}
return mat;
}
/**
* Compute the inverse of a matrix by LU decomposition
*
* @return the inverse matrix of current matrix
* @deprecated use {@code inv} instead which is slightly faster
*/
public DenseMatrix inverse() {
if (numRows != numColumns)
throw new RuntimeException("Only square matrix can do inversion");
int n = numRows;
DenseMatrix mat = new DenseMatrix(this);
if (n == 1) {
mat.set(0, 0, 1.0 / mat.get(0, 0));
return mat;
}
int row[] = new int[n];
int col[] = new int[n];
double temp[] = new double[n];
int hold, I_pivot, J_pivot;
double pivot, abs_pivot;
// set up row and column interchange vectors
for (int k = 0; k < n; k++) {
row[k] = k;
col[k] = k;
}
// begin main reduction loop
for (int k = 0; k < n; k++) {
// find largest element for pivot
pivot = mat.get(row[k], col[k]);
I_pivot = k;
J_pivot = k;
for (int i = k; i < n; i++) {
for (int j = k; j < n; j++) {
abs_pivot = Math.abs(pivot);
if (Math.abs(mat.get(row[i], col[j])) > abs_pivot) {
I_pivot = i;
J_pivot = j;
pivot = mat.get(row[i], col[j]);
}
}
}
if (Math.abs(pivot) < 1.0E-10)
throw new RuntimeException("Matrix is singular !");
hold = row[k];
row[k] = row[I_pivot];
row[I_pivot] = hold;
hold = col[k];
col[k] = col[J_pivot];
col[J_pivot] = hold;
// reduce about pivot
mat.set(row[k], col[k], 1.0 / pivot);
for (int j = 0; j < n; j++) {
if (j != k) {
mat.set(row[k], col[j], mat.get(row[k], col[j]) * mat.get(row[k], col[k]));
}
}
// inner reduction loop
for (int i = 0; i < n; i++) {
if (k != i) {
for (int j = 0; j < n; j++) {
if (k != j) {
double val = mat.get(row[i], col[j]) - mat.get(row[i], col[k]) * mat.get(row[k], col[j]);
mat.set(row[i], col[j], val);
}
}
mat.set(row[i], col[k], -mat.get(row[i], col[k]) * mat.get(row[k], col[k]));
}
}
}
// end main reduction loop
// unscramble rows
for (int j = 0; j < n; j++) {
for (int i = 0; i < n; i++)
temp[col[i]] = mat.get(row[i], j);
for (int i = 0; i < n; i++)
mat.set(i, j, temp[i]);
}
// unscramble columns
for (int i = 0; i < n; i++) {
for (int j = 0; j < n; j++)
temp[row[j]] = mat.get(i, col[j]);
for (int j = 0; j < n; j++)
mat.set(i, j, temp[j]);
}
return mat;
}
/**
* NOTE: this implementation (adopted from PREA package) is slightly faster than {@code inverse}, especially when
* {@code numRows} is large.
*
* @return the inverse matrix of current matrix
*/
public DenseMatrix inv() {
if (this.numRows != this.numColumns)
throw new RuntimeException("Dimensions disagree");
int n = this.numRows;
DenseMatrix mat = DenseMatrix.eye(n);
if (n == 1) {
mat.set(0, 0, 1 / this.get(0, 0));
return mat;
}
DenseMatrix b = new DenseMatrix(this);
for (int i = 0; i < n; i++) {
// find pivot:
double mag = 0;
int pivot = -1;
for (int j = i; j < n; j++) {
double mag2 = Math.abs(b.get(j, i));
if (mag2 > mag) {
mag = mag2;
pivot = j;
}
}
// no pivot (error):
if (pivot == -1 || mag == 0)
return mat;
// move pivot row into position:
if (pivot != i) {
double temp;
for (int j = i; j < n; j++) {
temp = b.get(i, j);
b.set(i, j, b.get(pivot, j));
b.set(pivot, j, temp);
}
for (int j = 0; j < n; j++) {
temp = mat.get(i, j);
mat.set(i, j, mat.get(pivot, j));
mat.set(pivot, j, temp);
}
}
// normalize pivot row:
mag = b.get(i, i);
for (int j = i; j < n; j++)
b.set(i, j, b.get(i, j) / mag);
for (int j = 0; j < n; j++)
mat.set(i, j, mat.get(i, j) / mag);
// eliminate pivot row component from other rows:
for (int k = 0; k < n; k++) {
if (k == i)
continue;
double mag2 = b.get(k, i);
for (int j = i; j < n; j++)
b.set(k, j, b.get(k, j) - mag2 * b.get(i, j));
for (int j = 0; j < n; j++)
mat.set(k, j, mat.get(k, j) - mag2 * mat.get(i, j));
}
}
return mat;
}
/**
* @return MooreCPenrose pseudoinverse based on singular value decomposition (SVD)
*/
public DenseMatrix pinv() {
if (numRows < numColumns) {
DenseMatrix res = this.transpose().pinv();
if (res != null)
res = res.transpose();
return res;
}
SVD svd = this.svd();
DenseMatrix U = svd.getU(), S = svd.getS(), V = svd.getV();
// compute S^+
DenseMatrix SPlus = S.clone();
for (int i = 0; i < SPlus.numRows; i++) {
double val = SPlus.get(i, i);
if (val != 0)
SPlus.set(i, i, 1.0 / val);
}
return V.mult(SPlus).mult(U.transpose());
}
public SVD svd() {
return new SVD(this);
}
/**
* set one value to a specific row
*
* @param row
* row id
* @param val
* value to be set
*/
public void setRow(int row, double val) {
Arrays.fill(data[row], val);
}
/**
* set values of one dense vector to a specific row
*
* @param row
* row id
* @param vals
* values of a dense vector
*/
public void setRow(int row, DenseVector vals) {
for (int j = 0; j < numColumns; j++)
data[row][j] = vals.data[j];
}
/**
* clear and reset all entries to 0
*/
public void clear() {
for (int i = 0; i < numRows; i++)
setRow(i, 0.0);
}
@Override
public String toString() {
return Strings.toString(data);
}
}
| 22,384 | 21.772126 | 119 | java |
librec | librec-master/librec/src/main/java/librec/data/RatingContext.java | // Copyright (C) 2014 Guibing Guo
//
// This file is part of LibRec.
//
// LibRec is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// LibRec is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with LibRec. If not, see <http://www.gnu.org/licenses/>.
//
package librec.data;
/**
 * Rating-related Context Information
 *
 * @author guoguibing
 */
public class RatingContext extends Context implements Comparable<RatingContext> {

	// rating time stamp, we prefer long to Date or Timestamp for computational convenience
	private long timestamp;

	// location when giving ratings
	private String location;

	// accompany user id
	private int accompany;

	// mood when giving rating
	private String mood;

	/**
	 * @param user
	 *            user id
	 * @param item
	 *            item id
	 */
	public RatingContext(int user, int item) {
		super(user, item);
	}

	/**
	 * @param user
	 *            user id
	 * @param item
	 *            item id
	 * @param timestamp
	 *            rating timestamp in million seconds
	 */
	public RatingContext(int user, int item, long timestamp) {
		this(user, item);
		this.timestamp = timestamp;
	}

	/**
	 * @return the location
	 */
	public String getLocation() {
		return location;
	}

	/**
	 * @param location
	 *            the location to set
	 */
	public void setLocation(String location) {
		this.location = location;
	}

	/**
	 * @return the accompany
	 */
	public int getAccompany() {
		return accompany;
	}

	/**
	 * @param accompany
	 *            the accompany to set
	 */
	public void setAccompany(int accompany) {
		this.accompany = accompany;
	}

	/**
	 * @return the mood
	 */
	public String getMood() {
		return mood;
	}

	/**
	 * @param mood
	 *            the mood to set
	 */
	public void setMood(String mood) {
		this.mood = mood;
	}

	/**
	 * @return the timestamp in million seconds
	 */
	public long getTimestamp() {
		return timestamp;
	}

	/**
	 * @param timestamp
	 *            the timestamp in million seconds
	 */
	public void setTimestamp(long timestamp) {
		this.timestamp = timestamp;
	}

	/**
	 * Orders rating contexts chronologically by timestamp.
	 *
	 * Fix: the original computed {@code this.timestamp - that.timestamp} in
	 * long arithmetic, which can overflow for widely separated timestamps and
	 * report the wrong order; {@link Long#compare(long, long)} is
	 * overflow-safe and returns the same sign for all non-overflowing inputs.
	 */
	@Override
	public int compareTo(RatingContext that) {
		return Long.compare(this.timestamp, that.timestamp);
	}
}
| 2,536 | 18.820313 | 90 | java |
librec | librec-master/librec/src/main/java/librec/data/AddConfiguration.java | // Copyright (C) 2014-2015 Guibing Guo
//
// This file is part of LibRec.
//
// LibRec is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// LibRec is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with LibRec. If not, see <http://www.gnu.org/licenses/>.
//
package librec.data;
import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Additional Configuration by a Specific Algorithm
 *
 * Retained at runtime ({@code RetentionPolicy.RUNTIME}), so the declared
 * strings are presumably read reflectively when a recommender reports its
 * settings -- confirm with callers.
 *
 * @author Guo Guibing
 */
@Target(ElementType.TYPE)
@Retention(RetentionPolicy.RUNTIME)
@Documented
public @interface AddConfiguration {

	/* add more configurations before the main configuration */
	public String before() default "";

	/* add more configurations after the main configuration */
	public String after() default "";
}
| 1,330 | 29.953488 | 71 | java |
librec | librec-master/librec/src/main/java/librec/data/Configuration.java | // Copyright (C) 2014-2015 Guibing Guo
//
// This file is part of LibRec.
//
// LibRec is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// LibRec is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with LibRec. If not, see <http://www.gnu.org/licenses/>.
//
package librec.data;
import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Inherited;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * An annotation to explain the required configuration of a recommender.
 *
 * Retained at runtime and {@code @Inherited}, so a configuration description
 * declared on a base recommender class is also visible on its subclasses.
 *
 * @author Guo Guibing
 */
@Target(ElementType.TYPE)
@Retention(RetentionPolicy.RUNTIME)
@Documented
@Inherited
public @interface Configuration {

	/* free-form configuration description; empty by default */
	String value() default "";
}
| 1,232 | 28.357143 | 72 | java |
librec | librec-master/librec/src/main/java/librec/data/VectorEntry.java | // Copyright (C) 2014 Guibing Guo
//
// This file is part of LibRec.
//
// LibRec is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// LibRec is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with LibRec. If not, see <http://www.gnu.org/licenses/>.
//
package librec.data;
/**
 * An entry of a vector: a cursor-style view exposed while iterating a vector.
 * Implementations may recycle a single instance across iteration steps (as
 * {@code SparseMatrixEntry} does for matrices), so callers should not retain
 * an entry beyond the current step -- TODO confirm per implementation.
 */
public interface VectorEntry {

	/**
	 * Returns the current index
	 */
	int index();

	/**
	 * Returns the value at the current index
	 */
	double get();

	/**
	 * Sets the value at the current index
	 */
	void set(double value);
}
| 992 | 22.642857 | 71 | java |
librec | librec-master/librec/src/main/java/librec/data/ItemContext.java | // Copyright (C) 2014 Guibing Guo
//
// This file is part of LibRec.
//
// LibRec is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// LibRec is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with LibRec. If not, see <http://www.gnu.org/licenses/>.
//
package librec.data;
/**
 * Item-related Contextual Information
 *
 * <p>
 * Holds item metadata (name, price, category, description) on top of the generic {@code Context}. An item context is
 * not associated with any user, so the user id is always fixed as -1.
 * </p>
 *
 * @author guoguibing
 *
 */
public class ItemContext extends Context {

	// item name
	private String name;

	// item price
	private double price;

	// item category
	private String category;

	// item description
	private String description;

	/**
	 * Construct a context for a given item. The user id is fixed as -1 since no user is involved.
	 *
	 * @param item
	 *            item id
	 */
	public ItemContext(int item) {
		// an item context carries no user, hence user id -1
		super(-1, item);
	}

	/**
	 * @return the category
	 */
	public String getCategory() {
		return category;
	}

	/**
	 * @param category
	 *            the category to set
	 */
	public void setCategory(String category) {
		this.category = category;
	}

	/**
	 * @return the price
	 */
	public double getPrice() {
		return price;
	}

	/**
	 * @param price
	 *            the price to set
	 */
	public void setPrice(double price) {
		this.price = price;
	}

	/**
	 * @return the description
	 */
	public String getDescription() {
		return description;
	}

	/**
	 * @param description
	 *            the description to set
	 */
	public void setDescription(String description) {
		this.description = description;
	}

	/**
	 * @return the name
	 */
	public String getName() {
		return name;
	}

	/**
	 * @param name
	 *            the name to set
	 */
	public void setName(String name) {
		this.name = name;
	}
}
| 2,116 | 17.734513 | 71 | java |
librec | librec-master/librec/src/main/java/librec/data/DenseVector.java | // Copyright (C) 2014 Guibing Guo
//
// This file is part of LibRec.
//
// LibRec is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// LibRec is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with LibRec. If not, see <http://www.gnu.org/licenses/>.
//
package librec.data;
import java.io.Serializable;
import java.util.Arrays;

import librec.util.Randoms;
import librec.util.Stats;
import librec.util.Strings;
/**
 * Data Structure: dense vector
 *
 * @author guoguibing
 *
 */
public class DenseVector implements Serializable {

	private static final long serialVersionUID = -2930574547913792430L;

	// number of entries
	protected int size;
	// entry values; may be shared with the caller when constructed with deep == false
	protected double[] data;

	/**
	 * Construct a dense vector with a specific size; all entries start at 0.
	 *
	 * @param size
	 *            the size of vector
	 */
	public DenseVector(int size) {
		this.size = size;
		data = new double[size];
	}

	/**
	 * Construct a dense vector by deeply copying data from a given array
	 *
	 * @param array
	 *            data to copy
	 */
	public DenseVector(double[] array) {
		this(array, true);
	}

	/**
	 * Construct a dense vector by copying data from a given array
	 *
	 * @param array
	 *            a given data array
	 * @param deep
	 *            whether to deep copy array data; if false, the array is shared, and external
	 *            mutation of it will be visible through this vector
	 */
	public DenseVector(double[] array, boolean deep) {
		this.size = array.length;
		// Arrays.copyOf replaces the manual element-by-element copy loop
		data = deep ? Arrays.copyOf(array, array.length) : array;
	}

	/**
	 * Construct a dense vector by deeply copying data from a given vector
	 *
	 * @param vec
	 *            vector to copy
	 */
	public DenseVector(DenseVector vec) {
		this(vec.data);
	}

	/**
	 * Make a deep copy of current vector
	 */
	public DenseVector clone() {
		return new DenseVector(this);
	}

	/**
	 * Initialize a dense vector with Gaussian values
	 *
	 * @param mean
	 *            mean of the Gaussian distribution
	 * @param sigma
	 *            standard deviation of the Gaussian distribution
	 */
	public void init(double mean, double sigma) {
		for (int i = 0; i < size; i++)
			data[i] = Randoms.gaussian(mean, sigma);
	}

	/**
	 * Initialize a dense vector with uniform values in (0, 1)
	 */
	public void init() {
		for (int i = 0; i < size; i++)
			data[i] = Randoms.uniform();
	}

	/**
	 * Initialize a dense vector with uniform values in (0, range)
	 *
	 * @param range
	 *            upper bound of the uniform distribution
	 */
	public void init(double range) {
		for (int i = 0; i < size; i++)
			data[i] = Randoms.uniform(0, range);
	}

	/**
	 * Get a value at entry [index]
	 *
	 * @param idx
	 *            entry index
	 * @return the value at the given entry
	 */
	public double get(int idx) {
		return data[idx];
	}

	/**
	 * @return vector's data (the internal array itself, not a copy)
	 */
	public double[] getData() {
		return data;
	}

	/**
	 * @return mean of current vector
	 */
	public double mean() {
		return Stats.mean(data);
	}

	/**
	 * @return summation of entries
	 */
	public double sum() {
		return Stats.sum(data);
	}

	/**
	 * Set a value to entry [index]
	 *
	 * @param idx
	 *            entry index
	 * @param val
	 *            value to set
	 */
	public void set(int idx, double val) {
		data[idx] = val;
	}

	/**
	 * Set a value to all entries
	 *
	 * @param val
	 *            value to assign to every entry
	 */
	public void setAll(double val) {
		// Arrays.fill replaces the manual assignment loop
		Arrays.fill(data, val);
	}

	/**
	 * Add a value to entry [index]
	 */
	public void add(int idx, double val) {
		data[idx] += val;
	}

	/**
	 * Substract a value from entry [index]
	 */
	public void minus(int idx, double val) {
		data[idx] -= val;
	}

	/**
	 * @param val
	 *            value to add
	 * @return a new dense vector by adding a value to all entries of current vector
	 */
	public DenseVector add(double val) {
		DenseVector result = new DenseVector(size);

		for (int i = 0; i < size; i++)
			result.data[i] = this.data[i] + val;

		return result;
	}

	/**
	 * @param val
	 *            value to subtract
	 * @return a new dense vector by substructing a value from all entries of current vector
	 */
	public DenseVector minus(double val) {
		DenseVector result = new DenseVector(size);

		for (int i = 0; i < size; i++)
			result.data[i] = this.data[i] - val;

		return result;
	}

	/**
	 * @param val
	 *            scale factor
	 * @return a new dense vector by scaling a value to all entries of current vector
	 */
	public DenseVector scale(double val) {
		DenseVector result = new DenseVector(size);

		for (int i = 0; i < size; i++)
			result.data[i] = this.data[i] * val;

		return result;
	}

	/**
	 * Do vector operation: {@code a + b}
	 *
	 * @param vec
	 *            the other operand; must have the same size
	 * @return a dense vector with results of {@code c = a + b}
	 */
	public DenseVector add(DenseVector vec) {
		assert size == vec.size;

		DenseVector result = new DenseVector(size);
		for (int i = 0; i < result.size; i++)
			result.data[i] = this.data[i] + vec.data[i];

		return result;
	}

	/**
	 * Do vector operation: {@code a - b}
	 *
	 * @param vec
	 *            the other operand; must have the same size
	 * @return a dense vector with results of {@code c = a - b}
	 */
	public DenseVector minus(DenseVector vec) {
		assert size == vec.size;

		DenseVector result = new DenseVector(size);
		for (int i = 0; i < vec.size; i++)
			result.data[i] = this.data[i] - vec.data[i];

		return result;
	}

	/**
	 * Do vector operation: {@code a^t * b}
	 *
	 * @param vec
	 *            the other operand; must have the same size
	 * @return the inner product of two vectors
	 */
	public double inner(DenseVector vec) {
		assert size == vec.size;

		double result = 0;
		for (int i = 0; i < vec.size; i++)
			result += get(i) * vec.get(i);

		return result;
	}

	/**
	 * Do vector operation: {@code a^t * b}
	 *
	 * @param vec
	 *            a sparse vector; only its non-zero entries contribute
	 * @return the inner product of two vectors
	 */
	public double inner(SparseVector vec) {
		double result = 0;
		for (int j : vec.getIndex())
			result += vec.get(j) * get(j);

		return result;
	}

	/**
	 * Do vector operation: {@code a * b^t}
	 *
	 * @param vec
	 *            the other operand
	 * @return the outer product of two vectors, a matrix of size (this.size x vec.size)
	 */
	public DenseMatrix outer(DenseVector vec) {
		DenseMatrix mat = new DenseMatrix(this.size, vec.size);

		for (int i = 0; i < mat.numRows; i++)
			for (int j = 0; j < mat.numColumns; j++)
				mat.set(i, j, get(i) * vec.get(j));

		return mat;
	}

	/**
	 * @param M
	 *            first vector
	 * @param N
	 *            second vector
	 * @return the Kronecker product of two vectors, of size {@code M.size * N.size}
	 */
	public static DenseVector kroneckerProduct(DenseVector M, DenseVector N) {
		DenseVector res = new DenseVector(M.size * N.size);

		int i = 0;
		for (int m = 0; m < M.size; m++) {
			double mVal = M.get(m);
			for (int n = 0; n < N.size; n++) {
				res.set(i++, mVal * N.get(n));
			}
		}

		return res;
	}

	@Override
	public String toString() {
		return Strings.toString(data);
	}
}
| 6,301 | 19.594771 | 85 | java |
librec | librec-master/librec/src/main/java/librec/data/MatrixEntry.java | // Copyright (C) 2014 Guibing Guo
//
// This file is part of LibRec.
//
// LibRec is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// LibRec is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with LibRec. If not, see <http://www.gnu.org/licenses/>.
//
package librec.data;
/**
 * An entry of a matrix: a readable and writable (row, column, value) triple.
 */
public interface MatrixEntry {

	/**
	 * Returns the current row index
	 *
	 * @return the row index of this entry
	 */
	int row();

	/**
	 * Returns the current column index
	 *
	 * @return the column index of this entry
	 */
	int column();

	/**
	 * Returns the value at the current index
	 *
	 * @return the entry's value
	 */
	double get();

	/**
	 * Sets the value at the current index
	 *
	 * @param value
	 *            the new value; writes through to the backing matrix
	 */
	void set(double value);
}
| 1,057 | 21.510638 | 71 | java |
librec | librec-master/librec/src/main/java/librec/data/SparseTensor.java | // Copyright (C) 2014-2015 Guibing Guo
//
// This file is part of LibRec.
//
// LibRec is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// LibRec is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with LibRec. If not, see <http://www.gnu.org/licenses/>.
//
package librec.data;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import librec.util.Logs;
import librec.util.Randoms;
import com.google.common.collect.HashBasedTable;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimap;
import com.google.common.collect.Table;
/**
 * 
 * Data Structure: Sparse Tensor <br>
 * 
 * <p>
 * For easy documentation, here we use {@code (keys, value)} to indicate each entry of a tensor, and {@code index} is
 * used to indicate the position in which the entry is stored in the lists.
 * </p>
 * 
 * <p>
 * Storage is coordinate format (COO): entry {@code index} has its d-th key in {@code ndKeys[d].get(index)} and its
 * value in {@code values.get(index)}. For fast lookup, selected dimensions can be indexed via
 * {@code keyIndices[d]: key -> set of positions}.
 * </p>
 * 
 * <p>
 * <Strong>Reference:</strong> Kolda and Bader, <strong>Tensor Decompositions and Applications</strong>, SIAM REVIEW,
 * Vol. 51, No. 3, pp. 455–500
 * </p>
 * 
 * @author Guo Guibing
 * 
 */
public class SparseTensor implements Iterable<TensorEntry>, Serializable {

	private static final long serialVersionUID = 2487513413901432943L;

	// iterates entries in storage (insertion) order
	private class TensorIterator implements Iterator<TensorEntry> {

		private int index = 0;
		// one reusable entry object, re-pointed at each position
		private SparseTensorEntry entry = new SparseTensorEntry();

		@Override
		public boolean hasNext() {
			return index < values.size();
		}

		@Override
		public TensorEntry next() {
			return entry.update(index++);
		}

		@Override
		public void remove() {
			// NOTE(review): after this removal the lists shift left but 'index' is not
			// decremented, so the element following the removed one appears to be skipped
			// on the next call to next() — confirm intended behavior before relying on it
			entry.remove();
		}
	}

	// a lightweight view onto the entry stored at 'index'
	private class SparseTensorEntry implements TensorEntry {

		private int index = -1;

		public SparseTensorEntry update(int index) {
			this.index = index;
			return this;
		}

		@Override
		public int key(int d) {
			return ndKeys[d].get(index);
		}

		@Override
		public double get() {
			return values.get(index);
		}

		@Override
		public void set(double value) {
			values.set(index, value);
		}

		/**
		 * remove the current entry
		 * 
		 * <p>
		 * NOTE(review): only the (key, index) pair of this position is dropped from the indexed
		 * dimensions; positions after {@code index} shift left in the lists but {@code keyIndices}
		 * is not rebuilt here, so their stored positions may become stale — compare with
		 * {@link SparseTensor#remove(int...)} which rebuilds the index. Confirm before use.
		 * </p>
		 */
		public void remove() {
			for (int d = 0; d < numDimensions; d++) {

				// update indices if necessary
				if (isIndexed(d))
					keyIndices[d].remove(key(d), index);

				ndKeys[d].remove(index);
			}
			values.remove(index);
		}

		public String toString() {
			StringBuilder sb = new StringBuilder();
			for (int d = 0; d < numDimensions; d++) {
				sb.append(key(d)).append("\t");
			}
			sb.append(get());

			return sb.toString();
		}

		@Override
		public int[] keys() {
			int[] res = new int[numDimensions];
			for (int d = 0; d < numDimensions; d++) {
				res[d] = key(d);
			}

			return res;
		}
	}

	/**
	 * number of dimensions, i.e., the order (or modes, ways) of a tensor
	 */
	private int numDimensions;
	// size of each dimension
	private int[] dimensions;
	private List<Integer>[] ndKeys; // n-dimensional array
	private List<Double> values; // values

	private Multimap<Integer, Integer>[] keyIndices; // each multimap = {key, {pos1, pos2, ...}}
	private List<Integer> indexedDimensions; // indexed dimensions

	// dimensions of users and items
	private int userDimension, itemDimension;

	/**
	 * Construct an empty sparse tensor
	 * 
	 * @param dims
	 *            dimensions of a tensor
	 */
	public SparseTensor(int... dims) {
		this(dims, null, null);
	}

	/**
	 * Construct a sparse tensor with indices and values
	 * 
	 * @param dims
	 *            dimensions of a tensor (at least 3)
	 * @param nds
	 *            n-dimensional keys; may be null for an empty tensor
	 * @param vals
	 *            entry values; may be null for an empty tensor
	 */
	@SuppressWarnings("unchecked")
	public SparseTensor(int[] dims, List<Integer>[] nds, List<Double> vals) {
		if (dims.length < 3)
			throw new Error("The dimension of a tensor cannot be smaller than 3!");

		numDimensions = dims.length;
		dimensions = new int[numDimensions];

		ndKeys = (List<Integer>[]) new List<?>[numDimensions];
		keyIndices = (Multimap<Integer, Integer>[]) new Multimap<?, ?>[numDimensions];

		for (int d = 0; d < numDimensions; d++) {
			dimensions[d] = dims[d];
			ndKeys[d] = nds == null ? new ArrayList<Integer>() : new ArrayList<Integer>(nds[d]);
			keyIndices[d] = HashMultimap.create();
		}

		values = vals == null ? new ArrayList<Double>() : new ArrayList<>(vals);
		indexedDimensions = new ArrayList<>(numDimensions);
	}

	/**
	 * make a deep clone
	 */
	public SparseTensor clone() {
		SparseTensor res = new SparseTensor(dimensions);

		// copy indices and values
		for (int d = 0; d < numDimensions; d++) {
			res.ndKeys[d].addAll(this.ndKeys[d]);
			res.keyIndices[d].putAll(this.keyIndices[d]);
		}

		res.values.addAll(this.values);

		// copy indexed array
		res.indexedDimensions.addAll(this.indexedDimensions);

		// others
		res.userDimension = userDimension;
		res.itemDimension = itemDimension;

		return res;
	}

	/**
	 * Add a value to a given i-entry; if the keys already exist, the value is accumulated onto the
	 * existing entry, otherwise a new entry is inserted.
	 * 
	 * @param val
	 *            value to add
	 * @param keys
	 *            n-dimensional keys
	 */
	public void add(double val, int... keys) throws Exception {

		int index = findIndex(keys);

		if (index >= 0) {
			// if keys exist: update value
			values.set(index, values.get(index) + val);
		} else {
			// if keys do not exist: add a new entry
			set(val, keys);
		}
	}

	/**
	 * Set a value to a specific i-entry; overwrites the value if the keys exist, otherwise
	 * appends a new entry at the end of the lists.
	 * 
	 * @param val
	 *            value to set
	 * @param keys
	 *            n-dimensional keys
	 */
	public void set(double val, int... keys) throws Exception {
		int index = findIndex(keys);

		// if i-entry exists, set it a new value
		if (index >= 0) {
			values.set(index, val);
			return;
		}

		// otherwise insert a new entry
		for (int d = 0; d < numDimensions; d++) {
			ndKeys[d].add(keys[d]);

			// update indices if necessary
			if (isIndexed(d)) {
				keyIndices[d].put(keys[d], ndKeys[d].size() - 1);
				// other keys' indices do not change
			}
		}

		values.add(val);
	}

	/**
	 * remove an entry with specific keys. NOTE: it is not recommended to remove by entry index because the index may be
	 * changed after operations are executed, especially operation as addiction, remove, etc.
	 * 
	 * @return true if an entry was found and removed, false if the keys do not exist
	 */
	public boolean remove(int... keys) throws Exception {
		int index = findIndex(keys);

		if (index < 0)
			return false;

		for (int d = 0; d < numDimensions; d++) {
			ndKeys[d].remove(index);

			// update indices if necessary
			if (isIndexed(d)) {
				// positions after 'index' shift left, so a full rebuild is required
				buildIndex(d); // re-build indices
			}
		}

		values.remove(index);

		return true;
	}

	/**
	 * @return all entries for a (user, item) pair; relies on the configured
	 *         {@code userDimension} and {@code itemDimension}
	 */
	public List<Integer> getIndices(int user, int item) {
		List<Integer> res = new ArrayList<>();

		Collection<Integer> indices = getIndex(userDimension, user);
		for (int index : indices) {
			if (key(itemDimension, index) == item) {
				res.add(index);
			}
		}

		return res;
	}

	/**
	 * find the inner index of a given keys
	 * 
	 * @return the storage position of the entry, or -1 if the keys do not exist
	 */
	private int findIndex(int... keys) throws Exception {

		if (keys.length != numDimensions)
			throw new Exception("The given input does not match with the tensor dimension!");

		// if no data exists
		if (values.size() == 0)
			return -1;

		// if no indexed dimension exists
		if (indexedDimensions.size() == 0)
			buildIndex(0);

		// retrieve from the first indexed dimension
		int d = indexedDimensions.get(0);

		// all relevant positions
		Collection<Integer> indices = keyIndices[d].get(keys[d]);
		if (indices == null || indices.size() == 0)
			return -1;

		// for each possible position, verify that all remaining keys match too
		for (int index : indices) {
			boolean found = true;
			for (int dd = 0; dd < numDimensions; dd++) {
				if (keys[dd] != key(dd, index)) {
					found = false;
					break;
				}
			}
			if (found)
				return index;
		}

		// if not found
		return -1;
	}

	/**
	 * A fiber is defined by fixing every index but one. For example, a matrix column is a mode-1 fiber and a matrix row
	 * is a mode-2 fiber.
	 * 
	 * @param dim
	 *            the dimension where values can vary
	 * @param keys
	 *            the other fixed dimension keys; note these exclude dimension {@code dim}, hence
	 *            the {@code d < dim ? d : d - 1} offset below
	 * @return a sparse vector
	 */
	public SparseVector fiber(int dim, int... keys) {
		if ((keys.length != numDimensions - 1) || size() < 1)
			throw new Error("The input indices do not match the fiber specification!");

		// find an indexed dimension for searching indices; it must differ from 'dim'
		int d = -1;
		if ((indexedDimensions.size() == 0) || (indexedDimensions.contains(dim) && indexedDimensions.size() == 1)) {
			d = (dim != 0 ? 0 : 1);
			buildIndex(d);
		} else {
			for (int dd : indexedDimensions) {
				if (dd != dim) {
					d = dd;
					break;
				}
			}
		}

		SparseVector res = new SparseVector(dimensions[dim]);

		// all relevant positions
		Collection<Integer> indices = keyIndices[d].get(keys[d < dim ? d : d - 1]);
		if (indices == null || indices.size() == 0)
			return res;

		// for each possible position, verify the remaining fixed keys
		for (int index : indices) {
			boolean found = true;
			for (int dd = 0, ndi = 0; dd < numDimensions; dd++) {
				if (dd == dim)
					continue;

				if (keys[ndi++] != key(dd, index)) {
					found = false;
					break;
				}
			}

			if (found) {
				res.set(key(dim, index), value(index));
			}
		}

		return res;
	}

	/**
	 * Check if a given keys exists
	 * 
	 * @param keys
	 *            keys to check
	 * @return true if found, and false otherwise
	 */
	public boolean contains(int... keys) throws Exception {
		return findIndex(keys) >= 0 ? true : false;
	}

	/**
	 * @return whether a dimension d is indexed
	 */
	public boolean isIndexed(int d) {
		return indexedDimensions.contains(d);
	}

	/**
	 * @return whether a tensor is cubical, i.e., all dimensions have the same size
	 */
	public boolean isCubical() {
		int dim = dimensions[0];
		for (int d = 1; d < numDimensions; d++) {
			if (dim != dimensions[d])
				return false;
		}

		return true;
	}

	/**
	 * @return whether a tensor is diagonal, i.e., non-zeros only where all keys are equal
	 */
	public boolean isDiagonal() {
		for (TensorEntry te : this) {
			double val = te.get();
			if (val != 0) {
				int i = te.key(0);
				for (int d = 0; d < numDimensions; d++) {
					int j = te.key(d);
					if (i != j)
						return false;
				}
			}
		}

		return true;
	}

	/**
	 * @return a value given a specific i-entry; 0 if the keys do not exist
	 */
	public double get(int... keys) throws Exception {
		assert keys.length == this.numDimensions;

		int index = findIndex(keys);
		return index < 0 ? 0 : values.get(index);
	}

	/**
	 * Shuffle a sparse tensor: randomly permutes the storage order of entries (Fisher-Yates style),
	 * keeping key indices consistent.
	 */
	public void shuffle() {
		int len = size();
		for (int i = 0; i < len; i++) {
			// target index
			int j = i + Randoms.uniform(len - i);

			// swap values
			double temp = values.get(i);
			values.set(i, values.get(j));
			values.set(j, temp);

			// swap keys
			for (int d = 0; d < numDimensions; d++) {
				int ikey = key(d, i);
				int jkey = key(d, j);
				ndKeys[d].set(i, jkey);
				ndKeys[d].set(j, ikey);

				// update indices
				if (isIndexed(d)) {
					keyIndices[d].remove(jkey, j);
					keyIndices[d].put(jkey, i);

					keyIndices[d].remove(ikey, i);
					keyIndices[d].put(ikey, j);
				}
			}
		}
	}

	/**
	 * build index at dimensions nd
	 * 
	 * @param dims
	 *            dimensions to be indexed
	 */
	public void buildIndex(int... dims) {
		for (int d : dims) {
			keyIndices[d].clear();
			for (int index = 0; index < ndKeys[d].size(); index++) {
				keyIndices[d].put(key(d, index), index);
			}

			if (!indexedDimensions.contains(d))
				indexedDimensions.add(d);
		}
	}

	/**
	 * build index for all dimensions
	 */
	public void buildIndices() {
		for (int d = 0; d < numDimensions; d++) {
			buildIndex(d);
		}
	}

	/**
	 * @return indices (positions) of a key in dimension d; builds the index on demand
	 */
	public Collection<Integer> getIndex(int d, int key) {
		if (!isIndexed(d))
			buildIndex(d);

		return keyIndices[d].get(key);
	}

	/**
	 * @return keys in a given index
	 */
	public int[] keys(int index) {
		int[] res = new int[numDimensions];
		for (int d = 0; d < numDimensions; d++) {
			res[d] = key(d, index);
		}

		return res;
	}

	/**
	 * @return key in the position {@code index} of dimension {@code d}
	 */
	public int key(int d, int index) {
		return ndKeys[d].get(index);
	}

	/**
	 * @return value in a given index
	 */
	public double value(int index) {
		return values.get(index);
	}

	/**
	 * @param sd
	 *            source dimension
	 * @param key
	 *            key in the source dimension
	 * @param td
	 *            target dimension
	 * 
	 * @return keys in a target dimension {@code td} related with a key in dimension {@code sd}
	 */
	public List<Integer> getRelevantKeys(int sd, int key, int td) {
		Collection<Integer> indices = getIndex(sd, key);
		List<Integer> res = null;
		if (indices != null) {
			res = new ArrayList<>();
			for (int index : indices) {
				res.add(key(td, index));
			}
		}

		return res;
	}

	/**
	 * @return number of entries of the tensor
	 */
	public int size() {
		return values.size();
	}

	/**
	 * Slice is a two-dimensional sub-array of a tensor, defined by fixing all but two indices.
	 * 
	 * @param rowDim
	 *            row dimension
	 * @param colDim
	 *            column dimension
	 * @param otherKeys
	 *            keys of other dimensions (in dimension order, skipping rowDim and colDim)
	 * 
	 * @return a sparse matrix, or null if no entry matches the fixed keys
	 */
	public SparseMatrix slice(int rowDim, int colDim, int... otherKeys) {

		if (otherKeys.length != numDimensions - 2)
			throw new Error("The input dimensions do not match the tensor specification!");

		// find an indexed array to search; it must differ from both rowDim and colDim
		int d = -1;
		boolean cond1 = indexedDimensions.size() == 0;
		boolean cond2 = (indexedDimensions.contains(rowDim) || indexedDimensions.contains(colDim))
				&& indexedDimensions.size() == 1;
		boolean cond3 = indexedDimensions.contains(rowDim) && indexedDimensions.contains(colDim)
				&& indexedDimensions.size() == 2;
		if (cond1 || cond2 || cond3) {
			for (d = 0; d < numDimensions; d++) {
				if (d != rowDim && d != colDim)
					break;
			}
			buildIndex(d);
		} else {
			for (int dd : indexedDimensions) {
				if (dd != rowDim && dd != colDim) {
					d = dd;
					break;
				}
			}
		}

		// get search key: the entry of otherKeys that corresponds to dimension d
		int key = -1;
		for (int dim = 0, i = 0; dim < numDimensions; dim++) {
			if (dim == rowDim || dim == colDim)
				continue;

			if (dim == d) {
				key = otherKeys[i];
				break;
			}
			i++;
		}

		// all relevant positions
		Collection<Integer> indices = keyIndices[d].get(key);
		if (indices == null || indices.size() == 0)
			return null;

		Table<Integer, Integer, Double> dataTable = HashBasedTable.create();
		Multimap<Integer, Integer> colMap = HashMultimap.create();

		// for each possible position, verify all the remaining fixed keys
		for (int index : indices) {
			boolean found = true;
			for (int dd = 0, j = 0; dd < numDimensions; dd++) {
				if (dd == rowDim || dd == colDim)
					continue;

				if (otherKeys[j++] != key(dd, index)) {
					found = false;
					break;
				}
			}

			if (found) {
				int row = ndKeys[rowDim].get(index);
				int col = ndKeys[colDim].get(index);
				double val = values.get(index);

				dataTable.put(row, col, val);
				colMap.put(col, row);
			}
		}

		return new SparseMatrix(dimensions[rowDim], dimensions[colDim], dataTable, colMap);
	}

	/**
	 * Re-ordering entries of a tensor into a matrix
	 * 
	 * @param n
	 *            mode or dimension
	 * @return an unfolded or flatten matrix; the column index of an entry is
	 *         {@code j = sum over k != n of i_k * J_k}, where {@code J_k} is the product of the
	 *         sizes of all dimensions before {@code k} (excluding {@code n})
	 */
	public SparseMatrix matricization(int n) {
		int numRows = dimensions[n];
		int numCols = 1;
		for (int d = 0; d < numDimensions; d++) {
			if (d != n)
				numCols *= dimensions[d];
		}

		Table<Integer, Integer, Double> dataTable = HashBasedTable.create();
		Multimap<Integer, Integer> colMap = HashMultimap.create();
		for (TensorEntry te : this) {
			int[] keys = te.keys();

			int i = keys[n];
			int j = 0;
			for (int k = 0; k < numDimensions; k++) {
				if (k == n)
					continue;

				int ik = keys[k];
				// jk = product of the sizes of dimensions m < k, m != n
				int jk = 1;
				for (int m = 0; m < k; m++) {
					if (m == n)
						continue;
					jk *= dimensions[m];
				}

				j += ik * jk;
			}

			dataTable.put(i, j, te.get());
			colMap.put(j, i);
		}

		return new SparseMatrix(numRows, numCols, dataTable, colMap);
	}

	/**
	 * n-mode product of a tensor A (I1 x I2 x ... x IN) with a matrix B (J x In), denoted by A Xn B
	 * 
	 * @param mat
	 *            mat to be multiplied
	 * @param dim
	 *            mode/dimension of the tensor to be used
	 * @return a new tensor in (I1 x I2 x ... x In-1 x J x In+1 x ... x IN)
	 */
	public SparseTensor modeProduct(DenseMatrix mat, int dim) throws Exception {

		if (dimensions[dim] != mat.numColumns)
			throw new Exception("Dimensions of a tensor and a matrix do not match for n-mode product!");

		int[] dims = new int[numDimensions];
		for (int i = 0; i < dims.length; i++) {
			dims[i] = i == dim ? mat.numRows : dimensions[i];
		}

		SparseTensor res = new SparseTensor(dims);

		for (TensorEntry te : this) {
			double val = te.get();
			int[] keys = te.keys();
			int i = keys[dim];

			for (int j = 0; j < mat.numRows; j++) {

				int[] ks = new int[numDimensions];
				for (int k = 0; k < ks.length; k++)
					ks[k] = k == dim ? j : keys[k];

				res.add(val * mat.get(j, i), ks);
			}
		}

		return res;
	}

	/**
	 * n-mode product of a tensor A (I1 x I2 x ... x IN) with a vector B (1 x In), denoted by A Xn B
	 * 
	 * @param vec
	 *            vector to be multiplied
	 * @param dim
	 *            mode/dimension of the tensor to be used
	 * @return a new tensor in (I1 x I2 x ... x In-1 x 1 x In+1 x ... x IN)
	 */
	public SparseTensor modeProduct(DenseVector vec, int dim) throws Exception {

		if (dimensions[dim] != vec.size)
			throw new Exception("Dimensions of a tensor and a vector do not match for n-mode product!");

		int[] dims = new int[numDimensions];
		for (int i = 0; i < dims.length; i++) {
			dims[i] = i == dim ? 1 : dimensions[i];
		}

		SparseTensor res = new SparseTensor(dims);

		for (TensorEntry te : this) {
			double val = te.get();
			int[] keys = te.keys();
			int i = keys[dim];

			int[] ks = new int[numDimensions];
			for (int k = 0; k < ks.length; k++)
				// NOTE(review): key 1 is written into a dimension declared with size 1;
				// the only valid key of a size-1 dimension would normally be 0 — confirm
				ks[k] = k == dim ? 1 : keys[k];

			res.add(val * vec.get(i), ks);
		}

		return res;
	}

	/**
	 * retrieve a rating matrix from the tensor. Warning: it assumes there is at most one entry for each (user, item)
	 * pair.
	 * 
	 * <p>
	 * Uses the configured {@code userDimension} and {@code itemDimension} fields (see
	 * {@code setUserDimension}/{@code setItemDimension}) to locate user and item keys.
	 * </p>
	 * 
	 * @return a sparse rating matrix
	 */
	public SparseMatrix rateMatrix() {

		Table<Integer, Integer, Double> dataTable = HashBasedTable.create();
		Multimap<Integer, Integer> colMap = HashMultimap.create();

		for (TensorEntry te : this) {
			int u = te.key(userDimension);
			int i = te.key(itemDimension);

			dataTable.put(u, i, te.get());
			colMap.put(i, u);
		}

		return new SparseMatrix(dimensions[userDimension], dimensions[itemDimension], dataTable, colMap);
	}

	@Override
	public Iterator<TensorEntry> iterator() {
		return new TensorIterator();
	}

	/**
	 * @return norm of a tensor (Frobenius-style: square root of the sum of squared values)
	 */
	public double norm() {
		double res = 0;

		for (double val : values) {
			res += val * val;
		}

		return Math.sqrt(res);
	}

	/**
	 * @return inner product with another tensor (sum over products of co-located entries)
	 */
	public double innerProduct(SparseTensor st) throws Exception {
		if (!isDimMatch(st))
			throw new Exception("The dimensions of two sparse tensors do not match!");

		double res = 0;
		for (TensorEntry te : this) {
			double v1 = te.get();
			double v2 = st.get(te.keys());

			res += v1 * v2;
		}

		return res;
	}

	/**
	 * @return whether two sparse tensors have the same dimensions
	 */
	public boolean isDimMatch(SparseTensor st) {
		if (numDimensions != st.numDimensions)
			return false;

		boolean match = true;
		for (int d = 0; d < numDimensions; d++) {
			if (dimensions[d] != st.dimensions[d]) {
				match = false;
				break;
			}
		}

		return match;
	}

	public int getUserDimension() {
		return userDimension;
	}

	public void setUserDimension(int userDimension) {
		this.userDimension = userDimension;
	}

	public int getItemDimension() {
		return itemDimension;
	}

	public void setItemDimension(int itemDimension) {
		this.itemDimension = itemDimension;
	}

	public int[] dimensions() {
		return dimensions;
	}

	/**
	 * @return the number of a tensor's dimensions, a.k.a., the order of a tensor
	 */
	public int numDimensions() {
		return numDimensions;
	}

	@Override
	public String toString() {
		StringBuilder sb = new StringBuilder();
		sb.append("N-Dimension: ").append(numDimensions).append(", Size: ").append(size()).append("\n");
		for (int index = 0; index < values.size(); index++) {
			for (int d = 0; d < numDimensions; d++) {
				sb.append(key(d, index)).append("\t");
			}
			sb.append(value(index)).append("\n");
		}

		return sb.toString();
	}

	/**
	 * Usage demonstration
	 */
	public static void main(String[] args) throws Exception {
		SparseTensor st = new SparseTensor(4, 4, 6);
		st.set(1.0, 1, 0, 0);
		st.set(1.5, 1, 0, 0); // overwrite value
		st.set(2.0, 1, 1, 0);
		st.set(3.0, 2, 0, 0);
		st.set(4.0, 1, 3, 0);
		st.set(5.0, 1, 0, 5);
		st.set(6.0, 3, 1, 4);

		Logs.debug(st);
		Logs.debug("Keys (1, 0, 0) = {}", st.get(1, 0, 0));
		Logs.debug("Keys (1, 1, 0) = {}", st.get(1, 1, 0));
		Logs.debug("Keys (1, 2, 0) = {}", st.get(1, 2, 0));
		Logs.debug("Keys (2, 0, 0) = {}", st.get(2, 0, 0));
		Logs.debug("Keys (1, 0, 6) = {}", st.get(1, 0, 6));
		Logs.debug("Keys (3, 1, 4) = {}", st.get(3, 1, 4));

		Logs.debug("Index of dimension 0 key 1 = {}", st.getIndex(0, 1));
		Logs.debug("Index of dimension 1 key 3 = {}", st.getIndex(1, 3));
		Logs.debug("Index of dimension 2 key 1 = {}", st.getIndex(2, 1));
		Logs.debug("Index of dimension 2 key 6 = {}", st.getIndex(2, 6));

		st.set(4.5, 2, 1, 1);
		Logs.debug(st);
		Logs.debug("Index of dimension 2 key 1 = {}", st.getIndex(2, 1));
		st.remove(2, 1, 1);
		Logs.debug("Index of dimension 2 key 1 = {}", st.getIndex(2, 1));

		Logs.debug("Index of keys (1, 2, 0) = {}, value = {}", st.findIndex(1, 2, 0), st.get(1, 2, 0));
		Logs.debug("Index of keys (3, 1, 4) = {}, value = {}", st.findIndex(3, 1, 4), st.get(3, 1, 4));
		Logs.debug("Keys in dimension 2 associated with dimension 0 key 1 = {}", st.getRelevantKeys(0, 1, 2));

		// norm
		Logs.debug("norm = {}", st.norm());

		// clone
		SparseTensor st2 = st.clone();
		Logs.debug("make a clone = {}", st2);

		// inner product
		Logs.debug("inner with the clone = {}", st.innerProduct(st2));

		st.set(2.5, 1, 0, 0);
		st2.remove(1, 0, 0);
		Logs.debug("st1 = {}", st);
		Logs.debug("st2 = {}", st2);

		// fiber
		Logs.debug("fiber (0, 0, 0) = {}", st.fiber(0, 0, 0));
		Logs.debug("fiber (1, 1, 0) = {}", st.fiber(1, 1, 0));
		Logs.debug("fiber (2, 1, 0) = {}", st.fiber(2, 1, 0));

		// slice
		Logs.debug("slice (0, 1, 0) = {}", st.slice(0, 1, 0));
		Logs.debug("slice (0, 2, 1) = {}", st.slice(0, 2, 1));
		Logs.debug("slice (1, 2, 1) = {}", st.slice(1, 2, 1));

		// iterator
		for (TensorEntry te : st) {
			te.set(te.get() + 0.588);
		}
		Logs.debug("Before shuffle: {}", st);

		// shuffle
		st.shuffle();
		Logs.debug("After shuffle: {}", st);

		// new tensor: example given by the reference paper in (2.1)
		st = new SparseTensor(3, 4, 2);
		st.set(1, 0, 0, 0);
		st.set(4, 0, 1, 0);
		st.set(7, 0, 2, 0);
		st.set(10, 0, 3, 0);
		st.set(2, 1, 0, 0);
		st.set(5, 1, 1, 0);
		st.set(8, 1, 2, 0);
		st.set(11, 1, 3, 0);
		st.set(3, 2, 0, 0);
		st.set(6, 2, 1, 0);
		st.set(9, 2, 2, 0);
		st.set(12, 2, 3, 0);
		st.set(13, 0, 0, 1);
		st.set(16, 0, 1, 1);
		st.set(19, 0, 2, 1);
		st.set(22, 0, 3, 1);
		st.set(14, 1, 0, 1);
		st.set(17, 1, 1, 1);
		st.set(20, 1, 2, 1);
		st.set(23, 1, 3, 1);
		st.set(15, 2, 0, 1);
		st.set(18, 2, 1, 1);
		st.set(21, 2, 2, 1);
		st.set(24, 2, 3, 1);

		Logs.debug("A new tensor = {}", st);
		Logs.debug("Mode X0 unfoldings = {}", st.matricization(0));
		Logs.debug("Mode X1 unfoldings = {}", st.matricization(1));
		Logs.debug("Mode X2 unfoldings = {}", st.matricization(2));
	}
}
| 24,174 | 22.470874 | 117 | java |
librec | librec-master/librec/src/main/java/librec/ranking/BPR.java | // Copyright (C) 2014 Guibing Guo
//
// This file is part of LibRec.
//
// LibRec is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// LibRec is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with LibRec. If not, see <http://www.gnu.org/licenses/>.
//
package librec.ranking;
import librec.data.Configuration;
import librec.data.SparseMatrix;
import librec.data.SparseVector;
import librec.intf.IterativeRecommender;
import librec.util.Randoms;
import librec.util.Strings;
/**
*
* Rendle et al., <strong>BPR: Bayesian Personalized Ranking from Implicit Feedback</strong>, UAI 2009.
*
* @author guoguibing
*
*/
@Configuration("binThold, factors, lRate, maxLRate, regU, regI, numIters, isBoldDriver")
public class BPR extends IterativeRecommender {

	public BPR(SparseMatrix trainMatrix, SparseMatrix testMatrix, int fold) {
		super(trainMatrix, testMatrix, fold);

		isRankingPred = true;
		initByNorm = false;
	}

	@Override
	protected void initModel() throws Exception {
		super.initModel();

		// cache each user's rating vector so triple sampling is fast
		userCache = trainMatrix.rowCache(cacheSpec);
	}

	/**
	 * Stochastic gradient descent over sampled triples (user, positive item,
	 * negative item), optimizing the BPR pairwise ranking criterion.
	 */
	@Override
	protected void buildModel() throws Exception {
		for (int iter = 1; iter <= numIters; iter++) {

			loss = 0;
			int sampleCount = numUsers * 100;

			for (int s = 0; s < sampleCount; s++) {

				// draw a random triple: a user, one of his rated items, one unrated item
				int user, posItem, negItem;
				while (true) {
					user = Randoms.uniform(numUsers);
					SparseVector ratings = userCache.get(user);

					// skip users with no ratings, or with every item rated
					if (ratings.getCount() == 0 || ratings.getCount() == numItems)
						continue;

					int[] ratedItems = ratings.getIndex();
					posItem = ratedItems[Randoms.uniform(ratedItems.length)];

					do {
						negItem = Randoms.uniform(numItems);
					} while (ratings.contains(negItem));

					break;
				}

				// gradient step on -ln sigmoid(x_u,pos - x_u,neg)
				double posScore = predict(user, posItem);
				double negScore = predict(user, negItem);
				double diff = posScore - negScore;

				loss += -Math.log(g(diff));

				double grad = g(-diff);

				for (int f = 0; f < numFactors; f++) {
					double puf = P.get(user, f);
					double qif = Q.get(posItem, f);
					double qjf = Q.get(negItem, f);

					P.add(user, f, lRate * (grad * (qif - qjf) - regU * puf));
					Q.add(posItem, f, lRate * (grad * puf - regI * qif));
					Q.add(negItem, f, lRate * (grad * (-puf) - regI * qjf));

					loss += regU * puf * puf + regI * qif * qif + regI * qjf * qjf;
				}
			}

			if (isConverged(iter))
				break;
		}
	}

	@Override
	public String toString() {
		return Strings.toString(new Object[] { binThold, numFactors, initLRate, maxLRate, regU, regI, numIters, isBoldDriver }, ",");
	}
}
| 2,904 | 24.482456 | 127 | java |
librec | librec-master/librec/src/main/java/librec/ranking/WRMF.java | package librec.ranking;
import librec.data.*;
import librec.intf.IterativeRecommender;
import librec.util.Logs;
import librec.util.Strings;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
/**
* <h3>WRMF: Weighted Regularized Matrix Factorization.</h3>
* <p>
* This implementation refers to the method proposed by Hu et al. at ICDM 2008.
* <p>
* <ul>
* <li><strong>Binary ratings:</strong> Pan et al., One-class Collaborative
* Filtering, ICDM 2008.</li>
* <li><strong>Real ratings:</strong> Hu et al., Collaborative filtering for
* implicit feedback datasets, ICDM 2008.</li>
* </ul>
*
* @author wkq
*/
@Configuration("binThold, alpha, factors, regU, regI, numIters")
public class WRMF extends IterativeRecommender {

	// confidence scaling parameter, read from algorithm option "-alpha"
	private float alpha;
	private SparseMatrix CuiI;// (C_{ui} - 1): confidence minus one, stored only for observed (u, i) entries
	private SparseMatrix Pui;// preference indicator: P_{ui} = 1 if r_{ui} > 0, otherwise P_{ui} = 0
	// cached index lists: items rated by each user, and users who rated each item
	private List<List<Integer>> userItemList, itemUserList;

	public WRMF(SparseMatrix trainMatrix, SparseMatrix testMatrix, int fold) {
		super(trainMatrix, testMatrix, fold);
		isRankingPred = true; // item recommendation
		alpha = algoOptions.getFloat("-alpha");
		// checkBinary();
	}

	/**
	 * Initializes the latent factor matrices P and Q, pre-computes the
	 * confidence (CuiI) and preference (Pui) matrices, and caches the
	 * per-user / per-item index lists used by the ALS sweeps.
	 */
	@Override
	protected void initModel() {
		P = new DenseMatrix(numUsers, numFactors);
		Q = new DenseMatrix(numItems, numFactors);
		// initialize model
		if (initByNorm) {
			System.out.println("initByNorm");
			P.init(initMean, initStd);
			Q.init(initMean, initStd);
		} else {
			P.init(); // P.init(smallValue);
			Q.init(); // Q.init(smallValue);
		}
		// pre-compute CuiI and Pui over all observed entries
		CuiI = new SparseMatrix(trainMatrix);
		Pui = new SparseMatrix(trainMatrix);
		for (MatrixEntry me : trainMatrix) {
			int u = me.row();
			int i = me.column();
			// NOTE(review): the rating value me.get() is ignored here (binary
			// confidence alpha * 1); the commented-out line below is the
			// real-valued alternative — confirm which variant is intended
			CuiI.set(u, i, alpha * 1);
			// CuiI.set(u, i, Math.log(1.0 + Math.pow(10, alpha) * me.get()));
			Pui.set(u, i, 1.0d);
		}
		this.userItemList = new ArrayList<>();
		this.itemUserList = new ArrayList<>();
		for (int u = 0; u < numUsers; u++) {
			userItemList.add(trainMatrix.getColumns(u));
		}
		for (int i = 0; i < numItems; i++) {
			itemUserList.add(trainMatrix.getRows(i));
		}
	}

	/**
	 * Alternating least squares: each iteration first solves all user factors
	 * with item factors fixed, then all item factors with user factors fixed.
	 * The sparse structure of (Cu - I) keeps each solve at cost proportional
	 * to the number of observed ratings.
	 */
	@Override
	protected void buildModel() throws Exception {
		// To be consistent with the symbols in the paper
		DenseMatrix X = P, Y = Q;
		SparseMatrix IuMatrix = DiagMatrix.eye(numFactors).scale(regU);
		SparseMatrix IiMatrix = DiagMatrix.eye(numFactors).scale(regI);
		for (int iter = 1; iter <= numIters; iter++) {
			// Step 1: update user factors;
			DenseMatrix Yt = Y.transpose();
			DenseMatrix YtY = Yt.mult(Y);
			for (int u = 0; u < numUsers; u++) {
				if (verbose && (u + 1) % numUsers == 0)
					Logs.debug("{}{} runs at iteration = {}, user = {}/{} {}", algoName, foldInfo, iter, u + 1,
							numUsers, new Date());
				// Yt * (Cu - I): only columns for items rated by u are non-zero
				DenseMatrix YtCuI = new DenseMatrix(numFactors, numItems);
				for (int i : userItemList.get(u)) {
					for (int k = 0; k < numFactors; k++) {
						YtCuI.set(k, i, Y.get(i, k) * CuiI.get(u, i));
					}
				}
				// YtY + Yt * (Cu - I) * Y
				DenseMatrix YtCuY = new DenseMatrix(numFactors, numFactors);
				for (int k = 0; k < numFactors; k++) {
					for (int f = 0; f < numFactors; f++) {
						double value = 0.0;
						for (int i : userItemList.get(u)) {
							value += YtCuI.get(k, i) * Y.get(i, f);
						}
						YtCuY.set(k, f, value);
					}
				}
				YtCuY = YtCuY.add(YtY);
				// (YtCuY + lambda * I)^-1
				// lambda * I can be pre-defined because it is the same every time.
				DenseMatrix Wu = (YtCuY.add(IuMatrix)).inv();
				// Yt * (Cu - I) * Pu + Yt * Pu
				DenseVector YtCuPu = new DenseVector(numFactors);
				for (int f = 0; f < numFactors; f++) {
					for (int i : userItemList.get(u)) {
						YtCuPu.add(f, Pui.get(u, i) * (YtCuI.get(f, i) + Yt.get(f, i)));
					}
				}
				DenseVector xu = Wu.mult(YtCuPu);
				// update user factors
				X.setRow(u, xu);
			}
			// Step 2: update item factors;
			DenseMatrix Xt = X.transpose();
			DenseMatrix XtX = Xt.mult(X);
			for (int i = 0; i < numItems; i++) {
				if (verbose && (i + 1) % numItems == 0)
					Logs.debug("{}{} runs at iteration = {}, item = {}/{} {}", algoName, foldInfo, iter, i + 1,
							numItems, new Date());
				DenseMatrix XtCiI = new DenseMatrix(numFactors, numUsers);
				// actually XtCiI is a sparse matrix
				// Xt * (Ci-I)
				for (int u : itemUserList.get(i)) {
					for (int k = 0; k < numFactors; k++) {
						XtCiI.set(k, u, X.get(u, k) * CuiI.get(u, i));
					}
				}
				// XtX + Xt * (Ci - I) * X
				DenseMatrix XtCiX = new DenseMatrix(numFactors, numFactors);
				for (int k = 0; k < numFactors; k++) {
					for (int f = 0; f < numFactors; f++) {
						double value = 0.0;
						for (int u : itemUserList.get(i)) {
							value += XtCiI.get(k, u) * X.get(u, f);
						}
						XtCiX.set(k, f, value);
					}
				}
				XtCiX = XtCiX.add(XtX);
				// (XtCuX + lambda * I)^-1
				// lambda * I can be pre-defined because it is the same every time.
				DenseMatrix Wi = (XtCiX.add(IiMatrix)).inv();
				// Xt * (Ci - I) * Pu + Xt * Pu
				DenseVector XtCiPu = new DenseVector(numFactors);
				for (int f = 0; f < numFactors; f++) {
					for (int u : itemUserList.get(i)) {
						XtCiPu.add(f, Pui.get(u, i) * (XtCiI.get(f, u) + Xt.get(f, u)));
					}
				}
				DenseVector yi = Wi.mult(XtCiPu);
				// update item factors
				Y.setRow(i, yi);
			}
		}
	}

	@Override
	public String toString() {
		return Strings.toString(new Object[] { binThold, alpha, numFactors, regU, regI, numIters }, ",");
	}
}
| 5,491 | 29.175824 | 99 | java |
librec | librec-master/librec/src/main/java/librec/ranking/LDA.java | // Copyright (C) 2014-2015 Guibing Guo
//
// This file is part of LibRec.
//
// LibRec is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// LibRec is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with LibRec. If not, see <http://www.gnu.org/licenses/>.
//
package librec.ranking;
import static librec.util.Gamma.digamma;
import librec.data.AddConfiguration;
import librec.data.DenseMatrix;
import librec.data.DenseVector;
import librec.data.MatrixEntry;
import librec.data.SparseMatrix;
import librec.intf.GraphicRecommender;
import librec.util.Strings;
import com.google.common.collect.HashBasedTable;
/**
* Latent Dirichlet Allocation for implicit feedback: Tom Griffiths, <strong>Gibbs sampling in the generative model of
* Latent Dirichlet Allocation</strong>, 2002. <br>
*
* <p>
* <strong>Remarks:</strong> This implementation of LDA is for implicit feedback, where users are regarded as documents
* and items as words. To directly apply LDA to explicit ratings, Ian Porteous et al. (AAAI 2008, Section Bi-LDA)
* mentioned that, one way is to treat items as documents and ratings as words. We did not provide such an LDA
* implementation for explicit ratings. Instead, we provide recommender {@code URP} as an alternative LDA model for
* explicit ratings.
* </p>
*
* @author Guibing Guo
*
*/
@AddConfiguration(before = "factors, alpha, beta")
public class LDA extends GraphicRecommender {

	public LDA(SparseMatrix trainMatrix, SparseMatrix testMatrix, int fold) {
		super(trainMatrix, testMatrix, fold);
		isRankingPred = true;
	}

	/**
	 * Allocates the count statistics, hyper-parameter vectors and the initial
	 * topic assignment z(u, i) that seeds the Markov chain.
	 */
	@Override
	protected void initModel() throws Exception {

		// accumulators for the posterior estimates of theta (user-topic) and phi (topic-item)
		PukSum = new DenseMatrix(numUsers, numFactors);
		PkiSum = new DenseMatrix(numFactors, numItems);

		// initialize count variables.
		Nuk = new DenseMatrix(numUsers, numFactors);
		Nu = new DenseVector(numUsers);

		Nki = new DenseMatrix(numFactors, numItems);
		Nk = new DenseVector(numFactors);

		// symmetric Dirichlet priors, later optimized in mStep()
		alpha = new DenseVector(numFactors);
		alpha.setAll(initAlpha);

		beta = new DenseVector(numItems);
		beta.setAll(initBeta);

		// The z_u,i are initialized to values in [0, K-1] to determine the initial state of the Markov chain.
		z = HashBasedTable.create();
		for (MatrixEntry me : trainMatrix) {

			int u = me.row();
			int i = me.column();

			int t = (int) (Math.random() * numFactors); // 0 ~ k-1

			// assign a topic t to pair (u, i)
			z.put(u, i, t);

			// number of items of user u assigned to topic t.
			Nuk.add(u, t, 1);
			// total number of items of user u
			Nu.add(u, 1);
			// number of instances of item i assigned to topic t
			Nki.add(t, i, 1);
			// total number of words assigned to topic t.
			Nk.add(t, 1);
		}
	}

	/**
	 * One sweep of collapsed Gibbs sampling: for every observed (u, i) the
	 * current topic is removed from the counts, a new topic is drawn from the
	 * full conditional distribution, and the counts are restored.
	 */
	protected void eStep() {

		double sumAlpha = alpha.sum();
		double sumBeta = beta.sum();

		// Gibbs sampling from full conditional distribution
		for (MatrixEntry me : trainMatrix) {

			int u = me.row();
			int i = me.column();
			int t = z.get(u, i); // topic

			// remove the current assignment before resampling
			Nuk.add(u, t, -1);
			Nu.add(u, -1);
			Nki.add(t, i, -1);
			Nk.add(t, -1);

			// do multinomial sampling via cumulative method:
			double[] p = new double[numFactors];
			for (int k = 0; k < numFactors; k++) {
				p[k] = (Nuk.get(u, k) + alpha.get(k)) / (Nu.get(u) + sumAlpha) * (Nki.get(k, i) + beta.get(i))
						/ (Nk.get(k) + sumBeta);
			}
			// cumulating multinomial parameters
			for (int k = 1; k < p.length; k++) {
				p[k] += p[k - 1];
			}
			// scaled sample because of unnormalized p[], randomly sampled a new topic t
			double rand = Math.random() * p[numFactors - 1];
			for (t = 0; t < p.length; t++) {
				if (rand < p[t])
					break;
			}

			// add newly estimated z_i to count variables
			Nuk.add(u, t, 1);
			Nu.add(u, 1);
			Nki.add(t, i, 1);
			Nk.add(t, 1);

			z.put(u, i, t);
		}
	}

	/**
	 * Updates the Dirichlet hyper-parameters alpha and beta via the
	 * fixed-point iteration based on digamma functions.
	 */
	@Override
	protected void mStep() {

		double sumAlpha = alpha.sum();
		double sumBeta = beta.sum();
		double ak, bi;

		// update alpha vector
		for (int k = 0; k < numFactors; k++) {

			ak = alpha.get(k);
			double numerator = 0, denominator = 0;
			for (int u = 0; u < numUsers; u++) {
				numerator += digamma(Nuk.get(u, k) + ak) - digamma(ak);
				denominator += digamma(Nu.get(u) + sumAlpha) - digamma(sumAlpha);
			}
			// guard against a zero numerator collapsing the hyper-parameter
			if (numerator != 0)
				alpha.set(k, ak * (numerator / denominator));
		}

		// update beta_k
		for (int i = 0; i < numItems; i++) {

			bi = beta.get(i);
			double numerator = 0, denominator = 0;
			for (int k = 0; k < numFactors; k++) {
				numerator += digamma(Nki.get(k, i) + bi) - digamma(bi);
				denominator += digamma(Nk.get(k) + sumBeta) - digamma(sumBeta);
			}
			if (numerator != 0)
				beta.set(i, bi * (numerator / denominator));
		}
	}

	/**
	 * Add to the statistics the values of theta and phi for the current state.
	 */
	protected void readoutParams() {

		double sumAlpha = alpha.sum();
		double sumBeta = beta.sum();

		double val = 0;
		for (int u = 0; u < numUsers; u++) {
			for (int k = 0; k < numFactors; k++) {
				val = (Nuk.get(u, k) + alpha.get(k)) / (Nu.get(u) + sumAlpha);
				PukSum.add(u, k, val);
			}
		}
		for (int k = 0; k < numFactors; k++) {
			for (int i = 0; i < numItems; i++) {
				val = (Nki.get(k, i) + beta.get(i)) / (Nk.get(k) + sumBeta);
				PkiSum.add(k, i, val);
			}
		}
		numStats++;
	}

	/**
	 * Averages the accumulated statistics into the final parameter estimates.
	 */
	@Override
	protected void estimateParams() {
		Puk = PukSum.scale(1.0 / numStats);
		Pki = PkiSum.scale(1.0 / numStats);
	}

	@Override
	public double ranking(int u, int j) throws Exception {
		return DenseMatrix.product(Puk, u, Pki, j);
	}

	@Override
	public String toString() {
		return Strings.toString(new Object[] { numFactors, initAlpha, initBeta }) + ", " + super.toString();
	}
}
| 6,058 | 27.313084 | 119 | java |
librec | librec-master/librec/src/main/java/librec/ranking/WBPR.java | // Copyright (C) 2014-2015 Guibing Guo
//
// This file is part of LibRec.
//
// LibRec is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// LibRec is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with LibRec. If not, see <http://www.gnu.org/licenses/>.
//
package librec.ranking;
import java.util.AbstractMap;
import java.util.ArrayList;
import java.util.List;
import java.util.Map.Entry;
import librec.data.DenseMatrix;
import librec.data.DenseVector;
import librec.data.SparseMatrix;
import librec.intf.IterativeRecommender;
import librec.util.Lists;
import librec.util.Randoms;
import librec.util.Strings;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
/**
*
* Gantner et al., <strong>Bayesian Personalized Ranking for Non-Uniformly Sampled Items</strong>, JMLR, 2012.
*
* @author guoguibing
*
*/
public class WBPR extends IterativeRecommender {

	// (item, popularity) pairs sorted by popularity, shared by all users
	private List<Entry<Integer, Double>> sortedItemPops;
	// per-user candidate negative items with normalized sampling probabilities
	private LoadingCache<Integer, List<Entry<Integer, Double>>> cacheItemProbs;

	public WBPR(SparseMatrix trainMatrix, SparseMatrix testMatrix, int fold) {
		super(trainMatrix, testMatrix, fold);

		isRankingPred = true;
		initByNorm = false;
	}

	/**
	 * Initializes item biases, caches each user's rated items, pre-sorts items
	 * by popularity and builds a per-user cache of candidate negative items
	 * with their (normalized) sampling probabilities.
	 */
	@Override
	protected void initModel() throws Exception {
		super.initModel();

		itemBias = new DenseVector(numItems);
		itemBias.init(smallValue);

		userItemsCache = trainMatrix.rowColumnsCache(cacheSpec);

		// pre-compute and sort by item's popularity
		sortedItemPops = new ArrayList<>();
		for (int i = 0; i < numItems; i++) {
			sortedItemPops.add(new AbstractMap.SimpleEntry<Integer, Double>(i, trainMatrix.columnSize(i) + 0.0));
		}
		Lists.sortList(sortedItemPops, true);

		// cache each user's candidate items with probabilities
		cacheItemProbs = CacheBuilder.from(cacheSpec).build(new CacheLoader<Integer, List<Entry<Integer, Double>>>() {

			@Override
			public List<Entry<Integer, Double>> load(Integer u) throws Exception {
				List<Entry<Integer, Double>> itemProbs = new ArrayList<>();

				List<Integer> ratedItems = userItemsCache.get(u);

				// filter candidate items: unrated by u and rated by at least one user
				double sum = 0;
				for (Entry<Integer, Double> itemPop : sortedItemPops) {
					Integer item = itemPop.getKey();
					double popularity = itemPop.getValue();

					if (!ratedItems.contains(item) && popularity > 0) {
						// make a clone to prevent bugs from normalization
						itemProbs.add(new AbstractMap.SimpleEntry<Integer, Double>(itemPop));
						sum += popularity;
					}
				}

				// normalization so the probabilities add up to 1
				for (Entry<Integer, Double> itemProb : itemProbs) {
					itemProb.setValue(itemProb.getValue() / sum);
				}

				return itemProbs;
			}
		});
	}

	/**
	 * BPR-style SGD where the negative item j is sampled proportionally to its
	 * popularity (non-uniform sampling, Gantner et al., JMLR 2012).
	 */
	@Override
	protected void buildModel() throws Exception {

		for (int iter = 1; iter <= numIters; iter++) {

			loss = 0;
			for (int s = 0, smax = numUsers * 100; s < smax; s++) {
				// randomly draw (u, i, j)
				int u = 0, i = 0, j = 0;
				List<Integer> ratedItems = null;
				List<Entry<Integer, Double>> itemProbs = null;

				while (true) {
					u = Randoms.uniform(numUsers);
					ratedItems = userItemsCache.get(u);

					if (ratedItems.size() == 0)
						continue;

					// positive item: drawn uniformly from u's rated items
					i = Randoms.random(ratedItems);

					// sample j by popularity (probability) via inverse CDF
					itemProbs = cacheItemProbs.get(u);

					double rand = Randoms.random();
					double sum = 0;
					for (Entry<Integer, Double> itemProb : itemProbs) {
						sum += itemProb.getValue();
						if (sum >= rand) {
							j = itemProb.getKey();
							break;
						}
					}

					break;
				}

				// update parameters
				double xui = predict(u, i);
				double xuj = predict(u, j);
				double xuij = xui - xuj;

				double vals = -Math.log(g(xuij));
				loss += vals;

				double cmg = g(-xuij);

				// update bias
				double bi = itemBias.get(i), bj = itemBias.get(j);
				itemBias.add(i, lRate * (cmg - regB * bi));
				itemBias.add(j, lRate * (-cmg - regB * bj));
				loss += regB * (bi * bi + bj * bj);

				// update user/item vectors
				for (int f = 0; f < numFactors; f++) {
					double puf = P.get(u, f);
					double qif = Q.get(i, f);
					double qjf = Q.get(j, f);

					P.add(u, f, lRate * (cmg * (qif - qjf) - regU * puf));
					Q.add(i, f, lRate * (cmg * puf - regI * qif));
					Q.add(j, f, lRate * (cmg * (-puf) - regI * qjf));

					loss += regU * puf * puf + regI * qif * qif + regI * qjf * qjf;
				}
			}

			if (isConverged(iter))
				break;
		}
	}

	@Override
	public String toString() {
		return Strings.toString(new Object[] { binThold, numFactors, initLRate, maxLRate, regU, regI, regB, numIters });
	}

	@Override
	public double predict(int u, int j) throws Exception {
		return itemBias.get(j) + DenseMatrix.rowMult(P, u, Q, j);
	}
}
| 5,184 | 26.433862 | 114 | java |
librec | librec-master/librec/src/main/java/librec/ranking/FISMauc.java | // Copyright (C) 2014 Guibing Guo
//
// This file is part of LibRec.
//
// LibRec is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// LibRec is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with LibRec. If not, see <http://www.gnu.org/licenses/>.
//
package librec.ranking;
import java.util.ArrayList;
import java.util.List;
import librec.data.Configuration;
import librec.data.DenseMatrix;
import librec.data.DenseVector;
import librec.data.SparseMatrix;
import librec.data.SparseVector;
import librec.data.VectorEntry;
import librec.intf.IterativeRecommender;
import librec.util.Randoms;
import librec.util.Strings;
/**
* Kabbur et al., <strong>FISM: Factored Item Similarity Models for Top-N Recommender Systems</strong>, KDD 2013.
*
* @author guoguibing
*
*/
@Configuration("binThold, rho, alpha, factors, lRate, maxLRate, regI, regB, iters")
public class FISMauc extends IterativeRecommender {

	// number of negative (unrated) items sampled per observed rating (option "-rho")
	private int rho;
	// neighborhood agreement exponent used in the rated-set weight (option "-alpha")
	private float alpha;

	public FISMauc(SparseMatrix trainMatrix, SparseMatrix testMatrix, int fold) {
		super(trainMatrix, testMatrix, fold);
		isRankingPred = true;
	}

	/**
	 * Initializes the two item-factor matrices (P for items as "history",
	 * Q for items as prediction targets), the item biases, and reads the
	 * FISM-specific options.
	 */
	@Override
	protected void initModel() throws Exception {
		P = new DenseMatrix(numItems, numFactors);
		Q = new DenseMatrix(numItems, numFactors);
		P.init(smallValue);
		Q.init(smallValue);

		itemBias = new DenseVector(numItems);
		itemBias.init(smallValue);

		algoOptions = cf.getParamOptions("FISM");
		rho = algoOptions.getInt("-rho");
		alpha = algoOptions.getFloat("-alpha");

		userItemsCache = trainMatrix.rowColumnsCache(cacheSpec);
	}

	/**
	 * SGD on the AUC-style pairwise objective of FISM: for each observed
	 * (u, i), rho unrated items j are sampled, and the model is pushed to
	 * score i above each j.
	 */
	@Override
	protected void buildModel() throws Exception {
		for (int iter = 1; iter <= numIters; iter++) {

			loss = 0;
			// update throughout each (u, i, j) cell
			for (int u : trainMatrix.rows()) {
				SparseVector Ru = trainMatrix.row(u);
				int[] ratedItems = Ru.getIndex();

				for (VectorEntry ve : Ru) {
					int i = ve.index();
					double rui = ve.get();

					// sample a set of rho distinct items unrated by user u
					List<Integer> js = new ArrayList<>();
					int len = 0;
					while (len < rho) {
						int j = Randoms.uniform(numItems);
						if (Ru.contains(j) || js.contains(j))
							continue;

						js.add(j);
						len++;
					}

					// weight of u's rated set, excluding the target item i
					double wu = Ru.getCount() - 1 > 0 ? Math.pow(Ru.getCount() - 1, -alpha) : 0;
					// accumulated gradient shared by all rated items (applied below)
					double[] x = new double[numFactors];

					// update for each unrated item
					for (int j : js) {
						double sum_i = 0, sum_j = 0;
						for (int k : ratedItems) {
							// for test, i and j will be always unequal as j is
							// unrated
							if (i != k)
								sum_i += DenseMatrix.rowMult(P, k, Q, i);

							sum_j += DenseMatrix.rowMult(P, k, Q, j);
						}

						double bi = itemBias.get(i), bj = itemBias.get(j);

						double pui = bi + wu * sum_i;
						double puj = bj + Math.pow(Ru.getCount(), -alpha) * sum_j;
						double ruj = 0;

						// pairwise error: observed margin minus predicted margin
						double eij = (rui - ruj) - (pui - puj);

						loss += eij * eij;

						// update bi
						itemBias.add(i, lRate * (eij - regB * bi));

						// update bj
						itemBias.add(j, -lRate * (eij - regB * bj));

						loss += regB * bi * bi - regB * bj * bj;

						// update qif, qjf
						for (int f = 0; f < numFactors; f++) {
							double qif = Q.get(i, f), qjf = Q.get(j, f);

							double sum_k = 0;
							for (int k : ratedItems) {
								if (k != i) {
									sum_k += P.get(k, f);
								}
							}

							double delta_i = eij * wu * sum_k - regI * qif;
							Q.add(i, f, lRate * delta_i);

							double delta_j = eij * wu * sum_k - regI * qjf;
							Q.add(j, f, -lRate * delta_j);

							x[f] += eij * (qif - qjf);

							loss += regI * qif * qif - regI * qjf * qjf;
						}
					}

					// update for each rated item: apply the averaged gradient x
					for (int j : ratedItems) {
						if (j != i) {
							for (int f = 0; f < numFactors; f++) {
								double pjf = P.get(j, f);
								double delta = wu * x[f] / rho - regI * pjf;
								P.add(j, f, lRate * delta);

								loss += regI * pjf * pjf;
							}
						}
					}
				}
			}

			loss *= 0.5;

			if (isConverged(iter))
				break;
		}
	}

	/**
	 * Scores item i for user u as its bias plus the weighted aggregate
	 * similarity to the other items u has rated.
	 */
	@Override
	public double predict(int u, int i) throws Exception {
		double sum = 0;
		int count = 0;

		List<Integer> items = userItemsCache.get(u);
		for (int j : items) {
			// for test, i and j will be always unequal as j is unrated
			if (i != j) {
				sum += DenseMatrix.rowMult(P, j, Q, i);
				count++;
			}
		}

		double wu = count > 0 ? Math.pow(count, -alpha) : 0;

		return itemBias.get(i) + wu * sum;
	}

	@Override
	public String toString() {
		return Strings.toString(new Object[] { binThold, rho, alpha, numFactors, initLRate, maxLRate, regI, regB,
				numIters });
	}
}
| 5,076 | 23.887255 | 113 | java |
librec | librec-master/librec/src/main/java/librec/ranking/LRMF.java | // Copyright (C) 2014-2015 Guibing Guo
//
// This file is part of LibRec.
//
// LibRec is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// LibRec is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with LibRec. If not, see <http://www.gnu.org/licenses/>.
//
package librec.ranking;
import java.util.List;
import librec.data.Configuration;
import librec.data.DenseMatrix;
import librec.data.DenseVector;
import librec.data.MatrixEntry;
import librec.data.SparseMatrix;
import librec.intf.IterativeRecommender;
import librec.util.Strings;
/**
*
* Shi et al., <strong>List-wise learning to rank with matrix factorization for collaborative filtering</strong>, RecSys
* 2010.
*
* @author wubin
*
*/
@Configuration("binThold, numFactors, initLRate, maxLRate, regU, regI, numIters")
public class LRMF extends IterativeRecommender {
public DenseVector userExp;
public LRMF(SparseMatrix trainMatrix, SparseMatrix testMatrix, int fold) {
super(trainMatrix, testMatrix, fold);
isRankingPred = true;
initByNorm = false;
}
@Override
protected void initModel() throws Exception {
super.initModel();
userExp = new DenseVector(numUsers);
for (MatrixEntry me : trainMatrix) {
int u = me.row(); // user
double ruj = me.get();
userExp.add(u, Math.exp(ruj));
}
}
@Override
protected void buildModel() throws Exception {
for (int iter = 1; iter <= numIters; iter++) {
loss = 0;
for (MatrixEntry me : trainMatrix) {
int u = me.row(); // user
int j = me.column(); // item
double ruj = me.get();
double pred = DenseMatrix.rowMult(P, u, Q, j);
double uexp = 0;
List<Integer> items = trainMatrix.getColumns(u);
for (int i : items) {
uexp += Math.exp(DenseMatrix.rowMult(P, u, Q, i));
}
loss -= Math.exp(ruj) / userExp.get(u) * Math.log(Math.exp(pred) / uexp);
// update factors
for (int f = 0; f < numFactors; f++) {
double puf = P.get(u, f);
double qjf = Q.get(j, f);
double delta_u = (Math.exp(ruj) / userExp.get(u) - Math.exp(pred) / uexp) * gd(pred) * qjf - regU * puf;
double delta_j = (Math.exp(ruj) / userExp.get(u) - Math.exp(pred) / uexp) * gd(pred) * puf - regI * qjf;
P.add(u, f, lRate * delta_u);
Q.add(j, f, lRate * delta_j);
loss += 0.5 * regU * puf * puf + 0.5 * regI * qjf * qjf;
}
}
if (isConverged(iter))
break;
}// end of training
}
@Override
public String toString() {
return Strings.toString(new Object[] { binThold, numFactors, initLRate, maxLRate, regU, regI, numIters }, ",");
}
}
| 3,011 | 25.892857 | 120 | java |
librec | librec-master/librec/src/main/java/librec/ranking/CLiMF.java | // Copyright (C) 2014 Guibing Guo
//
// This file is part of LibRec.
//
// LibRec is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// LibRec is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with LibRec. If not, see <http://www.gnu.org/licenses/>.
//
package librec.ranking;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import librec.data.SparseMatrix;
import librec.data.SparseVector;
import librec.intf.IterativeRecommender;
/**
* Shi et al., <strong>Climf: learning to maximize reciprocal rank with collaborative less-is-more filtering.</strong>,
* RecSys 2012.
*
* @author guoguibing
*
*/
public class CLiMF extends IterativeRecommender {

	public CLiMF(SparseMatrix rm, SparseMatrix tm, int fold) {
		super(rm, tm, fold);
	}

	/**
	 * Gradient ascent on the smoothed lower bound of the Mean Reciprocal Rank,
	 * following Shi et al. (RecSys 2012). Per iteration, for each user: compute
	 * the user-factor gradient, compute item-factor gradients for the user's
	 * rated items, apply both, then accumulate the objective value.
	 */
	@Override
	protected void buildModel() throws Exception {
		for (int iter = 1; iter <= numIters; iter++) {
			loss = 0;
			for (int u = 0; u < numUsers; u++) {
				// all user u's ratings
				SparseVector uv = trainMatrix.row(u);

				// compute sgd for user u
				double[] sgds = new double[numFactors];
				for (int f = 0; f < numFactors; f++) {
					double sgd = -regU * P.get(u, f);

					for (int j : uv.getIndex()) {
						double fuj = predict(u, j);
						double qjf = Q.get(j, f);

						sgd += g(-fuj) * qjf;

						for (int k : uv.getIndex()) {
							if (k == j)
								continue;

							double fuk = predict(u, k);
							double qkf = Q.get(k, f);

							double x = fuk - fuj;

							sgd += gd(x) / (1 - g(x)) * (qjf - qkf);
						}
					}
					sgds[f] = sgd;
				}

				// compute sgds for items rated by user u
				Map<Integer, List<Double>> itemSgds = new HashMap<>();
				for (int j : uv.getIndex()) {
					double fuj = predict(u, j);

					List<Double> jSgds = new ArrayList<>();
					for (int f = 0; f < numFactors; f++) {
						double puf = P.get(u, f);
						double qjf = Q.get(j, f);

						// j is drawn from uv, so yuj is always 1 here; kept for
						// symmetry with the all-items variant of this loop
						double yuj = uv.contains(j) ? 1.0 : 0.0;
						double sgd = yuj * g(-fuj) * puf - regI * qjf;

						for (int k : uv.getIndex()) {
							if (k == j)
								continue;
							double fuk = predict(u, k);

							double x = fuk - fuj;
							sgd += gd(-x) * (1.0 / (1 - g(x)) - 1.0 / (1 - g(-x))) * puf;
						}
						jSgds.add(sgd);
					}

					itemSgds.put(j, jSgds);
				}

				// update user factors
				for (int f = 0; f < numFactors; f++)
					P.add(u, f, lRate * sgds[f]);

				// update item factors — only items that actually have gradients.
				// FIX: the previous loop iterated ALL items (0..numItems) and
				// called itemSgds.get(j) which returns null for any item not
				// rated by u, throwing a NullPointerException at jSgds.get(f).
				for (Map.Entry<Integer, List<Double>> en : itemSgds.entrySet()) {
					int j = en.getKey();
					List<Double> jSgds = en.getValue();
					for (int f = 0; f < numFactors; f++)
						Q.add(j, f, lRate * jSgds.get(f));
				}

				// compute loss
				for (int j = 0; j < numItems; j++) {
					if (uv.contains(j)) {
						double fuj = predict(u, j);

						loss += Math.log(g(fuj));
						for (int k : uv.getIndex()) {
							double fuk = predict(u, k);

							loss += Math.log(1 - g(fuk - fuj));
						}
					}

					for (int f = 0; f < numFactors; f++) {
						double puf = P.get(u, f);
						double qjf = Q.get(j, f);

						loss += -0.5 * (regU * puf * puf + regI * qjf * qjf);
					}
				}
			}

			if (isConverged(iter))
				break;
		}// end of training
	}
}
| 3,608 | 22.435065 | 119 | java |
librec | librec-master/librec/src/main/java/librec/ranking/SLIM.java | // Copyright (C) 2014 Guibing Guo
//
// This file is part of LibRec.
//
// LibRec is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// LibRec is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with LibRec. If not, see <http://www.gnu.org/licenses/>.
//
package librec.ranking;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import librec.data.Configuration;
import librec.data.DenseMatrix;
import librec.data.SparseMatrix;
import librec.data.SparseVector;
import librec.data.SymmMatrix;
import librec.data.VectorEntry;
import librec.intf.IterativeRecommender;
import librec.util.Lists;
import librec.util.Logs;
import librec.util.Strings;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimap;
/**
* Xia Ning and George Karypis, <strong>SLIM: Sparse Linear Methods for Top-N Recommender Systems</strong>, ICDM 2011. <br>
*
* <p>
* Related Work:
* <ul>
* <li>Levy and Jack, Efficient Top-N Recommendation by Linear Regression, ISRS 2013. This paper reports experimental
* results on the MovieLens (100K, 10M) and Epinions datasets in terms of precision, MRR and HR@N (i.e., Recall@N).</li>
* <li>Friedman et al., Regularization Paths for Generalized Linear Models via Coordinate Descent, Journal of
* Statistical Software, 2010.</li>
* </ul>
* </p>
*
* @author guoguibing
*
*/
@Configuration("binThold, knn, regL2, regL1, similarity, iters")
public class SLIM extends IterativeRecommender {
	// item-to-item aggregation coefficients; W(i, j) weights a user's rating on item i
	// when predicting the user's preference for item j
	private DenseMatrix W;
	// item's nearest neighbors for kNN > 0
	private Multimap<Integer, Integer> itemNNs;
	// item's nearest neighbors for kNN <=0, i.e., all other items
	private List<Integer> allItems;
	// regularization parameters for the L1 or L2 term
	private float regL1, regL2;

	public SLIM(SparseMatrix trainMatrix, SparseMatrix testMatrix, int fold) {
		super(trainMatrix, testMatrix, fold);
		isRankingPred = true; // SLIM is a top-N (item ranking) method
		regL1 = algoOptions.getFloat("-l1");
		regL2 = algoOptions.getFloat("-l2");
	}

	/**
	 * Initializes the coefficient matrix W, caches user rating vectors, and
	 * pre-computes each item's neighborhood: its knn most similar items when
	 * knn > 0, otherwise all items. Diagonal entries W(j, j) are set to 0.
	 */
	@Override
	protected void initModel() throws Exception {
		W = new DenseMatrix(numItems, numItems);
		W.init(); // initial guesses: make smaller guesses (e.g., W.init(0.01)) to speed up training
		userCache = trainMatrix.rowCache(cacheSpec);
		if (knn > 0) {
			// find the nearest neighbors for each item based on item similarity
			SymmMatrix itemCorrs = buildCorrs(false);
			itemNNs = HashMultimap.create();
			for (int j = 0; j < numItems; j++) {
				// set diagonal entries to 0
				W.set(j, j, 0);
				// find the k-nearest neighbors for each item
				Map<Integer, Double> nns = itemCorrs.row(j).toMap();
				// sort by values to retrieve the top-N similar items
				if (knn > 0 && knn < nns.size()) {
					List<Map.Entry<Integer, Double>> sorted = Lists.sortMap(nns, true);
					List<Map.Entry<Integer, Double>> subset = sorted.subList(0, knn);
					nns.clear();
					for (Map.Entry<Integer, Double> kv : subset)
						nns.put(kv.getKey(), kv.getValue());
				}
				// put into the nns multimap
				for (Entry<Integer, Double> en : nns.entrySet())
					itemNNs.put(j, en.getKey());
			}
		} else {
			// all items are used
			allItems = trainMatrix.columns();
			for (int j = 0; j < numItems; j++)
				W.set(j, j, 0.0);
		}
	}

	/**
	 * Learns W by cyclic coordinate descent on the elastic-net-regularized squared
	 * error: each step fixes all coefficients except w(i, j) and applies the
	 * closed-form soft-thresholding update.
	 */
	@Override
	protected void buildModel() throws Exception {
		last_loss = 0;
		// number of iteration cycles
		for (int iter = 1; iter <= numIters; iter++) {
			loss = 0;
			// each cycle iterates through one coordinate direction
			for (int j = 0; j < numItems; j++) {
				// find k-nearest neighbors
				Collection<Integer> nns = knn > 0 ? itemNNs.get(j) : allItems;
				// for each nearest neighbor i, update wij by the coordinate
				// descent update rule
				// it is OK if i==j, since wjj = 0;
				// NOTE(review): in the knn <= 0 branch i can equal j and the update
				// below may set W(j, j) to a non-zero value - confirm whether the
				// wjj = 0 constraint should be re-enforced after each cycle
				for (Integer i : nns) {
					// accumulate gradient/curvature terms over the users who rated item i
					double gradSum = 0, rateSum = 0, errs = 0;
					SparseVector Ri = trainMatrix.column(i);
					int N = Ri.getCount();
					for (VectorEntry ve : Ri) {
						int u = ve.index();
						double rui = ve.get();
						double ruj = trainMatrix.get(u, j);
						// residual of rating (u, j) without w(i, j)'s own contribution
						double euj = ruj - predict(u, j, i);
						gradSum += rui * euj;
						rateSum += rui * rui;
						errs += euj * euj;
					}
					// average over the N supporting users
					gradSum /= N;
					rateSum /= N;
					errs /= N;
					double wij = W.get(i, j);
					loss += errs + 0.5 * regL2 * wij * wij + regL1 * wij;
					// soft-thresholding: w(i, j) becomes 0 unless |gradient| exceeds regL1
					if (regL1 < Math.abs(gradSum)) {
						if (gradSum > 0) {
							double update = (gradSum - regL1) / (regL2 + rateSum);
							W.set(i, j, update);
						} else {
							// One doubt: in this case, wij<0, however, the
							// paper says wij>=0. How to guarantee that?
							double update = (gradSum + regL1) / (regL2 + rateSum);
							W.set(i, j, update);
						}
					} else {
						W.set(i, j, 0.0);
					}
				}
			}

			if (isConverged(iter))
				break;
		}
	}

	/**
	 * Predicts the preference of user u on item j while leaving out the
	 * contribution of one item, as required by the coordinate descent residual.
	 *
	 * @return a prediction without the contribution of excluded_item
	 */
	protected double predict(int u, int j, int excluded_item) throws Exception {
		Collection<Integer> nns = knn > 0 ? itemNNs.get(j) : allItems;
		SparseVector Ru = userCache.get(u);
		double pred = 0;
		for (int k : nns) {
			if (Ru.contains(k) && k != excluded_item) {
				double ruk = Ru.get(k);
				pred += ruk * W.get(k, j);
			}
		}
		return pred;
	}

	@Override
	public double predict(int u, int j) throws Exception {
		// -1 never matches a valid item index, so nothing is excluded
		return predict(u, j, -1);
	}

	/**
	 * Converged once the loss decrease falls below 1e-5 (checked from the second
	 * iteration onwards).
	 */
	@Override
	protected boolean isConverged(int iter) {
		double delta_loss = last_loss - loss;
		last_loss = loss;
		if (verbose)
			Logs.debug("{}{} iter {}: loss = {}, delta_loss = {}", algoName, foldInfo, iter, loss, delta_loss);
		return iter > 1 ? delta_loss < 1e-5 : false;
	}

	@Override
	public String toString() {
		return Strings.toString(new Object[] { binThold, knn, regL2, regL1, similarityMeasure, numIters });
	}
}
| 6,215 | 26.75 | 123 | java |
librec | librec-master/librec/src/main/java/librec/ranking/GBPR.java | // Copyright (C) 2014 Guibing Guo
//
// This file is part of LibRec.
//
// LibRec is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// LibRec is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with LibRec. If not, see <http://www.gnu.org/licenses/>.
//
package librec.ranking;
import java.util.ArrayList;
import java.util.List;
import librec.data.DenseMatrix;
import librec.data.DenseVector;
import librec.data.SparseMatrix;
import librec.intf.SocialRecommender;
import librec.util.Randoms;
import librec.util.Strings;
/**
* Pan and Chen, <strong>GBPR: Group Preference Based Bayesian Personalized Ranking for One-Class Collaborative
* Filtering</strong>, IJCAI 2013.
*
* @author guoguibing
*
*/
public class GBPR extends SocialRecommender {
	// weight of the group preference relative to the individual preference
	private float rho;
	// size of the sampled user group that shares a positive item
	private int gLen;

	public GBPR(SparseMatrix trainMatrix, SparseMatrix testMatrix, int fold) {
		super(trainMatrix, testMatrix, fold);
		isRankingPred = true; // GBPR optimizes a pairwise ranking criterion
		initByNorm = false;
	}

	/**
	 * Initializes factors and item biases, reads the rho/gSize options, and caches
	 * user-item and item-user lists for fast sampling.
	 */
	@Override
	protected void initModel() throws Exception {
		// initialization
		super.initModel();
		itemBias = new DenseVector(numItems);
		itemBias.init();
		rho = algoOptions.getFloat("-rho");
		gLen = algoOptions.getInt("-gSize");
		userItemsCache = trainMatrix.rowColumnsCache(cacheSpec);
		itemUsersCache = trainMatrix.columnRowsCache(cacheSpec);
	}

	/**
	 * SGD training: each iteration repeatedly samples (u, i, g, j) - i rated by u,
	 * g a group of users who rated i (containing u), j unrated by u - and takes a
	 * gradient step on the group-vs-item pairwise loss.
	 */
	@Override
	protected void buildModel() throws Exception {
		for (int iter = 1; iter <= numIters; iter++) {
			loss = 0;
			// factor updates are accumulated and applied once per iteration
			DenseMatrix PS = new DenseMatrix(numUsers, numFactors);
			DenseMatrix QS = new DenseMatrix(numItems, numFactors);
			for (int s = 0, smax = numUsers * 100; s < smax; s++) {
				// uniformly draw (u, i, g, j)
				int u = 0, i = 0, j = 0;
				// u: a user with at least one rated item
				List<Integer> ratedItems = null; // row u
				do {
					u = Randoms.uniform(trainMatrix.numRows());
					ratedItems = userItemsCache.get(u);
				} while (ratedItems.size() == 0);
				// i: an item rated by u
				i = Randoms.random(ratedItems);
				// g: a group of users who rated i, always containing u
				List<Integer> ws = itemUsersCache.get(i); // column i
				List<Integer> g = new ArrayList<>();
				if (ws.size() <= gLen) {
					g.addAll(ws);
				} else {
					g.add(u); // u in G
					while (g.size() < gLen) {
						Integer w = Randoms.random(ws);
						if (!g.contains(w))
							g.add(w);
					}
				}
				double pgui = predict(u, i, g);
				// j: an item not rated by u
				do {
					j = Randoms.uniform(numItems);
				} while (ratedItems.contains(j));
				double puj = predict(u, j);
				double pgij = pgui - puj;
				double vals = -Math.log(g(pgij));
				loss += vals;
				double cmg = g(-pgij);
				// update bi, bj
				double bi = itemBias.get(i);
				itemBias.add(i, lRate * (cmg - regB * bi));
				loss += regB * bi * bi;
				double bj = itemBias.get(j);
				itemBias.add(j, lRate * (-cmg - regB * bj));
				loss += regB * bj * bj;
				// update Pw for every group member w; delta marks whether w == u
				double n = 1.0 / g.size();
				double sum_w[] = new double[numFactors];
				for (int w : g) {
					double delta = w == u ? 1 : 0;
					for (int f = 0; f < numFactors; f++) {
						double pwf = P.get(w, f);
						double qif = Q.get(i, f);
						double qjf = Q.get(j, f);
						double delta_pwf = rho * n * qif + (1 - rho) * delta * qif - delta * qjf;
						PS.add(w, f, lRate * (cmg * delta_pwf - regU * pwf));
						loss += regU * pwf * pwf;
						sum_w[f] += pwf;
					}
				}
				// update Qi, Qj
				for (int f = 0; f < numFactors; f++) {
					double puf = P.get(u, f);
					double qif = Q.get(i, f);
					double qjf = Q.get(j, f);
					double delta_qif = rho * n * sum_w[f] + (1 - rho) * puf;
					QS.add(i, f, lRate * (cmg * delta_qif - regI * qif));
					loss += regI * qif * qif;
					double delta_qjf = -puf;
					QS.add(j, f, lRate * (cmg * delta_qjf - regI * qjf));
					loss += regI * qjf * qjf;
				}
			}
			P = P.add(PS);
			Q = Q.add(QS);

			if (isConverged(iter))
				break;
		}
	}

	@Override
	public double predict(int u, int j) {
		// individual preference: item bias plus latent-factor inner product
		return itemBias.get(j) + DenseMatrix.rowMult(P, u, Q, j);
	}

	/**
	 * Group preference: a rho-weighted blend of the group's average score on item j
	 * and user u's individual score.
	 */
	protected double predict(int u, int j, List<Integer> g) {
		double ruj = predict(u, j);
		double sum = 0;
		for (int w : g)
			sum += DenseMatrix.rowMult(P, w, Q, j);
		double rgj = sum / g.size() + itemBias.get(j);
		return rho * rgj + (1 - rho) * ruj;
	}

	@Override
	public String toString() {
		return Strings.toString(new Object[] { binThold, rho, gLen, numFactors, initLRate, maxLRate, regU, regI, regB,
				numIters });
	}
}
| 4,798 | 23.360406 | 112 | java |
librec | librec-master/librec/src/main/java/librec/ranking/FISMrmse.java | // Copyright (C) 2014 Guibing Guo
//
// This file is part of LibRec.
//
// LibRec is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// LibRec is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with LibRec. If not, see <http://www.gnu.org/licenses/>.
//
package librec.ranking;
import java.util.List;
import librec.data.Configuration;
import librec.data.DenseMatrix;
import librec.data.DenseVector;
import librec.data.SparseMatrix;
import librec.data.SparseVector;
import librec.data.VectorEntry;
import librec.intf.IterativeRecommender;
import librec.util.Randoms;
import librec.util.Strings;
import com.google.common.collect.Table;
import com.google.common.collect.Table.Cell;
/**
* Kabbur et al., <strong>FISM: Factored Item Similarity Models for Top-N Recommender Systems</strong>, KDD 2013.
*
* @author guoguibing
*
*/
@Configuration("binThold, rho, alpha, factors, lRate, maxLRate, regI, regB, iters")
public class FISMrmse extends IterativeRecommender {
	// rho: ratio of sampled zero entries to observed entries per iteration;
	// alpha: exponent of the neighborhood-size normalization weight
	private float rho, alpha;
	// number of non-zero entries in the training matrix
	private int nnz;

	public FISMrmse(SparseMatrix trainMatrix, SparseMatrix testMatrix, int fold) {
		super(trainMatrix, testMatrix, fold);
		isRankingPred = true; // FISM targets top-N item ranking
	}

	/**
	 * Initializes the two item-factor matrices P (item-as-rated) and Q
	 * (item-as-target), the user/item biases, and the sampling parameters.
	 */
	@Override
	protected void initModel() throws Exception {
		P = new DenseMatrix(numItems, numFactors);
		Q = new DenseMatrix(numItems, numFactors);
		P.init(0.01);
		Q.init(0.01);
		userBias = new DenseVector(numUsers);
		itemBias = new DenseVector(numItems);
		userBias.init(0.01);
		itemBias.init(0.01);
		nnz = trainMatrix.size();
		// NOTE(review): options are read from the shared "FISM" group rather than
		// "FISMrmse" - confirm both FISM variants are meant to share settings
		algoOptions = cf.getParamOptions("FISM");
		rho = algoOptions.getFloat("-rho");
		alpha = algoOptions.getFloat("-alpha");
		userItemsCache = trainMatrix.rowColumnsCache(cacheSpec);
	}

	/**
	 * SGD training over all observed ratings plus a per-iteration sample of
	 * rho * nnz unobserved (zero) entries, minimizing regularized squared error.
	 */
	@Override
	protected void buildModel() throws Exception {
		int sampleSize = (int) (rho * nnz);
		int totalSize = numUsers * numItems;
		for (int iter = 1; iter <= numIters; iter++) {
			loss = 0;
			// temporal data
			DenseMatrix PS = new DenseMatrix(numItems, numFactors);
			DenseMatrix QS = new DenseMatrix(numItems, numFactors);
			// new training data by sampling negative values
			Table<Integer, Integer, Double> R = trainMatrix.getDataTable();
			// make a random sample of negative feedback (total - nnz)
			List<Integer> indices = Randoms.randInts(sampleSize, 0, totalSize - nnz);
			int index = 0, count = 0;
			boolean isDone = false;
			for (int u = 0; u < numUsers; u++) {
				for (int j = 0; j < numItems; j++) {
					double ruj = trainMatrix.get(u, j);
					if (ruj != 0)
						continue; // rated items
					// count enumerates the unrated cells in row-major order; cells whose
					// ordinal appears in indices are added to R with rating 0
					// (assumes indices is sorted ascending - TODO confirm Randoms.randInts)
					if (count++ == indices.get(index)) {
						R.put(u, j, 0.0);
						index++;
						if (index >= indices.size()) {
							isDone = true;
							break;
						}
					}
				}
				if (isDone)
					break;
			}
			// update throughout each user-item-rating (u, j, ruj) cell
			for (Cell<Integer, Integer, Double> cell : R.cellSet()) {
				int u = cell.getRowKey();
				int j = cell.getColumnKey();
				double ruj = cell.getValue();
				// for efficiency, use the below code to predict ruj instead of
				// simply using "predict(u,j)"
				SparseVector Ru = trainMatrix.row(u);
				double bu = userBias.get(u), bj = itemBias.get(j);
				double sum_ij = 0;
				int cnt = 0;
				for (VectorEntry ve : Ru) {
					int i = ve.index();
					// for training, i and j can be equal since j may be rated
					// or unrated
					if (i != j) {
						sum_ij += DenseMatrix.rowMult(P, i, Q, j);
						cnt++;
					}
				}
				double wu = cnt > 0 ? Math.pow(cnt, -alpha) : 0;
				double puj = bu + bj + wu * sum_ij;
				double euj = puj - ruj;
				loss += euj * euj;
				// update bu
				userBias.add(u, -lRate * (euj + regB * bu));
				// update bj
				itemBias.add(j, -lRate * (euj + regB * bj));
				loss += regB * bu * bu + regB * bj * bj;
				// update qjf
				for (int f = 0; f < numFactors; f++) {
					double qjf = Q.get(j, f);
					double sum_i = 0;
					for (VectorEntry ve : Ru) {
						int i = ve.index();
						if (i != j) {
							sum_i += P.get(i, f);
						}
					}
					double delta = euj * wu * sum_i + regI * qjf;
					QS.add(j, f, -lRate * delta);
					loss += regI * qjf * qjf;
				}
				// update pif
				for (VectorEntry ve : Ru) {
					int i = ve.index();
					if (i != j) {
						for (int f = 0; f < numFactors; f++) {
							double pif = P.get(i, f);
							double delta = euj * wu * Q.get(j, f) + regI * pif;
							PS.add(i, f, -lRate * delta);
							loss += regI * pif * pif;
						}
					}
				}
			}
			P = P.add(PS);
			Q = Q.add(QS);
			loss *= 0.5;

			if (isConverged(iter))
				break;
		}
	}

	/**
	 * Prediction: bias terms plus the size-normalized aggregation of factor
	 * products between target item j and the items rated by user u (excluding j).
	 */
	@Override
	public double predict(int u, int j) throws Exception {
		double pred = userBias.get(u) + itemBias.get(j);
		double sum = 0;
		int count = 0;
		List<Integer> items = userItemsCache.get(u);
		for (int i : items) {
			// for test, i and j will be always unequal as j is unrated
			if (i != j) {
				sum += DenseMatrix.rowMult(P, i, Q, j);
				count++;
			}
		}
		double wu = count > 0 ? Math.pow(count, -alpha) : 0;
		return pred + wu * sum;
	}

	@Override
	public String toString() {
		return Strings.toString(new Object[] { binThold, rho, alpha, numFactors, initLRate, maxLRate, regI, regB,
				numIters }, ",");
	}
}
| 5,629 | 24.36036 | 113 | java |
librec | librec-master/librec/src/main/java/librec/ranking/BUCM.java | // Copyright (C) 2014-2015 Guibing Guo
//
// This file is part of LibRec.
//
// LibRec is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// LibRec is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with LibRec. If not, see <http://www.gnu.org/licenses/>.
//
package librec.ranking;
import static librec.util.Gamma.digamma;
import librec.data.AddConfiguration;
import librec.data.DenseMatrix;
import librec.data.DenseVector;
import librec.data.MatrixEntry;
import librec.data.SparseMatrix;
import librec.intf.GraphicRecommender;
import librec.util.Logs;
import librec.util.Strings;
import com.google.common.collect.HashBasedTable;
/**
* Bayesian UCM: Nicola Barbieri et al., <strong>Modeling Item Selection and Relevance for Accurate Recommendations: a
* Bayesian Approach</strong>, RecSys 2011.
*
* <p>
* Thank the paper authors for providing source code and for having valuable discussion.
* </p>
*
* @author Guo Guibing
*
*/
@AddConfiguration(before = "factors, alpha, beta, gamma")
public class BUCM extends GraphicRecommender {
	// initial value of the symmetric Dirichlet prior over rating levels
	private float initGamma;
	// Dirichlet hyper-parameters over the rating levels
	private DenseVector gamma;

	public BUCM(SparseMatrix trainMatrix, SparseMatrix testMatrix, int fold) {
		super(trainMatrix, testMatrix, fold);
	}

	/**
	 * number of occurrences of entry (t, i, r)
	 */
	private int[][][] Nkir;

	/**
	 * cumulative statistics of probabilities of (t, i, r)
	 */
	private double[][][] PkirSum;

	/**
	 * posterior probabilities of parameters epsilon_{k, i, r}
	 */
	protected double[][][] Pkir;

	/**
	 * Allocates count variables and hyper-parameters, then assigns a random topic
	 * to every observed (user, item) rating to seed the Gibbs sampler.
	 */
	@Override
	protected void initModel() throws Exception {
		// cumulative parameters
		PukSum = new DenseMatrix(numUsers, numFactors);
		PkiSum = new DenseMatrix(numFactors, numItems);
		PkirSum = new double[numFactors][numItems][numLevels];
		// initialize count variables
		Nuk = new DenseMatrix(numUsers, numFactors);
		Nu = new DenseVector(numUsers);
		Nki = new DenseMatrix(numFactors, numItems);
		Nk = new DenseVector(numFactors);
		Nkir = new int[numFactors][numItems][numLevels];
		alpha = new DenseVector(numFactors);
		alpha.setAll(initAlpha);
		beta = new DenseVector(numItems);
		beta.setAll(initBeta);
		gamma = new DenseVector(numLevels);
		initGamma = algoOptions.getFloat("-gamma", 1.0f / numLevels);
		gamma.setAll(initGamma);
		// initialize topics
		z = HashBasedTable.create();
		for (MatrixEntry me : trainMatrix) {
			int u = me.row();
			int i = me.column();
			double rui = me.get();
			int r = ratingScale.indexOf(rui); // rating level 0 ~ numLevels - 1
			int t = (int) (Math.random() * numFactors); // 0 ~ k-1
			// assign a topic t to pair (u, i)
			z.put(u, i, t);
			// for users
			Nuk.add(u, t, 1);
			Nu.add(u, 1);
			// for items
			Nki.add(t, i, 1);
			Nk.add(t, 1);
			// for ratings
			Nkir[t][i][r]++;
		}
	}

	/**
	 * One sweep of collapsed Gibbs sampling: for every rating, its current topic is
	 * removed from the counts, a new topic is drawn from the full conditional, and
	 * the counts are restored.
	 */
	@Override
	protected void eStep() {
		double sumAlpha = alpha.sum();
		double sumBeta = beta.sum();
		double sumGamma = gamma.sum();
		// collapse Gibbs sampling
		for (MatrixEntry me : trainMatrix) {
			int u = me.row();
			int i = me.column();
			double rui = me.get();
			int r = ratingScale.indexOf(rui); // rating level 0 ~ numLevels - 1
			// remove the rating's current topic from the sufficient statistics
			int t = z.get(u, i);
			Nuk.add(u, t, -1);
			Nu.add(u, -1);
			Nki.add(t, i, -1);
			Nk.add(t, -1);
			Nkir[t][i][r]--;
			// do multinomial sampling via cumulative method:
			double[] p = new double[numFactors];
			double v1, v2, v3;
			for (int k = 0; k < numFactors; k++) {
				v1 = (Nuk.get(u, k) + alpha.get(k)) / (Nu.get(u) + sumAlpha);
				v2 = (Nki.get(k, i) + beta.get(i)) / (Nk.get(k) + sumBeta);
				v3 = (Nkir[k][i][r] + gamma.get(r)) / (Nki.get(k, i) + sumGamma);
				p[k] = v1 * v2 * v3;
			}
			// cumulate multinomial parameters
			for (int k = 1; k < p.length; k++) {
				p[k] += p[k - 1];
			}
			// scaled sample because of unnormalized p[], randomly sampled a new topic t
			double rand = Math.random() * p[numFactors - 1];
			for (t = 0; t < p.length; t++) {
				if (rand < p[t])
					break;
			}
			// new topic t
			z.put(u, i, t);
			// add newly estimated z_i to count variables
			Nuk.add(u, t, 1);
			Nu.add(u, 1);
			Nki.add(t, i, 1);
			Nk.add(t, 1);
			Nkir[t][i][r]++;
		}
	}

	/**
	 * Updates the Dirichlet hyper-parameters by fixed-point iteration.
	 *
	 * Thomas P. Minka, Estimating a Dirichlet distribution, see Eq.(55)
	 */
	@Override
	protected void mStep() {
		double sumAlpha = alpha.sum();
		double sumBeta = beta.sum();
		double sumGamma = gamma.sum();
		double ak, bi, gr;
		// update alpha
		for (int k = 0; k < numFactors; k++) {
			ak = alpha.get(k);
			double numerator = 0, denominator = 0;
			for (int u = 0; u < numUsers; u++) {
				numerator += digamma(Nuk.get(u, k) + ak) - digamma(ak);
				denominator += digamma(Nu.get(u) + sumAlpha) - digamma(sumAlpha);
			}
			if (numerator != 0)
				alpha.set(k, ak * (numerator / denominator));
		}
		// update beta
		for (int i = 0; i < numItems; i++) {
			bi = beta.get(i);
			double numerator = 0, denominator = 0;
			for (int k = 0; k < numFactors; k++) {
				numerator += digamma(Nki.get(k, i) + bi) - digamma(bi);
				denominator += digamma(Nk.get(k) + sumBeta) - digamma(sumBeta);
			}
			if (numerator != 0)
				beta.set(i, bi * (numerator / denominator));
		}
		// update gamma
		for (int r = 0; r < numLevels; r++) {
			gr = gamma.get(r);
			double numerator = 0, denominator = 0;
			for (int i = 0; i < numItems; i++) {
				for (int k = 0; k < numFactors; k++) {
					numerator += digamma(Nkir[k][i][r] + gr) - digamma(gr);
					denominator += digamma(Nki.get(k, i) + sumGamma) - digamma(sumGamma);
				}
			}
			if (numerator != 0)
				gamma.set(r, gr * (numerator / denominator));
		}
	}

	/**
	 * Converged once the average negative log-likelihood of the training ratings
	 * stops decreasing (checked after at least two parameter read-outs).
	 */
	@Override
	protected boolean isConverged(int iter) throws Exception {
		loss = 0;
		// get params
		estimateParams();
		// compute likelihood
		int count = 0;
		for (MatrixEntry me : trainMatrix) {
			int u = me.row();
			int i = me.column();
			double rui = me.get();
			int r = ratingScale.indexOf(rui);
			double prob = 0;
			for (int k = 0; k < numFactors; k++) {
				prob += Puk.get(u, k) * Pki.get(k, i) * Pkir[k][i][r];
			}
			loss += -Math.log(prob);
			count++;
		}
		loss /= count;
		float delta = (float) (loss - lastLoss); // loss gets smaller, delta <= 0
		Logs.debug("{}{} iter {} achieves log likelihood = {}, delta_LogLLH = {}", algoName, foldInfo, iter,
				(float) loss, delta);
		if (numStats > 1 && delta > 0) {
			Logs.debug("{}{} has converged at iter {}", algoName, foldInfo, iter);
			return true;
		}
		lastLoss = loss;
		return false;
	}

	/**
	 * Accumulates the current posterior estimates into the running sums, from
	 * which the final parameters are averaged.
	 */
	protected void readoutParams() {
		double val = 0;
		double sumAlpha = alpha.sum();
		double sumBeta = beta.sum();
		double sumGamma = gamma.sum();
		for (int u = 0; u < numUsers; u++) {
			for (int k = 0; k < numFactors; k++) {
				val = (Nuk.get(u, k) + alpha.get(k)) / (Nu.get(u) + sumAlpha);
				PukSum.add(u, k, val);
			}
		}
		for (int k = 0; k < numFactors; k++) {
			for (int i = 0; i < numItems; i++) {
				val = (Nki.get(k, i) + beta.get(i)) / (Nk.get(k) + sumBeta);
				PkiSum.add(k, i, val);
			}
		}
		for (int k = 0; k < numFactors; k++) {
			for (int i = 0; i < numItems; i++) {
				for (int r = 0; r < numLevels; r++) {
					val = (Nkir[k][i][r] + gamma.get(r)) / (Nki.get(k, i) + sumGamma);
					PkirSum[k][i][r] += val;
				}
			}
		}
		numStats++;
	}

	/**
	 * Averages the accumulated sums over the number of read-outs to obtain the
	 * posterior parameter estimates.
	 */
	@Override
	protected void estimateParams() {
		Puk = PukSum.scale(1.0 / numStats);
		Pki = PkiSum.scale(1.0 / numStats);
		Pkir = new double[numFactors][numItems][numLevels];
		for (int k = 0; k < numFactors; k++) {
			for (int i = 0; i < numItems; i++) {
				for (int r = 0; r < numLevels; r++) {
					Pkir[k][i][r] = PkirSum[k][i][r] / numStats;
				}
			}
		}
	}

	/**
	 * Negative log-likelihood of one observed rating under the current estimates.
	 */
	@Override
	public double perplexity(int u, int j, double ruj) throws Exception {
		// BUGFIX: map the rating to its level via ratingScale.indexOf, consistent
		// with initModel/eStep/isConverged; the previous "(int) (ruj / minRate) - 1"
		// is only correct when the scale is a uniform grid of minRate multiples
		// starting at minRate
		int r = ratingScale.indexOf(ruj);
		double prob = 0;
		for (int k = 0; k < numFactors; k++) {
			prob += Puk.get(u, k) * Pki.get(k, j) * Pkir[k][j][r];
		}
		return -Math.log(prob);
	}

	/**
	 * Rating prediction: expectation of the rating value under the mixture,
	 * normalized by the total probability mass over the levels.
	 */
	@Override
	public double predict(int u, int i) throws Exception {
		double pred = 0, probs = 0;
		for (int r = 0; r < numLevels; r++) {
			double rate = ratingScale.get(r);
			double prob = 0;
			for (int k = 0; k < numFactors; k++) {
				prob += Puk.get(u, k) * Pki.get(k, i) * Pkir[k][i][r];
			}
			pred += prob * rate;
			probs += prob;
		}
		return pred / probs;
	}

	/**
	 * Ranking score: probability that item j is selected and rated above the
	 * global mean under each topic, weighted by the user's topic distribution.
	 */
	@Override
	public double ranking(int u, int j) throws Exception {
		double rank = 0;
		for (int k = 0; k < numFactors; k++) {
			double sum = 0;
			for (int r = 0; r < numLevels; r++) {
				double rate = ratingScale.get(r);
				if (rate > globalMean) {
					sum += Pkir[k][j][r];
				}
			}
			rank += Puk.get(u, k) * Pki.get(k, j) * sum;
		}
		return rank;
	}

	@Override
	public String toString() {
		return Strings.toString(new Object[] { numFactors, initAlpha, initBeta, initGamma }) + ", " + super.toString();
	}
}
| 9,186 | 23.696237 | 118 | java |
librec | librec-master/librec/src/main/java/librec/ranking/CPTF.java | // Copyright (C) 2014-2015 Guibing Guo
//
// This file is part of LibRec.
//
// LibRec is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// LibRec is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with LibRec. If not, see <http://www.gnu.org/licenses/>.
//
package librec.ranking;
import librec.data.DenseMatrix;
import librec.data.SparseMatrix;
import librec.data.TensorEntry;
import librec.intf.TensorRecommender;
/**
* CANDECOMP/PARAFAC (CP) Tensor Factorization <br>
*
* Shao W., <strong>Tensor Completion</strong> (Section 3.2), Saarland University.
*
* @author Guo Guibing
*
*/
public class CPTF extends TensorRecommender {
	// dimension-feature matrices: M[d] holds the latent factors of dimension d
	private DenseMatrix[] M;

	public CPTF(SparseMatrix trainMatrix, SparseMatrix testMatrix, int fold) throws Exception {
		super(trainMatrix, testMatrix, fold);
	}

	/**
	 * Randomly initializes one factor matrix per tensor dimension.
	 */
	@Override
	protected void initModel() throws Exception {
		M = new DenseMatrix[numDimensions];
		for (int d = 0; d < numDimensions; d++) {
			M[d] = new DenseMatrix(dimensions[d], numFactors);
			M[d].init(1, 0.1); // randomly initialization
			// normalize(d);
		}
	}

	/**
	 * Normalizes each factor column of M[d] to unit Euclidean length. Currently
	 * unused; kept for experimentation (see the commented call in initModel).
	 */
	protected void normalize(int d) {
		// column-wise normalization
		for (int f = 0; f < numFactors; f++) {
			double norm = 0;
			for (int r = 0; r < M[d].numRows(); r++) {
				norm += Math.pow(M[d].get(r, f), 2);
			}
			norm = Math.sqrt(norm);
			for (int r = 0; r < M[d].numRows(); r++) {
				M[d].set(r, f, M[d].get(r, f) / norm);
			}
		}
	}

	/**
	 * Learns the CP decomposition by SGD over the observed tensor entries,
	 * minimizing L2-regularized squared error.
	 */
	@Override
	protected void buildModel() throws Exception {
		// BUGFIX: run exactly numIters iterations ("<="); the previous "<" ran only
		// numIters - 1, inconsistent with the other iterative recommenders
		for (int iter = 1; iter <= numIters; iter++) {

			// SGD Optimization
			loss = 0;
			for (TensorEntry te : trainTensor) {
				int[] keys = te.keys();
				double rate = te.get();
				if (rate <= 0)
					continue;

				double pred = predict(keys);
				double e = rate - pred;
				loss += e * e;

				for (int f = 0; f < numFactors; f++) {
					// product of factor f across all dimensions
					double sgd = 1;
					for (int dd = 0; dd < numDimensions; dd++) {
						sgd *= M[dd].get(keys[dd], f);
					}

					for (int d = 0; d < numDimensions; d++) {
						double df = M[d].get(keys[d], f);
						// gradient w.r.t. M[d](keys[d], f): error times the product of the
						// other dimensions' factors (sgd / df); assumes df != 0, which the
						// init(1, 0.1) makes very unlikely - confirm init semantics
						double gdf = sgd / df * e;
						M[d].add(keys[d], f, lRate * (gdf - reg * df));
						loss += reg * df * df;
					}
				}
			}

			loss *= 0.5;

			if (isConverged(iter))
				break;
		}
	}

	/**
	 * CP prediction: sum over factors of the product of the per-dimension factor
	 * values at the given index keys.
	 */
	protected double predict(int[] keys) {
		double pred = 0;
		for (int f = 0; f < numFactors; f++) {
			double prod = 1;
			for (int d = 0; d < numDimensions; d++) {
				prod *= M[d].get(keys[d], f);
			}
			pred += prod;
		}
		return pred;
	}
}
| 3,248 | 23.801527 | 92 | java |
librec | librec-master/librec/src/main/java/librec/ranking/BHfree.java | // Copyright (C) 2014-2015 Guibing Guo
//
// This file is part of LibRec.
//
// LibRec is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// LibRec is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with LibRec. If not, see <http://www.gnu.org/licenses/>.
//
package librec.ranking;
import librec.data.AddConfiguration;
import librec.data.DenseMatrix;
import librec.data.DenseVector;
import librec.data.MatrixEntry;
import librec.data.SparseMatrix;
import librec.intf.GraphicRecommender;
import librec.util.Strings;
import com.google.common.collect.HashBasedTable;
import com.google.common.collect.Table;
/**
* Barbieri et al., <strong>Balancing Prediction and Recommendation Accuracy: Hierarchical Latent Factors for Preference
* Data</strong>, SDM 2012. <br>
*
* <p>
* <strong>Remarks:</strong> this class implements the BH-free method.
* </p>
*
* @author Guo Guibing
*
*/
@AddConfiguration(before = "K, L, alpha, beta, gamma, sigma")
public class BHfree extends GraphicRecommender {
	// Dirichlet hyper-parameters for rating levels (gamma) and items (sigma)
	private float initGamma, initSigma;
	// K: number of user topics; L: number of item topics
	private int K, L;
	// Nkl(k, l): number of ratings currently assigned to topic pair (k, l)
	private DenseMatrix Nkl;
	// Nklr[k][l][r]: count of rating level r under (k, l);
	// Nkli[k][l][i]: count of item i under (k, l)
	private int[][][] Nklr, Nkli;
	// current user-topic (Zk) and item-topic (Zl) assignment of every rating
	private Table<Integer, Integer, Integer> Zk, Zl;
	// parameters
	private DenseMatrix Puk, Pkl, PukSum, PklSum;
	private double[][][] Pklr, Pkli, PklrSum, PkliSum;

	public BHfree(SparseMatrix trainMatrix, SparseMatrix testMatrix, int fold) {
		super(trainMatrix, testMatrix, fold);
	}

	/**
	 * Reads hyper-parameters, allocates count variables and accumulators, and
	 * assigns random topic pairs to all observed ratings to seed the sampler.
	 */
	@Override
	protected void initModel() throws Exception {
		K = algoOptions.getInt("-k");
		L = algoOptions.getInt("-l");
		// NOTE(review): alpha/beta come from pgmOptions while gamma/sigma come from
		// algoOptions - confirm this split is intended
		initAlpha = pgmOptions.getFloat("-alpha", 1.0f / K);
		initBeta = pgmOptions.getFloat("-beta", 1.0f / L);
		initGamma = algoOptions.getFloat("-gamma", 1.0f / numLevels);
		initSigma = algoOptions.getFloat("-sigma", 1.0f / numItems);
		Nuk = new DenseMatrix(numUsers, K);
		Nu = new DenseVector(numUsers);
		Nkl = new DenseMatrix(K, L);
		Nk = new DenseVector(K);
		Nklr = new int[K][L][numLevels];
		Nkli = new int[K][L][numItems];
		Zk = HashBasedTable.create();
		Zl = HashBasedTable.create();
		for (MatrixEntry me : trainMatrix) {
			int u = me.row();
			int i = me.column();
			double rate = me.get();
			int r = ratingScale.indexOf(rate);
			int k = (int) (K * Math.random()); // user's topic k
			int l = (int) (L * Math.random()); // item's topic l
			Nuk.add(u, k, 1);
			Nu.add(u, 1);
			Nkl.add(k, l, 1);
			Nk.add(k, 1);
			Nklr[k][l][r]++;
			Nkli[k][l][i]++;
			Zk.put(u, i, k);
			Zl.put(u, i, l);
		}
		// parameters
		PukSum = new DenseMatrix(numUsers, K);
		PklSum = new DenseMatrix(K, L);
		PklrSum = new double[K][L][numLevels];
		Pklr = new double[K][L][numLevels];
		PkliSum = new double[K][L][numItems];
		Pkli = new double[K][L][numItems];
	}

	/**
	 * One Gibbs sweep: for every rating, removes its topic pair from the counts,
	 * computes the conditional distribution over all candidate pairs (z, w), then
	 * resamples k and l from the row/column marginals and restores the counts.
	 */
	@Override
	protected void eStep() {
		for (MatrixEntry me : trainMatrix) {
			int u = me.row();
			int i = me.column();
			double rate = me.get();
			int r = ratingScale.indexOf(rate);
			int k = Zk.get(u, i);
			int l = Zl.get(u, i);
			Nuk.add(u, k, -1);
			Nu.add(u, -1);
			Nkl.add(k, l, -1);
			Nk.add(k, -1);
			Nklr[k][l][r]--;
			Nkli[k][l][i]--;
			DenseMatrix Pzw = new DenseMatrix(K, L);
			double sum = 0;
			for (int z = 0; z < K; z++) {
				for (int w = 0; w < L; w++) {
					// BUGFIX: the candidate topics (z, w) must be used here; the previous
					// code read the old assignment (k, l) instead, which made every cell
					// of Pzw identical and the resampling uniform
					double v1 = (Nuk.get(u, z) + initAlpha) / (Nu.get(u) + K * initAlpha);
					double v2 = (Nkl.get(z, w) + initBeta) / (Nk.get(z) + L * initBeta);
					double v3 = (Nklr[z][w][r] + initGamma) / (Nkl.get(z, w) + numLevels * initGamma);
					double v4 = (Nkli[z][w][i] + initSigma) / (Nkl.get(z, w) + numItems * initSigma);
					double val = v1 * v2 * v3 * v4;
					Pzw.set(z, w, val);
					sum += val;
				}
			}
			// normalization
			Pzw = Pzw.scale(1.0 / sum);
			// resample k from the row marginals of Pzw
			double[] Pz = new double[K];
			for (int z = 0; z < K; z++)
				Pz[z] = Pzw.sumOfRow(z);
			for (int z = 1; z < K; z++)
				Pz[z] += Pz[z - 1];
			double rand = Math.random();
			for (k = 0; k < K; k++) {
				if (rand < Pz[k])
					break;
			}
			if (k >= K)
				k = K - 1; // guard against floating-point round-off in the cumulative sums
			// resample l from the column marginals of Pzw
			double[] Pw = new double[L];
			for (int w = 0; w < L; w++)
				Pw[w] = Pzw.sumOfColumn(w);
			for (int w = 1; w < L; w++)
				Pw[w] += Pw[w - 1];
			rand = Math.random();
			for (l = 0; l < L; l++) {
				if (rand < Pw[l])
					break;
			}
			if (l >= L)
				l = L - 1; // same floating-point guard
			// add statistic
			Nuk.add(u, k, 1);
			Nu.add(u, 1);
			Nkl.add(k, l, 1);
			Nk.add(k, 1);
			Nklr[k][l][r]++;
			Nkli[k][l][i]++;
			Zk.put(u, i, k);
			Zl.put(u, i, l);
		}
	}

	/**
	 * Accumulates the current smoothed count-based estimates into the running
	 * sums, from which the final parameters are averaged.
	 */
	@Override
	protected void readoutParams() {
		for (int u = 0; u < numUsers; u++) {
			for (int k = 0; k < K; k++) {
				PukSum.add(u, k, (Nuk.get(u, k) + initAlpha) / (Nu.get(u) + K * initAlpha));
			}
		}
		for (int k = 0; k < K; k++) {
			for (int l = 0; l < L; l++) {
				PklSum.add(k, l, (Nkl.get(k, l) + initBeta) / (Nk.get(k) + L * initBeta));
			}
		}
		for (int k = 0; k < K; k++) {
			for (int l = 0; l < L; l++) {
				for (int r = 0; r < numLevels; r++) {
					PklrSum[k][l][r] += (Nklr[k][l][r] + initGamma) / (Nkl.get(k, l) + numLevels * initGamma);
				}
			}
		}
		for (int k = 0; k < K; k++) {
			for (int l = 0; l < L; l++) {
				for (int i = 0; i < numItems; i++) {
					PkliSum[k][l][i] += (Nkli[k][l][i] + initSigma) / (Nkl.get(k, l) + numItems * initSigma);
				}
			}
		}
		numStats++;
	}

	/**
	 * Averages the accumulated sums over the number of read-outs.
	 */
	@Override
	protected void estimateParams() {
		double scale = 1.0 / numStats;
		Puk = PukSum.scale(scale);
		Pkl = PklSum.scale(scale);
		for (int k = 0; k < K; k++) {
			for (int l = 0; l < L; l++) {
				for (int r = 0; r < numLevels; r++) {
					Pklr[k][l][r] = PklrSum[k][l][r] * scale;
				}
			}
		}
		for (int k = 0; k < K; k++) {
			for (int l = 0; l < L; l++) {
				for (int i = 0; i < numItems; i++) {
					Pkli[k][l][i] = PkliSum[k][l][i] * scale;
				}
			}
		}
	}

	/**
	 * Rating prediction: probability-weighted mean of the rating levels,
	 * normalized by the total probability mass.
	 */
	@Override
	public double predict(int u, int j) throws Exception {
		double sum = 0, probs = 0;
		for (int r = 0; r < numLevels; r++) {
			double rate = ratingScale.get(r);
			double prob = 0;
			for (int k = 0; k < K; k++) {
				for (int l = 0; l < L; l++) {
					prob += Puk.get(u, k) * Pkl.get(k, l) * Pklr[k][l][r];
				}
			}
			sum += rate * prob;
			probs += prob;
		}
		return sum / probs;
	}

	/**
	 * Ranking score: expected rating of item j including the item-selection
	 * probability Pkli, summed over topic pairs and rating levels.
	 */
	@Override
	public double ranking(int u, int j) throws Exception {
		double rank = 0;
		for (int r = 0; r < numLevels; r++) {
			double rate = ratingScale.get(r);
			double prob = 0;
			for (int k = 0; k < K; k++) {
				for (int l = 0; l < L; l++) {
					prob += Puk.get(u, k) * Pkl.get(k, l) * Pkli[k][l][j] * Pklr[k][l][r];
				}
			}
			rank += rate * prob;
		}
		return rank;
	}

	@Override
	public String toString() {
		return Strings.toString(new Object[] { K, L, initAlpha, initBeta, initGamma, initSigma }) + ", "
				+ super.toString();
	}
}
| 7,089 | 22.872054 | 120 | java |
librec | librec-master/librec/src/main/java/librec/ranking/SBPR.java | // Copyright (C) 2014 Guibing Guo
//
// This file is part of LibRec.
//
// LibRec is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// LibRec is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with LibRec. If not, see <http://www.gnu.org/licenses/>.
//
package librec.ranking;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import librec.data.DenseMatrix;
import librec.data.DenseVector;
import librec.data.SparseMatrix;
import librec.data.SparseVector;
import librec.data.VectorEntry;
import librec.intf.SocialRecommender;
import librec.util.Randoms;
import librec.util.Strings;
/**
* Social Bayesian Personalized Ranking (SBPR)
*
* <p>
* Zhao et al., <strong>Leveraging Social Connections to Improve Personalized Ranking for Collaborative
* Filtering</strong>, CIKM 2014.
* </p>
*
* @author guoguibing
*
*/
public class SBPR extends SocialRecommender {
	// SP(u): items rated by u's trusted friends but not by u ("social feedback")
	private Map<Integer, List<Integer>> SP;

	public SBPR(SparseMatrix trainMatrix, SparseMatrix testMatrix, int fold) {
		super(trainMatrix, testMatrix, fold);
		isRankingPred = true; // SBPR is a pairwise ranking method
		initByNorm = false;
	}
	/**
	 * Initializes factors and item biases, then builds SP: for each user u, the
	 * list of items rated by u's trusted friends but not by u.
	 */
	@Override
	protected void initModel() throws Exception {
		// initialization
		super.initModel();
		itemBias = new DenseVector(numItems);
		itemBias.init();
		userItemsCache = trainMatrix.rowColumnsCache(cacheSpec);
		// find items rated by trusted neighbors only
		SP = new HashMap<>();
		for (int u = 0, um = trainMatrix.numRows(); u < um; u++) {
			List<Integer> uRatedItems = userItemsCache.get(u);
			if (uRatedItems.size() == 0)
				continue; // no rated items
			// SPu
			List<Integer> trustedUsers = socialMatrix.getColumns(u);
			List<Integer> items = new ArrayList<>();
			for (int v : trustedUsers) {
				if (v >= um) // friend v has no row (no ratings) in the training matrix
					continue;
				List<Integer> vRatedItems = userItemsCache.get(v);
				for (int j : vRatedItems) {
					// v's rated items
					if (!uRatedItems.contains(j) && !items.contains(j)) // if not rated by user u and not already added to item list
						items.add(j);
				}
			}
			SP.put(u, items);
		}
	}
@Override
protected void postModel() throws Exception {
SP = null; // no need for evaluation, release it.
}
@Override
protected void buildModel() throws Exception {
for (int iter = 1; iter <= numIters; iter++) {
loss = 0;
for (int s = 0, smax = numUsers * 100; s < smax; s++) {
// uniformly draw (u, i, k, j)
int u = 0, i = 0, j = 0;
// u
List<Integer> ratedItems = null;
do {
u = Randoms.uniform(trainMatrix.numRows());
ratedItems = userItemsCache.get(u);
} while (ratedItems.size() == 0);
// i
i = Randoms.random(ratedItems);
double xui = predict(u, i);
// SPu
List<Integer> SPu = SP.get(u);
// j
do {
j = Randoms.uniform(numItems);
} while (ratedItems.contains(j) || SPu.contains(j));
double xuj = predict(u, j);
if (SPu.size() > 0) {
// if having social neighbors
int k = Randoms.random(SPu);
double xuk = predict(u, k);
SparseVector Tu = socialMatrix.row(u);
double suk = 0;
for (VectorEntry ve : Tu) {
int v = ve.index();
if (v < trainMatrix.numRows()) {
double rvk = trainMatrix.get(v, k);
if (rvk > 0)
suk += 1;
}
}
double xuik = (xui - xuk) / (1 + suk);
double xukj = xuk - xuj;
double vals = -Math.log(g(xuik)) - Math.log(g(xukj));
loss += vals;
double cik = g(-xuik), ckj = g(-xukj);
// update bi, bk, bj
double bi = itemBias.get(i);
itemBias.add(i, lRate * (cik / (1 + suk) - regB * bi));
loss += regB * bi * bi;
double bk = itemBias.get(k);
itemBias.add(k, lRate * (-cik / (1 + suk) + ckj - regB * bk));
loss += regB * bk * bk;
double bj = itemBias.get(j);
itemBias.add(j, lRate * (-ckj - regB * bj));
loss += regB * bj * bj;
// update P, Q
for (int f = 0; f < numFactors; f++) {
double puf = P.get(u, f);
double qif = Q.get(i, f), qkf = Q.get(k, f);
double qjf = Q.get(j, f);
double delta_puf = cik * (qif - qkf) / (1 + suk) + ckj * (qkf - qjf);
P.add(u, f, lRate * (delta_puf - regU * puf));
Q.add(i, f, lRate * (cik * puf / (1 + suk) - regI * qif));
double delta_qkf = cik * (-puf / (1 + suk)) + ckj * puf;
Q.add(k, f, lRate * (delta_qkf - regI * qkf));
Q.add(j, f, lRate * (ckj * (-puf) - regI * qjf));
loss += regU * puf * puf + regI * qif * qif;
loss += regI * qkf * qkf + regI * qjf * qjf;
}
} else {
// if no social neighbors, the same as BPR
double xuij = xui - xuj;
double vals = -Math.log(g(xuij));
loss += vals;
double cij = g(-xuij);
// update bi, bj
double bi = itemBias.get(i);
itemBias.add(i, lRate * (cij - regB * bi));
loss += regB * bi * bi;
double bj = itemBias.get(j);
itemBias.add(j, lRate * (-cij - regB * bj));
loss += regB * bj * bj;
// update P, Q
for (int f = 0; f < numFactors; f++) {
double puf = P.get(u, f);
double qif = Q.get(i, f);
double qjf = Q.get(j, f);
P.add(u, f, lRate * (cij * (qif - qjf) - regU * puf));
Q.add(i, f, lRate * (cij * puf - regI * qif));
Q.add(j, f, lRate * (cij * (-puf) - regI * qjf));
loss += regU * puf * puf + regI * qif * qif + regI * qjf * qjf;
}
}
}
if (isConverged(iter))
break;
}
}
@Override
public double predict(int u, int j) {
return itemBias.get(j) + DenseMatrix.rowMult(P, u, Q, j);
}
@Override
public String toString() {
return Strings.toString(new Object[] { binThold, numFactors, initLRate, maxLRate, regU, regI, regB, numIters });
}
}
| 6,167 | 25.025316 | 117 | java |
librec | librec-master/librec/src/main/java/librec/ranking/RankSGD.java | // Copyright (C) 2014 Guibing Guo
//
// This file is part of LibRec.
//
// LibRec is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// LibRec is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with LibRec. If not, see <http://www.gnu.org/licenses/>.
//
package librec.ranking;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import librec.data.SparseMatrix;
import librec.data.SparseVector;
import librec.data.VectorEntry;
import librec.intf.IterativeRecommender;
import librec.util.Lists;
import librec.util.Randoms;
import librec.util.Strings;
/**
* Jahrer and Toscher, Collaborative Filtering Ensemble for Ranking, JMLR, 2012 (KDD Cup 2011 Track 2).
*
* @author guoguibing
*
*/
public class RankSGD extends IterativeRecommender {

	// item sampling probabilities sorted ascendingly
	protected List<Map.Entry<Integer, Double>> itemProbs;

	public RankSGD(SparseMatrix trainMatrix, SparseMatrix testMatrix, int fold) {
		super(trainMatrix, testMatrix, fold);

		// ranking task on binarized (implicit) feedback
		isRankingPred = true;
		checkBinary();
	}

	@Override
	protected void initModel() throws Exception {
		super.initModel();

		// compute item sampling probability, proportional to item popularity
		Map<Integer, Double> itemProbsMap = new HashMap<>();
		for (int j = 0; j < numItems; j++) {
			int users = trainMatrix.columnSize(j);

			// sample items based on popularity
			double prob = (users + 0.0) / numRates;
			if (prob > 0)
				itemProbsMap.put(j, prob);
		}
		itemProbs = Lists.sortMap(itemProbsMap);
	}

	@Override
	protected void buildModel() throws Exception {
		for (int iter = 1; iter <= numIters; iter++) {

			loss = 0;
			// for each rated user-item (u,i) pair
			for (int u : trainMatrix.rows()) {
				SparseVector Ru = trainMatrix.row(u);

				for (VectorEntry ve : Ru) {
					// each rated item i
					int i = ve.index();
					double rui = ve.get();

					int j = -1;
					while (true) {
						// draw an item j with probability proportional to
						// popularity (inverse-CDF sampling over itemProbs)
						double sum = 0, rand = Randoms.random();
						for (Map.Entry<Integer, Double> en : itemProbs) {
							int k = en.getKey();
							double prob = en.getValue();

							sum += prob;
							if (sum >= rand) {
								j = k;
								break;
							}
						}

						// ensure that it is unrated by user u
						if (!Ru.contains(j))
							break;
					}
					double ruj = 0; // pseudo rating for the unobserved item j

					// compute predictions
					double pui = predict(u, i), puj = predict(u, j);
					double e = (pui - puj) - (rui - ruj);
					loss += e * e;

					// update vectors: gradient step on the squared pairwise error
					double ye = lRate * e;
					for (int f = 0; f < numFactors; f++) {
						double puf = P.get(u, f);
						double qif = Q.get(i, f);
						double qjf = Q.get(j, f);

						P.add(u, f, -ye * (qif - qjf));
						Q.add(i, f, -ye * puf);
						Q.add(j, f, ye * puf);
					}
				}
			}

			loss *= 0.5;

			if (isConverged(iter))
				break;
		}
	}

	@Override
	public String toString() {
		return Strings.toString(new Object[] { binThold, initLRate, numIters }, ",");
	}
}
| 3,416 | 23.76087 | 103 | java |
librec | librec-master/librec/src/main/java/librec/ranking/ItemBigram.java | // Copyright (C) 2014-2015 Guibing Guo
//
// This file is part of LibRec.
//
// LibRec is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// LibRec is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with LibRec. If not, see <http://www.gnu.org/licenses/>.
//
package librec.ranking;
import static librec.util.Gamma.digamma;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import librec.data.AddConfiguration;
import librec.data.DenseMatrix;
import librec.data.DenseVector;
import librec.data.RatingContext;
import librec.data.SparseMatrix;
import librec.intf.GraphicRecommender;
import librec.util.Strings;
import com.google.common.collect.HashBasedTable;
/**
* Hanna M. Wallach, <strong>Topic Modeling: Beyond Bag-of-Words</strong>, ICML 2006.
*
* @author Guo Guibing
*
*/
@AddConfiguration(before = "factors, alpha, beta")
public class ItemBigram extends GraphicRecommender {

	// each user's rated items, sorted chronologically by rating timestamp
	private Map<Integer, List<Integer>> userItemsMap;

	/**
	 * k: current topic; j: previously rated item; i: current item.
	 * Index j == numItems is a virtual "start" item for the first rating.
	 */
	private int[][][] Nkji;
	private DenseMatrix Nkj;

	// topic-bigram probabilities and their running sums over sampling iterations
	private double[][][] Pkji, PkjiSum;

	// per-(topic, previous-item) Dirichlet hyper-parameters
	private DenseMatrix beta;

	public ItemBigram(SparseMatrix trainMatrix, SparseMatrix testMatrix, int fold) {
		super(trainMatrix, testMatrix, fold);

		isRankingPred = true;
	}

	@Override
	protected void initModel() throws Exception {

		// build the training data, sorting by date
		userItemsMap = new HashMap<>();
		for (int u = 0; u < numUsers; u++) {
			List<Integer> unsortedItems = trainMatrix.getColumns(u);
			int size = unsortedItems.size();

			List<RatingContext> rcs = new ArrayList<>(size);
			for (Integer i : unsortedItems) {
				rcs.add(new RatingContext(u, i, (long) timeMatrix.get(u, i)));
			}
			Collections.sort(rcs);

			List<Integer> sortedItems = new ArrayList<>(size);
			for (RatingContext rc : rcs) {
				sortedItems.add(rc.getItem());
			}

			userItemsMap.put(u, sortedItems);
		}

		// count variables
		Nuk = new DenseMatrix(numUsers, numFactors);
		Nu = new DenseVector(numUsers);

		Nkji = new int[numFactors][numItems + 1][numItems];
		Nkj = new DenseMatrix(numFactors, numItems + 1);

		// parameters
		PukSum = new DenseMatrix(numUsers, numFactors);
		PkjiSum = new double[numFactors][numItems + 1][numItems];
		Pkji = new double[numFactors][numItems + 1][numItems];

		// hyper-parameters
		alpha = new DenseVector(numFactors);
		alpha.setAll(initAlpha);

		beta = new DenseMatrix(numFactors, numItems + 1);
		beta.setAll(initBeta);

		// initialization: random topic assignment for each (user, item) pair
		z = HashBasedTable.create();
		for (Entry<Integer, List<Integer>> en : userItemsMap.entrySet()) {
			int u = en.getKey();
			List<Integer> items = en.getValue();

			for (int m = 0; m < items.size(); m++) {
				int i = items.get(m);
				int k = (int) (Math.random() * numFactors);
				z.put(u, i, k);

				Nuk.add(u, k, 1.0);
				Nu.add(u, 1.0);

				// the first rated item has the virtual predecessor numItems
				int j = m > 0 ? items.get(m - 1) : numItems;
				Nkji[k][j][i]++;
				Nkj.add(k, j, 1);
			}
		}
	}

	@Override
	protected void eStep() {
		// collapsed Gibbs sampling: resample the topic of every (u, i) pair
		double sumAlpha = alpha.sum();
		double v1, v2;

		for (Entry<Integer, List<Integer>> en : userItemsMap.entrySet()) {
			int u = en.getKey();
			List<Integer> items = en.getValue();

			for (int m = 0; m < items.size(); m++) {
				int i = items.get(m);
				int k = z.get(u, i);

				// remove the current assignment from the counts
				Nuk.add(u, k, -1.0);
				Nu.add(u, -1.0);

				int j = m > 0 ? items.get(m - 1) : numItems;
				Nkji[k][j][i]--;
				Nkj.add(k, j, -1);

				// full conditional p(z = t | rest)
				double[] Pk = new double[numFactors];
				for (int t = 0; t < numFactors; t++) {
					v1 = (Nuk.get(u, t) + alpha.get(t)) / (Nu.get(u) + sumAlpha);
					v2 = (Nkji[t][j][i] + beta.get(t, j)) / (Nkj.get(t, j) + beta.sumOfRow(t));

					Pk[t] = v1 * v2;
				}

				// draw from the (cumulative) conditional distribution
				for (int t = 1; t < numFactors; t++) {
					Pk[t] += Pk[t - 1];
				}

				double rand = Math.random() * Pk[numFactors - 1];
				for (k = 0; k < numFactors; k++) {
					if (rand < Pk[k])
						break;
				}

				// restore the counts with the new assignment
				z.put(u, i, k);

				Nuk.add(u, k, 1.0);
				Nu.add(u, 1.0);

				Nkji[k][j][i]++;
				Nkj.add(k, j, 1.0);
			}
		}
	}

	@Override
	protected void mStep() {
		// fixed-point updates of the Dirichlet hyper-parameters (Minka's method)
		double sumAlpha = alpha.sum();

		for (int k = 0; k < numFactors; k++) {
			double ak = alpha.get(k);
			double numerator = 0, denominator = 0;
			for (int u = 0; u < numUsers; u++) {
				numerator += digamma(Nuk.get(u, k) + ak) - digamma(ak);
				denominator += digamma(Nu.get(u) + sumAlpha) - digamma(sumAlpha);
			}
			if (numerator != 0)
				alpha.set(k, ak * (numerator / denominator));
		}

		for (int k = 0; k < numFactors; k++) {
			double bk = beta.sumOfRow(k);
			for (int j = 0; j < numItems + 1; j++) {
				double bkj = beta.get(k, j);
				double numerator = 0, denominator = 0;
				for (int i = 0; i < numItems; i++) {
					numerator += digamma(Nkji[k][j][i] + bkj) - digamma(bkj);
					denominator += digamma(Nkj.get(k, j) + bk) - digamma(bk);
				}
				if (numerator != 0)
					beta.set(k, j, bkj * (numerator / denominator));
			}
		}
	}

	@Override
	protected void readoutParams() {
		// accumulate the current posterior point estimates into the running sums
		double val = 0.0;
		double sumAlpha = alpha.sum();

		// FIX: the loop bound was numFactors, which skipped every user with
		// index >= numFactors and left their rows of PukSum at zero
		for (int u = 0; u < numUsers; u++) {
			for (int k = 0; k < numFactors; k++) {
				val = (Nuk.get(u, k) + alpha.get(k)) / (Nu.get(u) + sumAlpha);
				PukSum.add(u, k, val);
			}
		}

		for (int k = 0; k < numFactors; k++) {
			double bk = beta.sumOfRow(k);
			for (int j = 0; j < numItems + 1; j++) {
				for (int i = 0; i < numItems; i++) {
					val = (Nkji[k][j][i] + beta.get(k, j)) / (Nkj.get(k, j) + bk);
					PkjiSum[k][j][i] += val;
				}
			}
		}
		numStats++;
	}

	@Override
	protected void estimateParams() {
		// average the accumulated samples to get the final parameter estimates
		Puk = PukSum.scale(1.0 / numStats);

		for (int k = 0; k < numFactors; k++) {
			for (int j = 0; j < numItems + 1; j++) {
				for (int i = 0; i < numItems; i++) {
					Pkji[k][j][i] = PkjiSum[k][j][i] / numStats;
				}
			}
		}
	}

	@Override
	public double ranking(int u, int i) throws Exception {
		List<Integer> items = userItemsMap.get(u);
		int j = items.size() < 1 ? numItems : items.get(items.size() - 1); // last rated item

		// score = sum_k p(k | u) * p(i | k, j)
		double rank = 0;
		for (int k = 0; k < numFactors; k++) {
			rank += Puk.get(u, k) * Pkji[k][j][i];
		}

		return rank;
	}

	@Override
	public String toString() {
		return Strings.toString(new Object[] { numFactors, initAlpha, initBeta }) + ", " + super.toString();
	}
}
| 6,874 | 24.749064 | 102 | java |
librec | librec-master/librec/src/main/java/librec/ranking/AoBPR.java | // Copyright (C) 2014-2015 Guibing Guo
//
// This file is part of LibRec.
//
// LibRec is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// LibRec is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with LibRec. If not, see <http://www.gnu.org/licenses/>.
//
package librec.ranking;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import librec.data.DenseVector;
import librec.data.SparseMatrix;
import librec.data.SparseVector;
import librec.intf.IterativeRecommender;
import librec.util.Lists;
import librec.util.Randoms;
import librec.util.Stats;
import librec.util.Strings;
/**
*
* AoBPR: BPR with Adaptive Oversampling<br>
*
* Rendle and Freudenthaler, <strong>Improving pairwise learning for item recommendation from implicit
* feedback</strong>, WSDM 2014.
*
* @author zhouge
*
*/
public class AoBPR extends IterativeRecommender {

	// refresh the factor-wise item rankings every |I|*log|I| sampling steps.
	// NOTE: these were 'static' fields holding per-model state; made instance
	// fields so parallel folds cannot clobber each other's configuration.
	private int loopNumber;

	// rank-position scale of the sampling distribution: lambda * |I|
	private int lamda_Item;

	// variance of the item factor values, per factor
	private double[] var;

	// factorRanking[f][r] = item at rank r when items are sorted by factor f
	private int[][] factorRanking;

	// probability of drawing rank r (normalized geometric-like distribution)
	private double[] RankingPro;

	public AoBPR(SparseMatrix trainMatrix, SparseMatrix testMatrix, int fold) {
		super(trainMatrix, testMatrix, fold);

		isRankingPred = true;
		initByNorm = false;
	}

	@Override
	protected void initModel() throws Exception {
		super.initModel();

		// set for this alg
		lamda_Item = (int) (algoOptions.getFloat("-lambda") * numItems);
		// lamda_Item=500;
		loopNumber = (int) (numItems * Math.log(numItems));

		var = new double[numFactors];
		factorRanking = new int[numFactors][numItems];

		RankingPro = new double[numItems];
		double sum = 0;
		for (int i = 0; i < numItems; i++) {
			// FIX: '-(i + 1) / lamda_Item' was all-int arithmetic, so the exponent
			// truncated to 0 for every rank below lamda_Item, collapsing the intended
			// p(r) ~ exp(-r/lambda) distribution to a step function
			RankingPro[i] = Math.exp(-(i + 1) / (double) lamda_Item);
			sum += RankingPro[i];
		}
		// normalize to a proper probability distribution
		for (int i = 0; i < numItems; i++) {
			RankingPro[i] /= sum;
		}
	}

	@Override
	protected void buildModel() throws Exception {
		int countIter = 0;
		for (int iter = 1; iter <= numIters; iter++) {

			loss = 0;
			for (int s = 0, smax = numUsers * 100; s < smax; s++) {

				// update Ranking every |I|log|I|
				if (countIter % loopNumber == 0) {
					updateRankingInFactor();
					countIter = 0;
				}
				countIter++;

				// randomly draw (u, i, j)
				int u = 0, i = 0, j = 0;
				while (true) {
					// random draw an u and i by uniformly
					u = Randoms.uniform(numUsers);
					SparseVector pu = trainMatrix.row(u);

					if (pu.getCount() == 0)
						continue;

					int[] is = pu.getIndex();
					i = is[Randoms.uniform(is.length)];

					do {
						// randoms get a r by exp(-r/lamda)
						int randomJIndex = 0;
						do {
							randomJIndex = Randoms.discrete(RankingPro);
						} while (randomJIndex > numItems);

						// randoms get a f by p(f|c): factors weighted by |p_uf| * var_f
						double[] pfc = new double[numFactors];
						double sumfc = 0;
						for (int index = 0; index < numFactors; index++) {
							double temp = Math.abs(P.get(u, index));
							sumfc += temp * var[index];
							pfc[index] = temp * var[index];
						}
						for (int index = 0; index < numFactors; index++) {
							pfc[index] /= sumfc;
						}
						int f = Randoms.discrete(pfc);

						// get the r-1 in f item: walk the ranking from the end that
						// maximizes the predicted score given the sign of p_uf
						if (P.get(u, f) > 0) {
							j = factorRanking[f][randomJIndex];
						} else {
							j = factorRanking[f][numItems - randomJIndex - 1];
						}
					} while (pu.contains(j));
					break;
				}

				// update parameters by the standard BPR gradient step
				double xui = predict(u, i);
				double xuj = predict(u, j);
				double xuij = xui - xuj;

				double vals = -Math.log(g(xuij));
				loss += vals;

				double cmg = g(-xuij);

				for (int f = 0; f < numFactors; f++) {
					double puf = P.get(u, f);
					double qif = Q.get(i, f);
					double qjf = Q.get(j, f);

					P.add(u, f, lRate * (cmg * (qif - qjf) - regU * puf));
					Q.add(i, f, lRate * (cmg * puf - regI * qif));
					Q.add(j, f, lRate * (cmg * (-puf) - regI * qjf));

					loss += regU * puf * puf + regI * qif * qif + regI * qjf * qjf;
				}
			}

			if (isConverged(iter))
				break;
		}
	}

	/**
	 * Re-sorts all items by each latent factor and records the per-factor
	 * variance of the factor values (used when sampling a factor).
	 */
	public void updateRankingInFactor() {
		// echo for each factors
		for (int factorIndex = 0; factorIndex < numFactors; factorIndex++) {
			DenseVector factorVector = Q.column(factorIndex).clone();
			List<Entry<Integer, Double>> sort = sortByDenseVectorValue(factorVector);
			double[] valueList = new double[numItems];
			for (int i = 0; i < numItems; i++) {
				factorRanking[factorIndex][i] = sort.get(i).getKey();
				valueList[i] = sort.get(i).getValue();
			}
			// get
			var[factorIndex] = Stats.var(valueList);
		}
	}

	/**
	 * Returns the (index, value) pairs of a dense vector sorted descendingly by value.
	 */
	public List<Entry<Integer, Double>> sortByDenseVectorValue(DenseVector vector) {
		Map<Integer, Double> keyValPair = new HashMap<>();
		for (int i = 0, length = vector.getData().length; i < length; i++) {
			keyValPair.put(i, vector.get(i));
		}
		return Lists.sortMap(keyValPair, true);
	}

	@Override
	public String toString() {
		return Strings
				.toString(new Object[] { binThold, numFactors, initLRate, regU, regI, numIters, lamda_Item }, ",");
	}
}
| 5,381 | 25.776119 | 103 | java |
librec | librec-master/librec/src/main/java/librec/ranking/RankALS.java | // Copyright (C) 2014 Guibing Guo
//
// This file is part of LibRec.
//
// LibRec is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// LibRec is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with LibRec. If not, see <http://www.gnu.org/licenses/>.
//
package librec.ranking;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import librec.data.Configuration;
import librec.data.DenseMatrix;
import librec.data.DenseVector;
import librec.data.SparseMatrix;
import librec.data.SparseVector;
import librec.data.VectorEntry;
import librec.intf.IterativeRecommender;
import librec.util.Logs;
import librec.util.Strings;
/**
* Takacs and Tikk, <strong>Alternating Least Squares for Personalized Ranking</strong>, RecSys 2012.
*
* @author guoguibing
*
*/
@Configuration("binThold, factors, isSupportWeight, numIters")
public class RankALS extends IterativeRecommender {

	// whether support based weighting is used ($s_i=|U_i|$) or not ($s_i=1$)
	private boolean isSupportWeight;

	// per-item support weights s_i and their sum over all items
	private DenseVector s;
	private double sum_s;

	public RankALS(SparseMatrix trainMatrix, SparseMatrix testMatrix, int fold) {
		super(trainMatrix, testMatrix, fold);

		isRankingPred = true;
		checkBinary();
	}

	@Override
	protected void initModel() throws Exception {
		super.initModel();

		isSupportWeight = algoOptions.isOn("-sw");

		s = new DenseVector(numItems);
		sum_s = 0;
		for (int i = 0; i < numItems; i++) {
			double si = isSupportWeight ? trainMatrix.columnSize(i) : 1;
			s.set(i, si);
			sum_s += si;
		}
	}

	@Override
	protected void buildModel() throws Exception {
		// FIX: loop bound was 'iter < numIters', running one iteration fewer than
		// configured and inconsistent with the other iterative recommenders
		for (int iter = 1; iter <= numIters; iter++) {
			if (verbose)
				Logs.debug("{}{} runs at iter = {}/{}", algoName, foldInfo, iter, numIters);

			// P step: update user vectors (closed-form least squares per user)
			DenseVector sum_sq = new DenseVector(numFactors);
			DenseMatrix sum_sqq = new DenseMatrix(numFactors, numFactors);
			for (int j = 0; j < numItems; j++) {
				DenseVector qj = Q.row(j);
				double sj = s.get(j);

				sum_sq = sum_sq.add(qj.scale(sj));
				sum_sqq = sum_sqq.add(qj.outer(qj).scale(sj));
			}

			List<Integer> cus = trainMatrix.rows(); // list of users with
			// $c_ui=1$
			for (int u : cus) {
				// for each user
				DenseMatrix sum_cqq = new DenseMatrix(numFactors, numFactors);
				DenseVector sum_cq = new DenseVector(numFactors);
				DenseVector sum_cqr = new DenseVector(numFactors);
				DenseVector sum_sqr = new DenseVector(numFactors);

				SparseVector Ru = trainMatrix.row(u);
				double sum_c = Ru.getCount();
				double sum_sr = 0, sum_cr = 0;

				for (VectorEntry ve : Ru) {
					int i = ve.index();
					double rui = ve.get();
					// double cui = 1;
					DenseVector qi = Q.row(i);

					sum_cqq = sum_cqq.add(qi.outer(qi));
					sum_cq = sum_cq.add(qi);
					sum_cqr = sum_cqr.add(qi.scale(rui));

					// ratings of unrated items will be 0
					double si = s.get(i);
					sum_sr += si * rui;
					sum_cr += rui;
					sum_sqr = sum_sqr.add(qi.scale(si * rui));
				}

				// solve M * p_u = y (Eq. from Takacs & Tikk, RecSys 2012)
				DenseMatrix M = sum_cqq.scale(sum_s).minus(sum_cq.outer(sum_sq)).minus(sum_sq.outer(sum_cq))
						.add(sum_sqq.scale(sum_c));

				DenseVector y = sum_cqr.scale(sum_s).minus(sum_cq.scale(sum_sr)).minus(sum_sq.scale(sum_cr))
						.add(sum_sqr.scale(sum_c));

				DenseVector pu = M.inv().mult(y);
				P.setRow(u, pu);
			}

			// Q step: update item vectors; first cache the per-user aggregates
			Map<Integer, Double> m_sum_sr = new HashMap<>();
			Map<Integer, Double> m_sum_cr = new HashMap<>();
			Map<Integer, Double> m_sum_c = new HashMap<>();
			Map<Integer, DenseVector> m_sum_cq = new HashMap<>();

			for (int u : cus) {
				SparseVector Ru = trainMatrix.row(u);

				double sum_sr = 0, sum_cr = 0, sum_c = Ru.getCount();
				DenseVector sum_cq = new DenseVector(numFactors);

				for (VectorEntry ve : Ru) {
					int j = ve.index();
					double ruj = ve.get();
					double sj = s.get(j);

					sum_sr += sj * ruj;
					sum_cr += ruj;
					sum_cq = sum_cq.add(Q.row(j));
				}

				m_sum_sr.put(u, sum_sr);
				m_sum_cr.put(u, sum_cr);
				m_sum_c.put(u, sum_c);
				m_sum_cq.put(u, sum_cq);
			}

			for (int i = 0; i < numItems; i++) {
				// for each item, accumulate the terms of the closed-form solution
				DenseMatrix sum_cpp = new DenseMatrix(numFactors, numFactors);
				DenseMatrix sum_p_p_c = new DenseMatrix(numFactors, numFactors);
				DenseVector sum_p_p_cq = new DenseVector(numFactors);
				DenseVector sum_cpr = new DenseVector(numFactors);
				DenseVector sum_c_sr_p = new DenseVector(numFactors);
				DenseVector sum_cr_p = new DenseVector(numFactors);
				DenseVector sum_p_r_c = new DenseVector(numFactors);

				double si = s.get(i);

				for (int u : cus) {
					DenseVector pu = P.row(u);
					double rui = trainMatrix.get(u, i);

					DenseMatrix pp = pu.outer(pu);
					sum_cpp = sum_cpp.add(pp);
					sum_p_p_cq = sum_p_p_cq.add(pp.mult(m_sum_cq.get(u)));
					sum_p_p_c = sum_p_p_c.add(pp.scale(m_sum_c.get(u)));
					sum_cr_p = sum_cr_p.add(pu.scale(m_sum_cr.get(u)));

					if (rui > 0) {
						sum_cpr = sum_cpr.add(pu.scale(rui));
						sum_c_sr_p = sum_c_sr_p.add(pu.scale(m_sum_sr.get(u)));
						sum_p_r_c = sum_p_r_c.add(pu.scale(rui * m_sum_c.get(u)));
					}
				}

				// solve M * q_i = y
				DenseMatrix M = sum_cpp.scale(sum_s).add(sum_p_p_c.scale(si));
				DenseVector y = sum_cpp.mult(sum_sq).add(sum_cpr.scale(sum_s)).minus(sum_c_sr_p)
						.add(sum_p_p_cq.scale(si)).minus(sum_cr_p.scale(si)).add(sum_p_r_c.scale(si));

				DenseVector qi = M.inv().mult(y);
				Q.setRow(i, qi);
			}
		}
	}

	@Override
	public String toString() {
		return Strings.toString(new Object[] { binThold, numFactors, isSupportWeight, numIters });
	}
}
| 6,025 | 28.539216 | 101 | java |
librec | librec-master/librec/src/main/java/librec/rating/SoReg.java | // Copyright (C) 2014 Guibing Guo
//
// This file is part of LibRec.
//
// LibRec is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// LibRec is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with LibRec. If not, see <http://www.gnu.org/licenses/>.
//
package librec.rating;
import librec.data.AddConfiguration;
import librec.data.DenseMatrix;
import librec.data.MatrixEntry;
import librec.data.SparseMatrix;
import librec.data.SparseVector;
import librec.intf.SocialRecommender;
import com.google.common.collect.HashBasedTable;
import com.google.common.collect.Table;
/**
* Hao Ma, Dengyong Zhou, Chao Liu, Michael R. Lyu and Irwin King, <strong>Recommender systems with social
* regularization</strong>, WSDM 2011.<br>
*
* <p>
* In the original paper, this method is named as "SR2_pcc". For consistency, we rename it as "SoReg" as used by some
* other papers such as: Tang et al., <strong>Exploiting Local and Global Social Context for Recommendation</strong>,
* IJCAI 2013.
* </p>
*
* @author guoguibing
*
*/
@AddConfiguration(before = "beta")
public class SoReg extends SocialRecommender {

	// cache of pairwise user similarities (PCC mapped into [0, 1]); NaN = undefined
	private Table<Integer, Integer, Double> userCorrs;

	// weight of the social regularization term
	private float beta;

	public SoReg(SparseMatrix trainMatrix, SparseMatrix testMatrix, int fold) {
		super(trainMatrix, testMatrix, fold);

		initByNorm = false;
	}

	@Override
	protected void initModel() throws Exception {
		super.initModel();

		userCorrs = HashBasedTable.create();
		beta = algoOptions.getFloat("-beta");
	}

	/**
	 * compute similarity between users u and v
	 */
	protected double similarity(Integer u, Integer v) {
		// symmetric cache lookup
		if (userCorrs.contains(u, v))
			return userCorrs.get(u, v);

		if (userCorrs.contains(v, u))
			return userCorrs.get(v, u);

		double sim = Double.NaN;

		if (u < trainMatrix.numRows() && v < trainMatrix.numRows()) {
			SparseVector uv = trainMatrix.row(u);
			if (uv.getCount() > 0) {
				SparseVector vv = trainMatrix.row(v);
				sim = correlation(uv, vv, "pcc"); // could change to other measures

				if (!Double.isNaN(sim))
					sim = (1.0 + sim) / 2; // map PCC from [-1, 1] into [0, 1]
			}
		}

		userCorrs.put(u, v, sim);

		return sim;
	}

	@Override
	protected void buildModel() throws Exception {
		for (int iter = 1; iter <= numIters; iter++) {

			loss = 0;

			// temp data: accumulated gradients, applied in one batch step below
			DenseMatrix PS = new DenseMatrix(numUsers, numFactors);
			DenseMatrix QS = new DenseMatrix(numItems, numFactors);

			// ratings
			for (MatrixEntry me : trainMatrix) {
				int u = me.row();
				int j = me.column();
				double ruj = me.get();

				double pred = predict(u, j);
				double euj = pred - ruj;

				loss += euj * euj;

				for (int f = 0; f < numFactors; f++) {
					double puf = P.get(u, f);
					double qjf = Q.get(j, f);

					PS.add(u, f, euj * qjf + regU * puf);
					QS.add(j, f, euj * puf + regI * qjf);

					loss += regU * puf * puf + regI * qjf * qjf;
				}
			}

			// friends: social regularization pulls a user towards similar neighbors
			for (int u = 0; u < numUsers; u++) {
				// out links: F+
				SparseVector uos = socialMatrix.row(u);

				for (int k : uos.getIndex()) {
					double suk = similarity(u, k);

					if (!Double.isNaN(suk)) {
						for (int f = 0; f < numFactors; f++) {
							double euk = P.get(u, f) - P.get(k, f);
							PS.add(u, f, beta * suk * euk);

							loss += beta * suk * euk * euk;
						}
					}
				}

				// in links: F-
				// NOTE(review): in-links contribute to the gradient but not to the
				// reported loss -- presumably intentional so each edge's loss is
				// counted once (from the out-link side); confirm against the paper
				SparseVector uis = socialMatrix.column(u);
				for (int g : uis.getIndex()) {
					double sug = similarity(u, g);

					if (!Double.isNaN(sug)) {
						for (int f = 0; f < numFactors; f++) {
							double eug = P.get(u, f) - P.get(g, f);
							PS.add(u, f, beta * sug * eug);
						}
					}
				}
			} // end of for loop

			// batch gradient-descent update
			P = P.add(PS.scale(-lRate));
			Q = Q.add(QS.scale(-lRate));

			loss *= 0.5;

			if (isConverged(iter))
				break;
		}
	}

	@Override
	public String toString() {
		return beta + ", " + super.toString();
	}
}
| 4,273 | 23.994152 | 117 | java |
librec | librec-master/librec/src/main/java/librec/rating/ItemKNN.java | // Copyright (C) 2014 Guibing Guo
//
// This file is part of LibRec.
//
// LibRec is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// LibRec is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with LibRec. If not, see <http://www.gnu.org/licenses/>.
//
package librec.rating;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import librec.data.Configuration;
import librec.data.DenseVector;
import librec.data.SparseMatrix;
import librec.data.SparseVector;
import librec.data.SymmMatrix;
import librec.intf.Recommender;
import librec.util.Lists;
import librec.util.Stats;
import librec.util.Strings;
/**
* <h3>Item-based Nearest Neighbors</h3>
*
* <p>
* It supports both recommendation tasks: (1) rating prediction; and (2) item ranking (by configuring
* {@code isRankingPred=on} in the librec.conf). For item ranking, the returned score is the summation of the
* similarities of nearest neighbors (see Section 4.3.2 of Rendle et al., BPR: Bayesian Personalized Ranking from
* Implicit Feedback, UAI 2009).
* </p>
*
* <p>
* When the number of items is extremely large which makes it memory intensive to store/precompute all item-item
* correlations, a trick presented by (Jahrer and Toscher, Collaborative Filtering Ensemble, JMLR 2012) can be applied.
* Specifically, we can use a basic SVD model to obtain item-feature vectors, and then item-item correlations can be
* computed by Eqs (13, 15).
* </p>
*
* @author guoguibing
*
*/
@Configuration("knn, similarity, shrinkage")
public class ItemKNN extends Recommender {

	// precomputed item-item similarity matrix
	private SymmMatrix itemCorrs;

	// per-item mean rating; items without training ratings fall back to the global mean
	private DenseVector itemMeans;

	public ItemKNN(SparseMatrix trainMatrix, SparseMatrix testMatrix, int fold) {
		super(trainMatrix, testMatrix, fold);
	}

	@Override
	protected void initModel() throws Exception {
		itemCorrs = buildCorrs(false);

		itemMeans = new DenseVector(numItems);
		for (int item = 0; item < numItems; item++) {
			SparseVector ratings = trainMatrix.column(item);
			double mean = ratings.getCount() > 0 ? ratings.mean() : globalMean;
			itemMeans.set(item, mean);
		}
	}

	@Override
	public double predict(int u, int j) {
		// collect candidate neighbors: items similar to j that user u has rated
		Map<Integer, Double> neighbors = new HashMap<>();

		if (u < trainMatrix.numRows()) {
			SparseVector sims = itemCorrs.row(j);
			for (int i : sims.getIndex()) {
				double sim = sims.get(i);
				double rate = trainMatrix.get(u, i);

				// ranking mode keeps every rated neighbor regardless of sign;
				// rating mode keeps positively-correlated neighbors only
				if (rate > 0 && (isRankingPred || sim > 0))
					neighbors.put(i, sim);
			}

			// keep only the knn most similar neighbors
			if (knn > 0 && knn < neighbors.size()) {
				List<Map.Entry<Integer, Double>> ranked = Lists.sortMap(neighbors, true).subList(0, knn);
				neighbors = new HashMap<>();
				for (Map.Entry<Integer, Double> entry : ranked)
					neighbors.put(entry.getKey(), entry.getValue());
			}
		}

		if (neighbors.isEmpty())
			return isRankingPred ? 0 : globalMean;

		// ranking task: score = sum of neighbor similarities (cf. Rendle et al., UAI 2009)
		if (isRankingPred)
			return Stats.sum(neighbors.values());

		// rating task: mean-centered weighted average over the neighbors
		double weightedSum = 0, weightTotal = 0;
		for (Entry<Integer, Double> entry : neighbors.entrySet()) {
			int i = entry.getKey();
			double sim = entry.getValue();

			weightedSum += sim * (trainMatrix.get(u, i) - itemMeans.get(i));
			weightTotal += Math.abs(sim);
		}
		return weightTotal > 0 ? itemMeans.get(j) + weightedSum / weightTotal : globalMean;
	}

	@Override
	public String toString() {
		return Strings.toString(new Object[] { knn, similarityMeasure, similarityShrinkage });
	}
}
| 4,232 | 30.355556 | 119 | java |
librec | librec-master/librec/src/main/java/librec/rating/LDCC.java | // Copyright (C) 2014-2015 Guibing Guo
//
// This file is part of LibRec.
//
// LibRec is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// LibRec is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with LibRec. If not, see <http://www.gnu.org/licenses/>.
//
package librec.rating;
import librec.data.AddConfiguration;
import librec.data.DenseMatrix;
import librec.data.DenseVector;
import librec.data.MatrixEntry;
import librec.data.SparseMatrix;
import librec.intf.GraphicRecommender;
import librec.util.Logs;
import librec.util.Strings;
import com.google.common.collect.HashBasedTable;
import com.google.common.collect.Table;
/**
* LDCC: Bayesian Co-clustering (BCC) with Gibbs sampling <br>
*
* Wang et al., <strong>Latent Dirichlet Bayesian Co-Clustering</strong>, Machine Learning and Knowledge Discovery in
* Databases, 2009.
*
* @author Guo Guibing
*
*/
@AddConfiguration(before = "Ku, Kv, au, av, beta")
public class LDCC extends GraphicRecommender {

	// topic assignment of each rating (u, v): Zu = user-side topic, Zv = item-side topic
	private Table<Integer, Integer, Integer> Zu, Zv;

	// Nui(u, i): #ratings of user u assigned to user-topic i; Nvj(v, j): same for item v / item-topic j
	private DenseMatrix Nui, Nvj;

	// Nv(v): total #ratings of item v (Nu, the user counterpart, is inherited)
	private DenseVector Nv;

	// Nijl[i][j][l]: #ratings with level l assigned to co-cluster (i, j)
	private int[][][] Nijl;

	// Nij(i, j): total #ratings assigned to co-cluster (i, j)
	private DenseMatrix Nij;

	// number of user topics (Ku) and item topics (Kv)
	private int Ku, Kv;

	// Dirichlet priors: au for user topics, av for item topics, bl for rating levels
	private float au, av, bl;

	// posterior parameters (PIu, PIv, Pijl) and their cumulative sample statistics
	private DenseMatrix PIu, PIv, PIuSum, PIvSum;
	private double[][][] Pijl, PijlSum;

	public LDCC(SparseMatrix trainMatrix, SparseMatrix testMatrix, int fold) {
		super(trainMatrix, testMatrix, fold);
	}

	/**
	 * Initializes count caches and randomly assigns a (user-topic, item-topic) pair to each training rating.
	 */
	@Override
	protected void initModel() throws Exception {

		Ku = algoOptions.getInt("-ku", numFactors);
		Kv = algoOptions.getInt("-kv", numFactors);

		Nui = new DenseMatrix(numUsers, Ku);
		Nu = new DenseVector(numUsers);

		Nvj = new DenseMatrix(numItems, Kv);
		Nv = new DenseVector(numItems);

		Nijl = new int[Ku][Kv][numLevels];
		Nij = new DenseMatrix(Ku, Kv);

		au = algoOptions.getFloat("-au", 1.0f / Ku); // alpha for user
		av = algoOptions.getFloat("-av", 1.0f / Kv); // alpha for item
		bl = algoOptions.getFloat("-beta", 1.0f / numLevels); // beta for rating levels

		Zu = HashBasedTable.create();
		Zv = HashBasedTable.create();

		for (MatrixEntry me : trainMatrix) {
			int u = me.row();
			int v = me.column();
			double rate = me.get();
			int l = ratingScale.indexOf(rate);

			// random initial topic assignment
			int i = (int) (Ku * Math.random());
			int j = (int) (Kv * Math.random());

			Nui.add(u, i, 1);
			Nu.add(u, 1);

			Nvj.add(v, j, 1);
			Nv.add(v, 1);

			Nijl[i][j][l]++;
			Nij.add(i, j, 1);

			Zu.put(u, v, i);
			Zv.put(u, v, j);
		}

		// parameters
		PIuSum = new DenseMatrix(numUsers, Ku);
		PIvSum = new DenseMatrix(numItems, Kv);
		Pijl = new double[Ku][Kv][numLevels];
		PijlSum = new double[Ku][Kv][numLevels];
	}

	/**
	 * One sweep of collapsed Gibbs sampling: for each rating, remove it from the counts,
	 * re-sample its (user-topic, item-topic) pair from the full conditional, and add it back.
	 */
	@Override
	protected void eStep() {

		for (MatrixEntry me : trainMatrix) {
			int u = me.row();
			int v = me.column();
			double rate = me.get();
			int l = ratingScale.indexOf(rate);

			// user and item's factors
			int i = Zu.get(u, v);
			int j = Zv.get(u, v);

			// remove this observation
			Nui.add(u, i, -1);
			Nu.add(u, -1);

			Nvj.add(v, j, -1);
			Nv.add(v, -1);

			Nijl[i][j][l]--;
			Nij.add(i, j, -1);

			// compute joint P(i, j) over all topic pairs
			DenseMatrix probs = new DenseMatrix(Ku, Kv);
			double sum = 0;
			for (int m = 0; m < Ku; m++) {
				for (int n = 0; n < Kv; n++) {
					// compute Pmn: P(i|u) * P(j|v) * P(l|i,j)
					double v1 = (Nui.get(u, m) + au) / (Nu.get(u) + Ku * au);
					double v2 = (Nvj.get(v, n) + av) / (Nv.get(v) + Kv * av);
					double v3 = (Nijl[m][n][l] + bl) / (Nij.get(m, n) + numLevels * bl);
					double prob = v1 * v2 * v3;

					probs.set(m, n, prob);
					sum += prob;
				}
			}
			probs = probs.scale(1.0 / sum);

			// re-sample user factor from the marginal over rows (cumulative method)
			double[] Pu = new double[Ku];
			for (int m = 0; m < Ku; m++) {
				Pu[m] = probs.sumOfRow(m);
			}
			for (int m = 1; m < Ku; m++) {
				Pu[m] += Pu[m - 1];
			}

			double rand = Math.random();
			for (i = 0; i < Ku; i++) {
				if (rand < Pu[i])
					break;
			}

			// re-sample item factor from the marginal over columns
			double[] Pv = new double[Kv];
			for (int n = 0; n < Kv; n++) {
				Pv[n] = probs.sumOfColumn(n);
			}
			for (int n = 1; n < Kv; n++) {
				Pv[n] += Pv[n - 1];
			}

			rand = Math.random();
			for (j = 0; j < Kv; j++) {
				if (rand < Pv[j])
					break;
			}

			// add statistics back with the new assignment
			Nui.add(u, i, 1);
			Nu.add(u, 1);

			Nvj.add(v, j, 1);
			Nv.add(v, 1);

			Nijl[i][j][l]++;
			Nij.add(i, j, 1);

			Zu.put(u, v, i);
			Zv.put(u, v, j);
		}

	}

	/**
	 * Accumulates the current posterior estimates into the running sums (one Gibbs sample).
	 */
	@Override
	protected void readoutParams() {
		for (int u = 0; u < numUsers; u++) {
			for (int i = 0; i < Ku; i++) {
				PIuSum.add(u, i, (Nui.get(u, i) + au) / (Nu.get(u) + Ku * au));
			}
		}

		for (int v = 0; v < numItems; v++) {
			for (int j = 0; j < Kv; j++) {
				PIvSum.add(v, j, (Nvj.get(v, j) + av) / (Nv.get(v) + Kv * av));
			}
		}

		for (int i = 0; i < Ku; i++) {
			for (int j = 0; j < Kv; j++) {
				for (int l = 0; l < numLevels; l++) {
					PijlSum[i][j][l] += (Nijl[i][j][l] + bl) / (Nij.get(i, j) + numLevels * bl);
				}
			}
		}

		numStats++;
	}

	/**
	 * Averages the accumulated statistics into the final parameter estimates.
	 */
	@Override
	protected void estimateParams() {
		PIu = PIuSum.scale(1.0 / numStats);
		PIv = PIvSum.scale(1.0 / numStats);

		for (int i = 0; i < Ku; i++) {
			for (int j = 0; j < Kv; j++) {
				for (int l = 0; l < numLevels; l++) {
					Pijl[i][j][l] = PijlSum[i][j][l] / numStats;
				}
			}
		}
	}

	/**
	 * Converged when the training-set perplexity stops decreasing (after at least one readout).
	 */
	@Override
	protected boolean isConverged(int iter) throws Exception {

		// get the parameters
		estimateParams();

		// compute the perplexity
		int N = 0;
		double sum = 0;
		for (MatrixEntry me : trainMatrix) {
			int u = me.row();
			int v = me.column();
			double ruv = me.get();

			sum += perplexity(u, v, ruv);
			N++;
		}

		double perp = Math.exp(sum / N);
		double delta = perp - loss; // perplexity should get smaller and smaller --> delta<0

		Logs.debug("{}{} iter {} achieves perplexity = {}, delta_perp = {}", algoName, foldInfo, iter, perp, delta);

		if (numStats > 1 && delta > 0)
			return true;

		loss = perp;
		return false;
	}

	/**
	 * Negative log-likelihood of observing rating {@code pred} for the pair (u, v).
	 */
	@Override
	public double perplexity(int u, int v, double pred) throws Exception {
		// FIX: map the rating to its level index via the rating scale, consistent with
		// initModel/eStep/predict; the previous (pred / minRate - 1) arithmetic breaks
		// for rating scales that are not of the form {minRate, 2*minRate, ...}
		int l = ratingScale.indexOf(pred);

		// compute p(r|u,v) = sum_{i,j} P(r|i,j) P(i|u) P(j|v)
		double prob = 0;
		for (int i = 0; i < Ku; i++) {
			for (int j = 0; j < Kv; j++) {
				prob += Pijl[i][j][l] * PIu.get(u, i) * PIv.get(v, j);
			}
		}
		return -Math.log(prob);
	}

	/**
	 * Expected rating: sum over rating levels weighted by P(r|u,v).
	 */
	@Override
	public double predict(int u, int v) throws Exception {
		double pred = 0;

		for (int l = 0; l < numLevels; l++) {
			double rate = ratingScale.get(l);

			double prob = 0; // P(r|u,v)=\sum_{i,j} P(r|i,j)P(i|u)P(j|v)
			for (int i = 0; i < Ku; i++) {
				for (int j = 0; j < Kv; j++) {
					prob += Pijl[i][j][l] * PIu.get(u, i) * PIv.get(v, j);
				}
			}

			pred += rate * prob;
		}

		return pred;
	}

	@Override
	public String toString() {
		return Strings.toString(new Object[] { Ku, Kv, au, av, bl }) + ", " + super.toString();
	}
}
| 7,196 | 22.291262 | 117 | java |
librec | librec-master/librec/src/main/java/librec/rating/URP.java | // Copyright (C) 2014-2015 Guibing Guo
//
// This file is part of LibRec.
//
// LibRec is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// LibRec is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with LibRec. If not, see <http://www.gnu.org/licenses/>.
//
package librec.rating;
import static librec.util.Gamma.digamma;
import librec.data.AddConfiguration;
import librec.data.DenseMatrix;
import librec.data.DenseVector;
import librec.data.MatrixEntry;
import librec.data.SparseMatrix;
import librec.intf.GraphicRecommender;
import librec.util.Logs;
import librec.util.Strings;
import com.google.common.collect.HashBasedTable;
/**
* User Rating Profile: a LDA model for rating prediction. <br>
*
* Benjamin Marlin, <strong>Modeling user rating profiles for collaborative filtering</strong>, NIPS 2003.<br>
*
* Nicola Barbieri, <strong>Regularized gibbs sampling for user profiling with soft constraints</strong>, ASONAM 2011.
*
* @author Guo Guibing
*
*/
@AddConfiguration(before = "factors, alpha, beta")
public class URP extends GraphicRecommender {

	// RMSE on the validation set from the previous convergence check
	private double preRMSE;

	public URP(SparseMatrix trainMatrix, SparseMatrix testMatrix, int fold) {
		super(trainMatrix, testMatrix, fold);
	}

	/**
	 * number of occurrences of entry (t, i, r)
	 */
	private int[][][] Nkir;

	/**
	 * cumulative statistics of probabilities of (t, i, r)
	 */
	private double[][][] PkirSum;

	/**
	 * posterior probabilities of parameters phi_{k, i, r}
	 */
	protected double[][][] Pkir;

	/**
	 * Initializes count caches, hyper-parameters, and random topic assignments.
	 */
	@Override
	protected void initModel() throws Exception {

		// cumulative parameters
		PukSum = new DenseMatrix(numUsers, numFactors);
		PkirSum = new double[numFactors][numItems][numLevels];

		// initialize count variables
		Nuk = new DenseMatrix(numUsers, numFactors);
		Nu = new DenseVector(numUsers);

		Nkir = new int[numFactors][numItems][numLevels];
		Nki = new DenseMatrix(numFactors, numItems);

		alpha = new DenseVector(numFactors);
		alpha.setAll(initAlpha);

		beta = new DenseVector(numLevels);
		beta.setAll(initBeta);

		// initialize topics
		z = HashBasedTable.create();
		for (MatrixEntry me : trainMatrix) {

			int u = me.row();
			int i = me.column();
			double rui = me.get();

			int r = ratingScale.indexOf(rui); // rating level 0 ~ numLevels
			int t = (int) (Math.random() * numFactors); // 0 ~ k-1

			// assign a topic t to pair (u, i)
			z.put(u, i, t);

			// number of pairs (u, t) in (u, i, t)
			Nuk.add(u, t, 1);

			// total number of items of user u
			Nu.add(u, 1);

			// number of pairs (t, i, r)
			Nkir[t][i][r]++;

			// total number of words assigned to topic t
			Nki.add(t, i, 1);
		}

	}

	/**
	 * One sweep of collapsed Gibbs sampling over all training ratings.
	 */
	@Override
	protected void eStep() {

		double sumAlpha = alpha.sum();
		double sumBeta = beta.sum();

		// collapse Gibbs sampling
		for (MatrixEntry me : trainMatrix) {

			int u = me.row();
			int i = me.column();
			double rui = me.get();

			// FIX: map the rating to its level index via the rating scale, consistent with
			// initModel; the previous (rui / minRate - 1) arithmetic yields wrong indexes
			// (and corrupted counts) for rating scales not of the form {minRate, 2*minRate, ...}
			int r = ratingScale.indexOf(rui); // rating level 0 ~ numLevels
			int t = z.get(u, i);

			// remove this observation from the counts
			Nuk.add(u, t, -1);
			Nu.add(u, -1);
			Nkir[t][i][r]--;
			Nki.add(t, i, -1);

			// do multinomial sampling via cumulative method:
			double[] p = new double[numFactors];
			for (int k = 0; k < numFactors; k++) {
				p[k] = (Nuk.get(u, k) + alpha.get(k)) / (Nu.get(u) + sumAlpha) * (Nkir[k][i][r] + beta.get(r))
						/ (Nki.get(k, i) + sumBeta);
			}
			// cumulate multinomial parameters
			for (int k = 1; k < p.length; k++) {
				p[k] += p[k - 1];
			}

			// scaled sample because of unnormalized p[], randomly sampled a new topic t
			double rand = Math.random() * p[numFactors - 1];
			for (t = 0; t < p.length; t++) {
				if (rand < p[t])
					break;
			}

			// new topic t
			z.put(u, i, t);

			// add newly estimated z_i to count variables
			Nuk.add(u, t, 1);
			Nu.add(u, 1);
			Nkir[t][i][r]++;
			Nki.add(t, i, 1);
		}

	}

	/**
	 * Thomas P. Minka, Estimating a Dirichlet distribution, see Eq.(55)
	 */
	@Override
	protected void mStep() {

		double sumAlpha = alpha.sum();
		double sumBeta = beta.sum();
		double ak, br;

		// update alpha vector
		for (int k = 0; k < numFactors; k++) {

			ak = alpha.get(k);
			double numerator = 0, denominator = 0;
			for (int u = 0; u < numUsers; u++) {
				numerator += digamma(Nuk.get(u, k) + ak) - digamma(ak);
				denominator += digamma(Nu.get(u) + sumAlpha) - digamma(sumAlpha);
			}
			if (numerator != 0)
				alpha.set(k, ak * (numerator / denominator));
		}

		// update beta_k
		for (int r = 0; r < numLevels; r++) {

			br = beta.get(r);
			double numerator = 0, denominator = 0;
			for (int i = 0; i < numItems; i++) {
				for (int k = 0; k < numFactors; k++) {
					numerator += digamma(Nkir[k][i][r] + br) - digamma(br);
					denominator += digamma(Nki.get(k, i) + sumBeta) - digamma(sumBeta);
				}
			}
			if (numerator != 0)
				beta.set(r, br * (numerator / denominator));
		}
	}

	/**
	 * Accumulates the current posterior estimates into the running sums (one Gibbs sample).
	 */
	protected void readoutParams() {
		double val = 0;
		double sumAlpha = alpha.sum();

		for (int u = 0; u < numUsers; u++) {
			for (int k = 0; k < numFactors; k++) {
				val = (Nuk.get(u, k) + alpha.get(k)) / (Nu.get(u) + sumAlpha);
				PukSum.add(u, k, val);
			}
		}

		double sumBeta = beta.sum();
		for (int k = 0; k < numFactors; k++) {
			for (int i = 0; i < numItems; i++) {
				for (int r = 0; r < numLevels; r++) {
					val = (Nkir[k][i][r] + beta.get(r)) / (Nki.get(k, i) + sumBeta);
					PkirSum[k][i][r] += val;
				}
			}
		}
		numStats++;
	}

	/**
	 * Averages the accumulated statistics into the final parameter estimates.
	 */
	@Override
	protected void estimateParams() {
		Puk = PukSum.scale(1.0 / numStats);

		Pkir = new double[numFactors][numItems][numLevels];
		for (int k = 0; k < numFactors; k++) {
			for (int i = 0; i < numItems; i++) {
				for (int r = 0; r < numLevels; r++) {
					Pkir[k][i][r] = PkirSum[k][i][r] / numStats;
				}
			}
		}
	}

	/**
	 * Converged when the validation RMSE stops decreasing (after at least one readout).
	 */
	@Override
	protected boolean isConverged(int iter) throws Exception {

		if (validationMatrix == null)
			return false;

		// get posterior probability distribution first
		estimateParams();

		// compute current RMSE
		int numCount = 0;
		double sum = 0;
		for (MatrixEntry me : validationMatrix) {
			double rate = me.get();

			int u = me.row();
			int j = me.column();

			double pred = predict(u, j, true);
			if (Double.isNaN(pred))
				continue;

			double err = rate - pred;

			sum += err * err;
			numCount++;
		}

		double RMSE = Math.sqrt(sum / numCount);
		double delta = RMSE - preRMSE;

		if (verbose) {
			Logs.debug("{}{} iter {} achieves RMSE = {}, delta_RMSE = {}", algoName, foldInfo, iter, (float) RMSE,
					(float) (delta));
		}

		if (numStats > 1 && delta > 0)
			return true;

		preRMSE = RMSE;
		return false;
	}

	/**
	 * Expected rating: sum over rating levels weighted by P(r|u,i) = sum_k P(k|u) P(r|k,i).
	 */
	@Override
	public double predict(int u, int i) throws Exception {
		double pred = 0;

		for (int r = 0; r < numLevels; r++) {
			double rate = ratingScale.get(r);

			double prob = 0;
			for (int k = 0; k < numFactors; k++) {
				prob += Puk.get(u, k) * Pkir[k][i][r];
			}

			pred += prob * rate;
		}

		return pred;
	}

	@Override
	public String toString() {
		return Strings.toString(new Object[] { numFactors, alpha, beta }) + ", " + super.toString();
	}
}
| 7,462 | 23.711921 | 118 | java |
librec | librec-master/librec/src/main/java/librec/rating/LLORMAUpdater.java | package librec.rating;
import librec.data.DenseMatrix;
import librec.data.DenseVector;
import librec.data.MatrixEntry;
import librec.data.SparseMatrix;
import librec.util.Randoms;
/**
* <h3>Local Low-Rank Matrix Approximation</h3>
* <p>
* This implementation refers to the method proposed by Lee et al. at ICML 2013.
* <p>
 * <strong>Reference:</strong> Joonseok Lee, <strong>Local Low-Rank Matrix
 * Approximation </strong>, ICML. 2013: 82-90.
*
* @author wkq
*/
public class LLORMAUpdater extends Thread {
	/**
	 * The unique identifier of the thread.
	 */
	private int threadId;
	/**
	 * The number of features.
	 */
	private int numFactors;
	/**
	 * The number of users.
	 */
	private int userCount;
	/**
	 * The number of items.
	 */
	private int itemCount;
	/**
	 * The anchor user used to learn this local model.
	 */
	private int anchorUser;
	/**
	 * The anchor item used to learn this local model.
	 */
	private int anchorItem;
	/**
	 * Learning rate parameter.
	 */
	public double lRate;
	/**
	 * The maximum number of iteration.
	 */
	public int maxIter;
	/**
	 * Regularization factor parameters (user-side and item-side).
	 */
	public double regU, regI;
	/**
	 * The vector containing each user's weight.
	 */
	private DenseVector w;
	/**
	 * The vector containing each item's weight.
	 */
	private DenseVector v;
	/**
	 * User profile in low-rank matrix form.
	 */
	private DenseMatrix P;
	/**
	 * Item profile in low-rank matrix form.
	 */
	private DenseMatrix Q;
	/**
	 * The rating matrix used for learning.
	 */
	private SparseMatrix trainMatrix;
	/**
	 * The current train error.
	 */
	private double trainErr;

	/**
	 * Construct a local model for singleton LLORMA.
	 *
	 * @param tid
	 *            A unique thread ID.
	 * @param rk
	 *            The rank which will be used in this local model.
	 * @param uc
	 *            The number of users.
	 * @param ic
	 *            The number of items.
	 * @param au
	 *            The anchor user used to learn this local model.
	 * @param ai
	 *            The anchor item used to learn this local model.
	 * @param lr
	 *            Learning rate parameter.
	 * @param regU
	 *            User-side regularization factor.
	 * @param regI
	 *            Item-side regularization factor.
	 * @param iter
	 *            The maximum number of iterations.
	 * @param w
	 *            Initial vector containing each user's weight.
	 * @param v
	 *            Initial vector containing each item's weight.
	 * @param rm
	 *            The rating matrix used for learning.
	 */
	public LLORMAUpdater(int tid, int rk, int uc, int ic, int au, int ai, double lr, double regU, double regI, int iter,
			DenseVector w, DenseVector v, SparseMatrix rm) {
		threadId = tid;
		numFactors = rk;
		userCount = uc;
		itemCount = ic;
		anchorUser = au;
		anchorItem = ai;
		lRate = lr;
		this.regU = regU;
		this.regI = regI;
		maxIter = iter;
		this.w = w;
		this.v = v;
		P = new DenseMatrix(userCount, numFactors);
		Q = new DenseMatrix(itemCount, numFactors);
		trainMatrix = rm;
	}

	/**
	 * Getter method for thread ID.
	 *
	 * @return The thread ID of this local model.
	 */
	public int getThreadId() {
		return threadId;
	}

	/**
	 * Getter method for rank of this local model.
	 *
	 * @return The rank of this local model.
	 */
	public int getRank() {
		return numFactors;
	}

	/**
	 * Getter method for anchor user of this local model.
	 *
	 * @return The anchor user ID of this local model.
	 */
	public int getAnchorUser() {
		return anchorUser;
	}

	/**
	 * Getter method for anchor item of this local model.
	 *
	 * @return The anchor item ID of this local model.
	 */
	public int getAnchorItem() {
		return anchorItem;
	}

	/**
	 * Getter method for user profile of this local model.
	 *
	 * @return The user profile of this local model.
	 */
	public DenseMatrix getUserFeatures() {
		return P;
	}

	/**
	 * Getter method for item profile of this local model.
	 *
	 * @return The item profile of this local model.
	 */
	public DenseMatrix getItemFeatures() {
		return Q;
	}

	/**
	 * Getter method for current train error.
	 *
	 * @return The current train error.
	 */
	public double getTrainErr() {
		return trainErr;
	}

	/**
	 * Learn this local model based on similar users to the anchor user and
	 * similar items to the anchor item. Implemented with gradient descent.
	 * Each rating's gradient is weighted by w(u) * v(j), the kernel weights
	 * relative to the anchor point. Stops when the per-rating squared error
	 * changes by less than 1e-4 or maxIter rounds are reached.
	 */
	@Override
	public void run() {
		trainErr = Double.MAX_VALUE;
		// random (Gaussian) initialization of both factor matrices
		for (int u = 0; u < userCount; u++) {
			for (int r = 0; r < numFactors; r++) {
				double rdm = Randoms.gaussian(0.0, 0.01);
				P.set(u, r, rdm);
			}
		}
		for (int i = 0; i < itemCount; i++) {
			for (int r = 0; r < numFactors; r++) {
				double rdm = Randoms.gaussian(0.0, 0.01);
				Q.set(i, r, rdm);
			}
		}
		int round = 0;
		int rateCount = trainMatrix.size();
		double prevErr = 99999;
		double currErr = 9999;
		while (Math.abs(prevErr - currErr) > 0.0001 && round < maxIter) {
			double loss = 0.0;
			for (MatrixEntry me : trainMatrix) {
				int u = me.row(); // user
				int j = me.column(); // item
				double ruj = me.get();
				double puj = 0;
				try {
					puj = predict(u, j);
				} catch (Exception e) {
					// best-effort: on failure the prediction stays 0 and training continues
				}
				double euj = ruj - puj;
				loss += euj * euj;
				// kernel weight of this rating w.r.t. the anchor (user weight * item weight)
				double weight = w.get(u) * v.get(j);
				for (int f = 0; f < numFactors; f++) {
					double puf = P.get(u, f), qjf = Q.get(j, f);
					P.add(u, f, lRate * (euj * qjf * weight - regU * puf));
					Q.add(j, f, lRate * (euj * puf * weight - regI * qjf));
					loss += regU * puf * puf + regI * qjf * qjf;
				}
			}
			prevErr = currErr;
			currErr = loss / rateCount;
			// RMSE-style error exposed via getTrainErr()
			trainErr = Math.sqrt(currErr);
			round++;
		}
	}

	/**
	 * Predicts the rating of user u on item j as the dot product of their factor rows.
	 *
	 * @return the predicted rating
	 */
	protected double predict(int u, int j) throws Exception {
		return DenseMatrix.rowMult(P, u, Q, j);
	}
}
| 5,519 | 21.348178 | 117 | java |
librec | librec-master/librec/src/main/java/librec/rating/SVDPlusPlus.java | // Copyright (C) 2014 Guibing Guo
//
// This file is part of LibRec.
//
// LibRec is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// LibRec is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with LibRec. If not, see <http://www.gnu.org/licenses/>.
//
package librec.rating;
import java.util.List;
import librec.data.DenseMatrix;
import librec.data.MatrixEntry;
import librec.data.SparseMatrix;
/**
* Yehuda Koren, <strong>Factorization Meets the Neighborhood: a Multifaceted Collaborative Filtering Model.</strong>,
* KDD 2008.
*
* @author guoguibing
*
*/
public class SVDPlusPlus extends BiasedMF {

	// implicit feedback factors: row Y(i) is the influence of having rated item i
	protected DenseMatrix Y;

	public SVDPlusPlus(SparseMatrix trainMatrix, SparseMatrix testMatrix, int fold) {
		super(trainMatrix, testMatrix, fold);

		setAlgoName("SVD++");
	}

	@Override
	protected void initModel() throws Exception {
		super.initModel();

		Y = new DenseMatrix(numItems, numFactors);
		Y.init(initMean, initStd);

		// cache: user -> list of items rated in the training set
		userItemsCache = trainMatrix.rowColumnsCache(cacheSpec);
	}

	/**
	 * SGD training: for each observed rating, update user/item biases, the explicit
	 * factors P/Q, and the implicit-feedback factors Y of every item the user rated.
	 */
	@Override
	protected void buildModel() throws Exception {

		for (int iter = 1; iter <= numIters; iter++) {

			loss = 0;
			for (MatrixEntry me : trainMatrix) {

				int u = me.row(); // user
				int j = me.column(); // item
				double ruj = me.get();

				double pred = predict(u, j);
				double euj = ruj - pred;

				loss += euj * euj;

				List<Integer> items = userItemsCache.get(u);
				// normalization weight |N(u)|^0.5
				double w = Math.sqrt(items.size());

				// update factors
				double bu = userBias.get(u);
				double sgd = euj - regB * bu;
				userBias.add(u, lRate * sgd);

				loss += regB * bu * bu;

				double bj = itemBias.get(j);
				sgd = euj - regB * bj;
				itemBias.add(j, lRate * sgd);

				loss += regB * bj * bj;

				// per-factor sum of Y over the user's rated items, normalized by w
				double[] sum_ys = new double[numFactors];
				for (int f = 0; f < numFactors; f++) {
					double sum_f = 0;
					for (int k : items)
						sum_f += Y.get(k, f);

					sum_ys[f] = w > 0 ? sum_f / w : sum_f;
				}

				for (int f = 0; f < numFactors; f++) {
					double puf = P.get(u, f);
					double qjf = Q.get(j, f);

					double sgd_u = euj * qjf - regU * puf;
					double sgd_j = euj * (puf + sum_ys[f]) - regI * qjf;

					P.add(u, f, lRate * sgd_u);
					Q.add(j, f, lRate * sgd_j);

					loss += regU * puf * puf + regI * qjf * qjf;

					// propagate the error to the implicit factors of every rated item
					for (int k : items) {
						double ykf = Y.get(k, f);
						double delta_y = euj * qjf / w - regU * ykf;
						Y.add(k, f, lRate * delta_y);

						loss += regU * ykf * ykf;
					}
				}

			}

			loss *= 0.5;

			if (isConverged(iter))
				break;

		}// end of training

	}

	/**
	 * Prediction: global mean + biases + q_j^T (p_u + |N(u)|^-0.5 * sum_{k in N(u)} y_k).
	 */
	@Override
	public double predict(int u, int j) throws Exception {
		double pred = globalMean + userBias.get(u) + itemBias.get(j) + DenseMatrix.rowMult(P, u, Q, j);

		List<Integer> items = userItemsCache.get(u);
		double w = Math.sqrt(items.size());
		for (int k : items)
			pred += DenseMatrix.rowMult(Y, k, Q, j) / w;

		return pred;
	}
}
| 3,336 | 22.835714 | 118 | java |
librec | librec-master/librec/src/main/java/librec/rating/TimeSVD.java | // Copyright (C) 2014-2015 Guibing Guo
//
// This file is part of LibRec.
//
// LibRec is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// LibRec is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with LibRec. If not, see <http://www.gnu.org/licenses/>.
//
package librec.rating;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import librec.data.DenseMatrix;
import librec.data.DenseVector;
import librec.data.MatrixEntry;
import librec.data.SparseMatrix;
import librec.intf.IterativeRecommender;
import librec.util.Randoms;
import librec.util.Strings;
import com.google.common.collect.HashBasedTable;
import com.google.common.collect.Table;
/**
* Koren, <strong>Collaborative Filtering with Temporal Dynamics</strong>, KDD 2009.
*
* @author guoguibing
*
*/
public class TimeSVD extends IterativeRecommender {

	// the span of days of rating timestamps
	// NOTE(review): static but assigned per-instance in initModel() — appears to assume
	// one model is trained at a time; confirm before training models concurrently
	private static int numDays;

	// {user, mean date}
	private DenseVector userMeanDate;

	// time decay factor
	private float beta;

	// number of bins over all the items
	private int numBins;

	// item's implicit influence
	private DenseMatrix Y;

	// {item, bin(t)} bias matrix
	private DenseMatrix Bit;

	// {user, day, bias} table, lazily filled during training
	private Table<Integer, Integer, Double> But;

	// user bias weight parameters
	private DenseVector Alpha;

	// {user, feature} alpha matrix
	private DenseMatrix Auk;

	// {user, {feature, day, value} } map, lazily filled during training
	private Map<Integer, Table<Integer, Integer, Double>> Pukt;

	// {user, user scaling stable part}
	private DenseVector Cu;

	// {user, day, day-specific scaling part}
	private DenseMatrix Cut;

	public TimeSVD(SparseMatrix trainMatrix, SparseMatrix testMatrix, int fold) {
		super(trainMatrix, testMatrix, fold);

		setAlgoName("timeSVD++");

		beta = algoOptions.getFloat("-beta");
		numBins = algoOptions.getInt("-bins");
	}

	/**
	 * Initializes all temporal parameters and computes each user's mean rating date
	 * (users without ratings fall back to the global mean date).
	 */
	@Override
	protected void initModel() throws Exception {
		super.initModel();

		numDays = days(maxTimestamp, minTimestamp) + 1;

		userBias = new DenseVector(numUsers);
		userBias.init();

		itemBias = new DenseVector(numItems);
		itemBias.init();

		Alpha = new DenseVector(numUsers);
		Alpha.init();

		Bit = new DenseMatrix(numItems, numBins);
		Bit.init();

		Y = new DenseMatrix(numItems, numFactors);
		Y.init();

		Auk = new DenseMatrix(numUsers, numFactors);
		Auk.init();

		But = HashBasedTable.create();
		Pukt = new HashMap<>();

		Cu = new DenseVector(numUsers);
		Cu.init();

		Cut = new DenseMatrix(numUsers, numDays);
		Cut.init();

		// cache
		userItemsCache = trainMatrix.rowColumnsCache(cacheSpec);

		// global average date
		double sum = 0;
		int cnt = 0;
		for (MatrixEntry me : trainMatrix) {
			int u = me.row();
			int i = me.column();
			double rui = me.get();

			if (rui <= 0)
				continue;

			sum += days((long) timeMatrix.get(u, i), minTimestamp);
			cnt++;
		}
		double globalMeanDate = sum / cnt;

		// compute user's mean of rating timestamps
		userMeanDate = new DenseVector(numUsers);
		List<Integer> Ru = null;
		for (int u = 0; u < numUsers; u++) {

			sum = 0;
			Ru = userItemsCache.get(u);
			for (int i : Ru) {
				sum += days((long) timeMatrix.get(u, i), minTimestamp);
			}

			double mean = (Ru.size() > 0) ? (sum + 0.0) / Ru.size() : globalMeanDate;
			userMeanDate.set(u, mean);
		}
	}

	/**
	 * SGD training of the timeSVD++ model: for each rating, the prediction combines
	 * time-dependent item bias (with scaling cu + cu,t), time-dependent user bias
	 * (bu + alpha_u * dev_u(t) + bu,t), implicit feedback (Y), and time-dependent
	 * user factors (pu,k + alpha_u,k * dev_u(t) + pu,k,t); all parameters are updated
	 * by gradient descent with regularization.
	 */
	@Override
	protected void buildModel() throws Exception {
		for (int iter = 1; iter <= numIters; iter++) {
			loss = 0;

			for (MatrixEntry me : trainMatrix) {
				int u = me.row();
				int i = me.column();
				double rui = me.get();

				long timestamp = (long) timeMatrix.get(u, i);
				// day t
				int t = days(timestamp, minTimestamp);
				int bin = bin(t);
				double dev_ut = dev(u, t);

				double bi = itemBias.get(i);
				double bit = Bit.get(i, bin);
				double bu = userBias.get(u);
				double cu = Cu.get(u);
				double cut = Cut.get(u, t);

				// lazy initialization
				if (!But.contains(u, t))
					But.put(u, t, Randoms.random());
				double but = But.get(u, t);

				double au = Alpha.get(u); // alpha_u

				double pui = globalMean + (bi + bit) * (cu + cut); // mu + bi(t)
				pui += bu + au * dev_ut + but; // bu(t)

				// qi * yj
				List<Integer> Ru = userItemsCache.get(u);
				double sum_y = 0;
				for (int j : Ru) {
					sum_y += DenseMatrix.rowMult(Y, j, Q, i);
				}
				double wi = Ru.size() > 0 ? Math.pow(Ru.size(), -0.5) : 0;
				pui += sum_y * wi;

				// qi * pu(t)
				if (!Pukt.containsKey(u)) {
					Table<Integer, Integer, Double> data = HashBasedTable.create();
					Pukt.put(u, data);
				}

				Table<Integer, Integer, Double> Pkt = Pukt.get(u);
				for (int k = 0; k < numFactors; k++) {
					double qik = Q.get(i, k);

					// lazy initialization
					if (!Pkt.contains(k, t))
						Pkt.put(k, t, Randoms.random());

					double puk = P.get(u, k) + Auk.get(u, k) * dev_ut + Pkt.get(k, t);

					pui += puk * qik;
				}

				double eui = pui - rui;
				loss += eui * eui;

				// update bi
				double sgd = eui * (cu + cut) + regB * bi;
				itemBias.add(i, -lRate * sgd);
				loss += regB * bi * bi;

				// update bi,bin(t)
				sgd = eui * (cu + cut) + regB * bit;
				Bit.add(i, bin, -lRate * sgd);
				loss += regB * bit * bit;

				// update cu
				sgd = eui * (bi + bit) + regB * cu;
				Cu.add(u, -lRate * sgd);
				loss += regB * cu * cu;

				// update cut
				sgd = eui * (bi + bit) + regB * cut;
				Cut.add(u, t, -lRate * sgd);
				loss += regB * cut * cut;

				// update bu
				sgd = eui + regB * bu;
				userBias.add(u, -lRate * sgd);
				loss += regB * bu * bu;

				// update au
				sgd = eui * dev_ut + regB * au;
				Alpha.add(u, -lRate * sgd);
				loss += regB * au * au;

				// update but
				sgd = eui + regB * but;
				double delta = but - lRate * sgd;
				But.put(u, t, delta);
				loss += regB * but * but;

				for (int k = 0; k < numFactors; k++) {
					double qik = Q.get(i, k);
					double puk = P.get(u, k);
					double auk = Auk.get(u, k);
					double pkt = Pkt.get(k, t);

					// update qik
					double pukt = puk + auk * dev_ut + pkt;

					double sum_yk = 0;
					for (int j : Ru)
						sum_yk += Y.get(j, k);

					sgd = eui * (pukt + wi * sum_yk) + regI * qik;
					Q.add(i, k, -lRate * sgd);
					loss += regI * qik * qik;

					// update puk
					sgd = eui * qik + regU * puk;
					P.add(u, k, -lRate * sgd);
					loss += regU * puk * puk;

					// update auk
					sgd = eui * qik * dev_ut + regU * auk;
					Auk.add(u, k, -lRate * sgd);
					loss += regU * auk * auk;

					// update pkt
					sgd = eui * qik + regU * pkt;
					delta = pkt - lRate * sgd;
					Pkt.put(k, t, delta);
					loss += regU * pkt * pkt;

					// update yjk
					for (int j : Ru) {
						double yjk = Y.get(j, k);
						sgd = eui * wi * qik + regI * yjk;
						Y.add(j, k, -lRate * sgd);
						loss += regI * yjk * yjk;
					}
				}
			}

			loss *= 0.5;

			if (isConverged(iter))
				break;
		}
	}

	/**
	 * Prediction at the test rating's timestamp; lazily-initialized day-specific
	 * parameters (bu,t and pu,k,t) default to 0 when unseen during training.
	 */
	@Override
	public double predict(int u, int i) throws Exception {
		// retrieve the test rating timestamp
		long timestamp = (long) testTimeMatrix.get(u, i);
		int t = days(timestamp, minTimestamp);
		int bin = bin(t);
		double dev_ut = dev(u, t);

		double pred = globalMean;

		// bi(t): eq. (12)
		pred += (itemBias.get(i) + Bit.get(i, bin)) * (Cu.get(u) + Cut.get(u, t));

		// bu(t): eq. (9)
		double but = But.contains(u, t) ? But.get(u, t) : 0;
		pred += userBias.get(u) + Alpha.get(u) * dev_ut + but;

		// qi * yj
		List<Integer> Ru = userItemsCache.get(u);
		double sum_y = 0;
		for (int j : Ru)
			sum_y += DenseMatrix.rowMult(Y, j, Q, i);

		double wi = Ru.size() > 0 ? Math.pow(Ru.size(), -0.5) : 0;
		pred += sum_y * wi;

		// qi * pu(t)
		for (int k = 0; k < numFactors; k++) {
			double qik = Q.get(i, k);
			// eq. (13)
			double puk = P.get(u, k) + Auk.get(u, k) * dev_ut;

			if (Pukt.containsKey(u)) {
				Table<Integer, Integer, Double> pkt = Pukt.get(u);
				if (pkt != null) {
					// eq. (13)
					puk += (pkt.contains(k, t) ? pkt.get(k, t) : 0);
				}
			}

			pred += puk * qik;
		}

		return pred;
	}

	@Override
	public String toString() {
		return super.toString() + "," + Strings.toString(new Object[] { beta, numBins });
	}

	/***************************************************************** Functional Methods *******************************************/
	/**
	 * @return the time deviation for a specific timestamp t w.r.t the mean date tu,
	 *         i.e. sign(t - tu) * |t - tu|^beta
	 */
	protected double dev(int u, int t) {
		double tu = userMeanDate.get(u);

		// date difference in days
		double diff = t - tu;

		return Math.signum(diff) * Math.pow(Math.abs(diff), beta);
	}

	/**
	 * @return the bin number (starting from 0..numBins-1) for a specific timestamp t;
	 */
	protected int bin(int day) {
		return (int) (day / (numDays + 0.0) * numBins);
	}

	/**
	 * @return number of days for a given time difference (in milliseconds)
	 */
	protected static int days(long diff) {
		return (int) TimeUnit.MILLISECONDS.toDays(diff);
	}

	/**
	 * @return number of days between two timestamps
	 */
	protected static int days(long t1, long t2) {
		return days(Math.abs(t1 - t2));
	}
}
| 9,504 | 23.247449 | 131 | java |
librec | librec-master/librec/src/main/java/librec/rating/BiasedMF.java | // Copyright (C) 2014 Guibing Guo
//
// This file is part of LibRec.
//
// LibRec is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// LibRec is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with LibRec. If not, see <http://www.gnu.org/licenses/>.
//
package librec.rating;
import librec.data.DenseMatrix;
import librec.data.DenseVector;
import librec.data.MatrixEntry;
import librec.data.SparseMatrix;
import librec.intf.IterativeRecommender;
/**
* Biased Matrix Factorization Models. <br/>
*
* NOTE: To have more control on learning, you can add additional regularation parameters to user/item biases. For
* simplicity, we do not do this.
*
* @author guoguibing
*
*/
public class BiasedMF extends IterativeRecommender {

	public BiasedMF(SparseMatrix rm, SparseMatrix tm, int fold) {
		super(rm, tm, fold);
	}

	/**
	 * Initializes latent factors (in super) plus the user/item bias vectors.
	 */
	@Override // FIX: annotation was missing; added for compiler checking, consistent with sibling classes
	protected void initModel() throws Exception {
		super.initModel();

		userBias = new DenseVector(numUsers);
		itemBias = new DenseVector(numItems);

		// initialize user bias
		userBias.init(initMean, initStd);
		itemBias.init(initMean, initStd);
	}

	/**
	 * SGD training: for each observed rating, update the user bias, the item bias,
	 * and the latent factors P/Q with L2 regularization; stops on convergence.
	 */
	@Override
	protected void buildModel() throws Exception {

		for (int iter = 1; iter <= numIters; iter++) {

			loss = 0;
			for (MatrixEntry me : trainMatrix) {

				int u = me.row(); // user
				int j = me.column(); // item
				double ruj = me.get();

				double pred = predict(u, j, false);
				double euj = ruj - pred;

				loss += euj * euj;

				// update factors
				double bu = userBias.get(u);
				double sgd = euj - regB * bu;
				userBias.add(u, lRate * sgd);

				loss += regB * bu * bu;

				double bj = itemBias.get(j);
				sgd = euj - regB * bj;
				itemBias.add(j, lRate * sgd);

				loss += regB * bj * bj;

				for (int f = 0; f < numFactors; f++) {
					double puf = P.get(u, f);
					double qjf = Q.get(j, f);

					double delta_u = euj * qjf - regU * puf;
					double delta_j = euj * puf - regI * qjf;

					P.add(u, f, lRate * delta_u);
					Q.add(j, f, lRate * delta_j);

					loss += regU * puf * puf + regI * qjf * qjf;
				}

			}

			loss *= 0.5;

			if (isConverged(iter))
				break;

		}// end of training

	}

	/**
	 * Prediction: global mean + user bias + item bias + p_u^T q_j.
	 */
	public double predict(int u, int j) throws Exception {
		return globalMean + userBias.get(u) + itemBias.get(j) + DenseMatrix.rowMult(P, u, Q, j);
	}
}
| 2,723 | 23.321429 | 114 | java |
librec | librec-master/librec/src/main/java/librec/rating/LLORMA.java | package librec.rating;
import librec.data.DenseMatrix;
import librec.data.DenseVector;
import librec.data.MatrixEntry;
import librec.data.SparseMatrix;
import librec.intf.IterativeRecommender;
import librec.intf.Recommender;
import librec.util.KernelSmoothing;
import librec.util.Logs;
import java.util.List;
/**
* <h3>Local Low-Rank Matrix Approximation</h3>
* <p>
* This implementation refers to the method proposed by Lee et al. at ICML 2013.
* <p>
* <strong>Lcoal Structure:</strong> Joonseok Lee, <strong>Local Low-Rank Matrix
* Approximation </strong>, ICML. 2013: 82-90.
*
* @author wubin
*/
public class LLORMA extends IterativeRecommender {
private static int localNumFactors;
private static int localNumIters;
private static int multiThreadCount;
protected static float localRegU, localRegI;
private float localLRate;
private SparseMatrix predictMatrix;
private static int modelMax;
private static SparseMatrix testIndexMatrix;// test index matrix for predict
private SparseMatrix cumPrediction, cumWeight;
public LLORMA(SparseMatrix trainMatrix, SparseMatrix testMatrix, int fold) {
super(trainMatrix, testMatrix, fold);
algoOptions = cf.getParamOptions("LLORMA");
localNumFactors = algoOptions.getInt("-lnf", 20);
multiThreadCount = algoOptions.getInt("-mtc", 4);
modelMax = algoOptions.getInt("-mm", 50);
multiThreadCount = multiThreadCount > modelMax ? modelMax : multiThreadCount;
localNumIters = algoOptions.getInt("-lni", 100);
localLRate = algoOptions.getFloat("-lr", 0.01f);
localRegI = algoOptions.getFloat("-lu", 0.001f);
localRegU = algoOptions.getFloat("-li", 0.001f);
predictMatrix = new SparseMatrix(testMatrix);
}
@Override
protected void initModel() throws Exception {
testIndexMatrix = new SparseMatrix(testMatrix);
for (MatrixEntry me : testIndexMatrix) {
int u = me.row();
int i = me.column();
testIndexMatrix.set(u, i, 0.0);
}
// global svd P Q to calculate the kernel value between users (or items)
P = new DenseMatrix(numUsers, numFactors);
Q = new DenseMatrix(numItems, numFactors);
// initialize model
if (initByNorm) {
P.init(initMean, initStd);
Q.init(initMean, initStd);
} else {
P.init(); // P.init(smallValue);
Q.init(); // Q.init(smallValue);
}
this.buildGlobalModel();
}
// global svd P Q
private void buildGlobalModel() throws Exception {
for (int iter = 1; iter <= numIters; iter++) {
for (MatrixEntry me : trainMatrix) {
int u = me.row(); // user
int i = me.column(); // item
double rui = me.get();
double pui = DenseMatrix.rowMult(P, u, Q, i);
double eui = rui - pui;
// update factors
for (int f = 0; f < numFactors; f++) {
double puf = P.get(u, f), qif = Q.get(i, f);
P.add(u, f, lRate * (eui * qif - regU * puf));
Q.add(i, f, lRate * (eui * puf - regI * qif));
}
}
} // end of training
}
@Override
protected void buildModel() throws Exception {
// Pre-calculating similarity:
int completeModelCount = 0;
LLORMAUpdater[] learners = new LLORMAUpdater[multiThreadCount];
int[] anchorUser = new int[modelMax];
int[] anchorItem = new int[modelMax];
int modelCount = 0;
int[] runningThreadList = new int[multiThreadCount];
int runningThreadCount = 0;
int waitingThreadPointer = 0;
int nextRunningSlot = 0;
cumPrediction = new SparseMatrix(testIndexMatrix);
cumWeight = new SparseMatrix(testIndexMatrix);
// Parallel training:
while (completeModelCount < modelMax) {
int u_t = (int) Math.floor(Math.random() * numUsers);
List<Integer> itemList = trainMatrix.getColumns(u_t);
if (itemList != null) {
if (runningThreadCount < multiThreadCount && modelCount < modelMax) {
// Selecting a new anchor point:
int idx = (int) Math.floor(Math.random() * itemList.size());
int i_t = itemList.get(idx);
anchorUser[modelCount] = u_t;
anchorItem[modelCount] = i_t;
// Preparing weight vectors:
DenseVector w = kernelSmoothing(numUsers, u_t, KernelSmoothing.EPANECHNIKOV_KERNEL, 0.8, false);
DenseVector v = kernelSmoothing(numItems, i_t, KernelSmoothing.EPANECHNIKOV_KERNEL, 0.8, true);
// Starting a new local model learning:
learners[nextRunningSlot] = new LLORMAUpdater(modelCount, localNumFactors, numUsers, numItems, u_t,
i_t, localLRate, localRegU, localRegI, localNumIters, w, v, trainMatrix);
learners[nextRunningSlot].start();
runningThreadList[runningThreadCount] = modelCount;
runningThreadCount++;
modelCount++;
nextRunningSlot++;
} else if (runningThreadCount > 0) {
// Joining a local model which was done with learning:
try {
learners[waitingThreadPointer].join();
} catch (InterruptedException ie) {
System.out.println("Join failed: " + ie);
}
int mp = waitingThreadPointer;
int mc = completeModelCount;
completeModelCount++;
// Predicting with the new local model and all previous
// models:
predictMatrix = new SparseMatrix(testIndexMatrix);
for (MatrixEntry me : testMatrix) {
int u = me.row();
int i = me.column();
double weight = KernelSmoothing.kernelize(getUserSimilarity(anchorUser[mc], u), 0.8,
KernelSmoothing.EPANECHNIKOV_KERNEL)
* KernelSmoothing.kernelize(getItemSimilarity(anchorItem[mc], i), 0.8,
KernelSmoothing.EPANECHNIKOV_KERNEL);
double newPrediction = (learners[mp].getUserFeatures().row(u)
.inner(learners[mp].getItemFeatures().row(i))) * weight;
cumWeight.set(u, i, cumWeight.get(u, i) + weight);
cumPrediction.set(u, i, cumPrediction.get(u, i) + newPrediction);
double prediction = cumPrediction.get(u, i) / cumWeight.get(u, i);
if (Double.isNaN(prediction) || prediction == 0.0) {
prediction = globalMean;
}
if (prediction < minRate) {
prediction = minRate;
} else if (prediction > maxRate) {
prediction = maxRate;
}
predictMatrix.set(u, i, prediction);
}
if (completeModelCount % 5 == 0) {
evalRatings();
Logs.debug("{}{} iter {}:[" + measures.getMetricNamesString() + "] {}", algoName, foldInfo,
completeModelCount, "[" + measures.getEvalResultString() + "]");
}
nextRunningSlot = waitingThreadPointer;
waitingThreadPointer = (waitingThreadPointer + 1) % multiThreadCount;
runningThreadCount--;
}
}
}
}
/**
* Calculate similarity between two users, based on the global base SVD.
*
* @param idx1
* The first user's ID.
* @param idx2
* The second user's ID.
* @return The similarity value between two users idx1 and idx2.
*/
private double getUserSimilarity(int idx1, int idx2) {
double sim;
DenseVector u_vec = P.row(idx1);
DenseVector v_vec = P.row(idx2);
sim = 1 - 2.0 / Math.PI
* Math.acos(u_vec.inner(v_vec) / (Math.sqrt(u_vec.inner(u_vec)) * Math.sqrt(v_vec.inner(v_vec))));
if (Double.isNaN(sim)) {
sim = 0.0;
}
return sim;
}
/**
* Calculate similarity between two items, based on the global base SVD.
*
* @param idx1
* The first item's ID.
* @param idx2
* The second item's ID.
* @return The similarity value between two items idx1 and idx2.
*/
private double getItemSimilarity(int idx1, int idx2) {
double sim;
DenseVector i_vec = Q.row(idx1);
DenseVector j_vec = Q.row(idx2);
sim = 1 - 2.0 / Math.PI
* Math.acos(i_vec.inner(j_vec) / (Math.sqrt(i_vec.inner(i_vec)) * Math.sqrt(j_vec.inner(j_vec))));
if (Double.isNaN(sim)) {
sim = 0.0;
}
return sim;
}
/**
* Given the similarity, it applies the given kernel. This is done either
* for all users or for all items.
*
* @param size
* The length of user or item vector.
* @param id
* The identifier of anchor point.
* @param kernelType
* The type of kernel.
* @param width
* Kernel width.
* @param isItemFeature
* return item kernel if yes, return user kernel otherwise.
* @return The kernel-smoothed values for all users or all items.
*/
private DenseVector kernelSmoothing(int size, int id, int kernelType, double width, boolean isItemFeature) {
DenseVector newFeatureVector = new DenseVector(size);
newFeatureVector.set(id, 1.0);
for (int i = 0; i < size; i++) {
double sim;
if (isItemFeature) {
sim = getItemSimilarity(i, id);
} else {
sim = getUserSimilarity(i, id);
}
newFeatureVector.set(i, KernelSmoothing.kernelize(sim, width, kernelType));
}
return newFeatureVector;
}
@Override
public double predict(int u, int i) throws Exception {
return predictMatrix.get(u, i);
}
}
| 8,745 | 28.748299 | 109 | java |
librec | librec-master/librec/src/main/java/librec/rating/SocialMF.java | // Copyright (C) 2014 Guibing Guo
//
// This file is part of LibRec.
//
// LibRec is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// LibRec is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with LibRec. If not, see <http://www.gnu.org/licenses/>.
//
package librec.rating;
import librec.data.DenseMatrix;
import librec.data.MatrixEntry;
import librec.data.SparseMatrix;
import librec.data.SparseVector;
import librec.intf.SocialRecommender;
/**
* Jamali and Ester, <strong>A matrix factorization technique with trust propagation for recommendation in social
* networks</strong>, RecSys 2010.
*
* @author guoguibing
*
*/
public class SocialMF extends SocialRecommender {
	public SocialMF(SparseMatrix trainMatrix, SparseMatrix testMatrix, int fold) {
		super(trainMatrix, testMatrix, fold);
		// the paper initializes factors uniformly rather than by a normal distribution
		initByNorm = false;
	}
	@Override
	protected void buildModel() throws Exception {
		// Batch gradient descent: PS/QS accumulate the full gradients of the
		// rating loss plus the social regularization, then P and Q are updated once per iteration.
		for (int iter = 1; iter <= numIters; iter++) {
			loss = 0;
			DenseMatrix PS = new DenseMatrix(numUsers, numFactors);
			DenseMatrix QS = new DenseMatrix(numItems, numFactors);
			// rated items
			for (MatrixEntry me : trainMatrix) {
				int u = me.row();
				int j = me.column();
				double ruj = me.get();
				double pred = predict(u, j, false);
				// error in the sigmoid-mapped/normalized rating space
				double euj = g(pred) - normalize(ruj);
				loss += euj * euj;
				// common factor of the gradient: g'(pred) * error
				double csgd = gd(pred) * euj;
				for (int f = 0; f < numFactors; f++) {
					PS.add(u, f, csgd * Q.get(j, f) + regU * P.get(u, f));
					QS.add(j, f, csgd * P.get(u, f) + regI * Q.get(j, f));
					loss += regU * P.get(u, f) * P.get(u, f);
					loss += regI * Q.get(j, f) * Q.get(j, f);
				}
			}
			// social regularization
			for (int u = 0; u < numUsers; u++) {
				// pull each user's factors towards the (trust-weighted) mean of trusted neighbors
				SparseVector uv = socialMatrix.row(u);
				int numConns = uv.getCount();
				if (numConns == 0)
					continue;
				double[] sumNNs = new double[numFactors];
				for (int v : uv.getIndex()) {
					for (int f = 0; f < numFactors; f++)
						sumNNs[f] += socialMatrix.get(u, v) * P.get(v, f);
				}
				for (int f = 0; f < numFactors; f++) {
					double diff = P.get(u, f) - sumNNs[f] / numConns;
					PS.add(u, f, regS * diff);
					loss += regS * diff * diff;
				}
				// those who trusted user u
				// indirect gradient: u also appears inside the neighbor-mean of every truster v
				SparseVector iuv = socialMatrix.column(u);
				int numVs = iuv.getCount();
				for (int v : iuv.getIndex()) {
					double tvu = socialMatrix.get(v, u);
					SparseVector vv = socialMatrix.row(v);
					double[] sumDiffs = new double[numFactors];
					for (int w : vv.getIndex()) {
						for (int f = 0; f < numFactors; f++)
							sumDiffs[f] += socialMatrix.get(v, w) * P.get(w, f);
					}
					// NOTE: numConns is intentionally reused here for v's out-degree
					numConns = vv.getCount();
					if (numConns > 0)
						for (int f = 0; f < numFactors; f++)
							PS.add(u, f, -regS * (tvu / numVs) * (P.get(v, f) - sumDiffs[f] / numConns));
				}
			}
			// update user factors
			P = P.add(PS.scale(-lRate));
			Q = Q.add(QS.scale(-lRate));
			loss *= 0.5;
			if (isConverged(iter))
				break;
		}
	}
	@Override
	public double predict(int u, int j, boolean bounded) {
		// raw inner product; when bounded, map through the sigmoid and back to the rating scale
		double pred = DenseMatrix.rowMult(P, u, Q, j);
		if (bounded)
			return denormalize(g(pred));
		return pred;
	}
}
| 3,572 | 25.080292 | 113 | java |
librec | librec-master/librec/src/main/java/librec/rating/TrustMF.java | // Copyright (C) 2014 Guibing Guo
//
// This file is part of LibRec.
//
// LibRec is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// LibRec is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with LibRec. If not, see <http://www.gnu.org/licenses/>.
//
package librec.rating;
import librec.data.DenseMatrix;
import librec.data.DenseVector;
import librec.data.MatrixEntry;
import librec.data.SparseMatrix;
import librec.intf.SocialRecommender;
/**
* Yang et al., <strong>Social Collaborative Filtering by Trust</strong>, IJCAI 2013.
*
* @author guoguibing
*
*/
public class TrustMF extends SocialRecommender {
	// truster model: ratings ~ Br*Vr, trust ~ Br*Wr
	protected DenseMatrix Br, Wr, Vr;
	// trustee model: ratings ~ We*Ve, trust ~ Be*We
	protected DenseMatrix Be, We, Ve;
	// which sub-model to use: "Tr" (truster), "Te" (trustee) or "T" (both)
	protected String model;

	public TrustMF(SparseMatrix trainMatrix, SparseMatrix testMatrix, int fold) {
		super(trainMatrix, testMatrix, fold);
		model = algoOptions.getString("-m");
		algoName = "TrustMF (" + model + ")";
	}

	protected void initTr() {
		Vr = new DenseMatrix(numItems, numFactors);
		Br = new DenseMatrix(numUsers, numFactors);
		Wr = new DenseMatrix(numUsers, numFactors);
		Vr.init();
		Br.init();
		Wr.init();
	}

	protected void initTe() {
		Ve = new DenseMatrix(numItems, numFactors);
		Be = new DenseMatrix(numUsers, numFactors);
		We = new DenseMatrix(numUsers, numFactors);
		Ve.init();
		Be.init();
		We.init();
	}

	@Override
	protected void initModel() throws Exception {
		switch (model) {
		case "Tr":
			initTr();
			break;
		case "Te":
			initTe();
			break;
		case "T":
		default:
			initTr();
			initTe();
			break;
		}
	}

	@Override
	protected void buildModel() throws Exception {
		switch (model) {
		case "Tr":
			TrusterMF();
			break;
		case "Te":
			TrusteeMF();
			break;
		case "T":
		default:
			TrusterMF();
			TrusteeMF();
			break;
		}
	}

	/**
	 * Build TrusterMF model: Br*Vr
	 */
	protected void TrusterMF() throws Exception {
		for (int iter = 1; iter <= numIters; iter++) {
			loss = 0;
			// gradients of B, V, W
			DenseMatrix BS = new DenseMatrix(numUsers, numFactors);
			DenseMatrix WS = new DenseMatrix(numUsers, numFactors);
			DenseMatrix VS = new DenseMatrix(numItems, numFactors);
			// rate matrix
			for (MatrixEntry me : trainMatrix) {
				int u = me.row();
				int j = me.column();
				double ruj = me.get();
				double pred = predict(u, j, false);
				double euj = g(pred) - normalize(ruj);
				loss += euj * euj;
				// common gradient factor: g'(pred) * error
				double csgd = gd(pred) * euj;
				for (int f = 0; f < numFactors; f++) {
					BS.add(u, f, csgd * Vr.get(j, f) + regU * Br.get(u, f));
					VS.add(j, f, csgd * Br.get(u, f) + regI * Vr.get(j, f));
					loss += regU * Br.get(u, f) * Br.get(u, f);
					loss += regI * Vr.get(j, f) * Vr.get(j, f);
				}
			}
			// social matrix: trust t_uk modeled as g(Br_u . Wr_k)
			for (MatrixEntry me : socialMatrix) {
				int u = me.row();
				int k = me.column();
				double tuk = me.get();
				if (tuk > 0) {
					double pred = DenseMatrix.rowMult(Br, u, Wr, k);
					double euj = g(pred) - tuk;
					loss += regS * euj * euj;
					double csgd = gd(pred) * euj;
					for (int f = 0; f < numFactors; f++) {
						BS.add(u, f, regS * csgd * Wr.get(k, f) + regU * Br.get(u, f));
						WS.add(k, f, regS * csgd * Br.get(u, f) + regU * Wr.get(k, f));
						loss += regU * Br.get(u, f) * Br.get(u, f);
						// FIX: the regularized row here is Wr's row k (see WS.add above);
						// the original accumulated Wr.get(u, f) * Wr.get(u, f), i.e. the
						// wrong user's row, so the reported loss did not match the gradient.
						loss += regU * Wr.get(k, f) * Wr.get(k, f);
					}
				}
			}
			// batch update after accumulating full gradients
			Br = Br.add(BS.scale(-lRate));
			Vr = Vr.add(VS.scale(-lRate));
			Wr = Wr.add(WS.scale(-lRate));
			loss *= 0.5;
			if (isConverged(iter))
				break;
		}
	}

	/**
	 * Build TrusteeMF model: We*Ve
	 */
	protected void TrusteeMF() throws Exception {
		for (int iter = 1; iter <= numIters; iter++) {
			loss = 0;
			// gradients of B, V, W
			DenseMatrix BS = new DenseMatrix(numUsers, numFactors);
			DenseMatrix WS = new DenseMatrix(numUsers, numFactors);
			DenseMatrix VS = new DenseMatrix(numItems, numFactors);
			// rate matrix
			for (MatrixEntry me : trainMatrix) {
				int u = me.row();
				int j = me.column();
				double ruj = me.get();
				double pred = predict(u, j, false);
				double euj = g(pred) - normalize(ruj);
				loss += euj * euj;
				double csgd = gd(pred) * euj;
				for (int f = 0; f < numFactors; f++) {
					WS.add(u, f, csgd * Ve.get(j, f) + regU * We.get(u, f));
					VS.add(j, f, csgd * We.get(u, f) + regI * Ve.get(j, f));
					loss += regU * We.get(u, f) * We.get(u, f);
					loss += regI * Ve.get(j, f) * Ve.get(j, f);
				}
			}
			// social matrix: trust t_ku modeled as g(Be_k . We_u)
			for (MatrixEntry me : socialMatrix) {
				int k = me.row();
				int u = me.column();
				double tku = me.get();
				if (tku > 0) {
					double pred = DenseMatrix.rowMult(Be, k, We, u);
					double euj = g(pred) - tku;
					loss += regS * euj * euj;
					double csgd = gd(pred) * euj;
					for (int f = 0; f < numFactors; f++) {
						WS.add(u, f, regS * csgd * Be.get(k, f) + regU * We.get(u, f));
						BS.add(k, f, regS * csgd * We.get(u, f) + regU * Be.get(k, f));
						loss += regU * We.get(u, f) * We.get(u, f);
						loss += regU * Be.get(k, f) * Be.get(k, f);
					}
				}
			}
			Be = Be.add(BS.scale(-lRate));
			Ve = Ve.add(VS.scale(-lRate));
			We = We.add(WS.scale(-lRate));
			loss *= 0.5;
			if (isConverged(iter))
				break;
		}
	}

	/**
	 * This is the method used by the paper authors: a fixed learning-rate
	 * decay schedule at iterations 10, 30 and 100.
	 */
	protected void updateLRate(int iter) {
		if (iter == 10)
			lRate *= 0.6;
		else if (iter == 30)
			lRate *= 0.333;
		else if (iter == 100)
			lRate *= 0.5;
	}

	public double predict(int u, int j, boolean bounded) {
		double pred = 0.0;
		switch (model) {
		case "Tr":
			pred = DenseMatrix.rowMult(Br, u, Vr, j);
			break;
		case "Te":
			pred = DenseMatrix.rowMult(We, u, Ve, j);
			break;
		case "T":
		default:
			// combined model: average the truster and trustee representations
			DenseVector uv = Br.row(u).add(We.row(u, false));
			DenseVector jv = Vr.row(j).add(Ve.row(j, false));
			pred = uv.scale(0.5).inner(jv.scale(0.5));
			break;
		}
		if (bounded)
			return denormalize(g(pred));
		return pred;
	}
}
| 6,334 | 21.464539 | 85 | java |
librec | librec-master/librec/src/main/java/librec/rating/GPLSA.java | // Copyright (C) 2014-2015 Guibing Guo
//
// This file is part of LibRec.
//
// LibRec is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// LibRec is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with LibRec. If not, see <http://www.gnu.org/licenses/>.
//
package librec.rating;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import librec.data.AddConfiguration;
import librec.data.DenseMatrix;
import librec.data.DenseVector;
import librec.data.MatrixEntry;
import librec.data.SparseMatrix;
import librec.data.SparseVector;
import librec.data.VectorEntry;
import librec.intf.GraphicRecommender;
import librec.util.Gaussian;
import librec.util.Logs;
import librec.util.Randoms;
import librec.util.Stats;
import librec.util.Strings;
import com.google.common.collect.HashBasedTable;
import com.google.common.collect.Table;
/**
* Thomas Hofmann, <strong>Collaborative Filtering via Gaussian Probabilistic Latent Semantic Analysis</strong>, SIGIR
* 2003. <br>
*
* <strong>Tempered EM:</strong> Thomas Hofmann, <strong>Unsupervised Learning by Probabilistic Latent Semantic
* Analysis</strong>, Machine Learning, 42, 177�C196, 2001.
*
* @author Guo Guibing
*
*/
@AddConfiguration(before = "factors, q, b")
public class GPLSA extends GraphicRecommender {
	// {user, item, {factor z, probability}}
	private Table<Integer, Integer, Map<Integer, Double>> Q;
	// per-item, per-factor Gaussian parameters
	private DenseMatrix Mu, Sigma;
	// per-user normalization parameters (smoothed mean / std of the user's ratings)
	private DenseVector mu, sigma;
	private float q; // smoothing weight
	private float b; // tempered EM parameter beta, suggested by Wu Bin
	private double preRMSE;
	public GPLSA(SparseMatrix trainMatrix, SparseMatrix testMatrix, int fold) {
		super(trainMatrix, testMatrix, fold);
	}
	@Override
	protected void initModel() throws Exception {
		// Pz_u
		// random topic distribution per user (rows of Puk sum to 1)
		Puk = new DenseMatrix(numUsers, numFactors);
		for (int u = 0; u < numUsers; u++) {
			double[] probs = Randoms.randProbs(numFactors);
			for (int k = 0; k < numFactors; k++) {
				Puk.set(u, k, probs[k]);
			}
		}
		// normalize ratings
		double mean = globalMean;
		double sd = Stats.sd(trainMatrix.getData(), mean);
		q = algoOptions.getFloat("-q");
		b = algoOptions.getFloat("-b", 1.0f);
		mu = new DenseVector(numUsers);
		sigma = new DenseVector(numUsers);
		for (int u = 0; u < numUsers; u++) {
			SparseVector ru = trainMatrix.row(u);
			int Nu = ru.size();
			if (Nu < 1)
				continue;
			// compute mu_u
			// user mean smoothed towards the global mean with weight q
			double mu_u = (ru.sum() + q * mean) / (Nu + q);
			mu.set(u, mu_u);
			// compute sigma_u
			double sum = 0;
			for (VectorEntry ve : ru) {
				sum += Math.pow(ve.get() - mu_u, 2);
			}
			sum += q * Math.pow(sd, 2);
			double sigma_u = Math.sqrt(sum / (Nu + q));
			sigma.set(u, sigma_u);
		}
		// initialize Q
		// WARNING: this loop rewrites trainMatrix IN PLACE with user-standardized
		// ratings; all later EM steps operate on the normalized values.
		Q = HashBasedTable.create();
		for (MatrixEntry me : trainMatrix) {
			int u = me.row();
			int i = me.column();
			double rate = me.get();
			double r = (rate - mu.get(u)) / sigma.get(u); // continuous ratings
			me.set(r);
			Q.put(u, i, new HashMap<Integer, Double>());
		}
		// initialize Mu, Sigma
		// item Gaussians start at the item's empirical mean/std plus small random noise
		Mu = new DenseMatrix(numItems, numFactors);
		Sigma = new DenseMatrix(numItems, numFactors);
		for (int i = 0; i < numItems; i++) {
			SparseVector ci = trainMatrix.column(i);
			int Ni = ci.size();
			if (Ni < 1)
				continue;
			double mu_i = ci.mean();
			double sum = 0;
			for (VectorEntry ve : ci) {
				sum += Math.pow(ve.get() - mu_i, 2);
			}
			double sd_i = Math.sqrt(sum / Ni);
			for (int z = 0; z < numFactors; z++) {
				Mu.set(i, z, mu_i + smallValue * Math.random());
				Sigma.set(i, z, sd_i + smallValue * Math.random());
			}
		}
	}
	@Override
	protected void eStep() {
		// variational inference to compute Q
		// posterior over factors: Q(z|u,i) ∝ (P(z|u) * N(r; Mu_iz, Sigma_iz))^b
		for (MatrixEntry me : trainMatrix) {
			int u = me.row();
			int i = me.column();
			double r = me.get();
			double denominator = 0;
			double[] numerator = new double[numFactors];
			for (int z = 0; z < numFactors; z++) {
				double pdf = Gaussian.pdf(r, Mu.get(i, z), Sigma.get(i, z));
				double val = Math.pow(Puk.get(u, z) * pdf, b); // Tempered EM
				numerator[z] = val;
				denominator += val;
			}
			Map<Integer, Double> factorProbs = Q.get(u, i);
			for (int z = 0; z < numFactors; z++) {
				// guard against a zero denominator when all pdfs underflow
				double prob = (denominator > 0 ? numerator[z] / denominator : 0);
				factorProbs.put(z, prob);
			}
		}
	}
	@Override
	protected void mStep() {
		// theta_u,z
		// re-estimate each user's factor distribution from the posteriors
		for (int u = 0; u < numUsers; u++) {
			List<Integer> items = trainMatrix.getColumns(u);
			if (items.size() < 1)
				continue;
			double[] numerator = new double[numFactors];
			double denominator = 0;
			for (int z = 0; z < numFactors; z++) {
				for (int i : items) {
					numerator[z] += Q.get(u, i).get(z);
				}
				denominator += numerator[z];
			}
			for (int z = 0; z < numFactors; z++) {
				Puk.set(u, z, numerator[z] / denominator);
			}
		}
		// Mu, Sigma
		// re-estimate each item-factor Gaussian by posterior-weighted mean/std
		for (int i = 0; i < numItems; i++) {
			List<Integer> users = trainMatrix.getRows(i);
			if (users.size() < 1)
				continue;
			for (int z = 0; z < numFactors; z++) {
				double numerator = 0, denominator = 0;
				for (int u : users) {
					double r = trainMatrix.get(u, i);
					double prob = Q.get(u, i).get(z);
					numerator += r * prob;
					denominator += prob;
				}
				double mu = denominator > 0 ? numerator / denominator : 0;
				Mu.set(i, z, mu);
				numerator = 0;
				denominator = 0;
				for (int u : users) {
					double r = trainMatrix.get(u, i);
					double prob = Q.get(u, i).get(z);
					numerator += Math.pow(r - mu, 2) * prob;
					denominator += prob;
				}
				double sigma = denominator > 0 ? Math.sqrt(numerator / denominator) : 0;
				Sigma.set(i, z, sigma);
			}
		}
	}
	@Override
	public double predict(int u, int i) throws Exception {
		// expected normalized rating, mapped back to the user's rating scale
		double sum = 0;
		for (int z = 0; z < numFactors; z++) {
			sum += Puk.get(u, z) * Mu.get(i, z);
		}
		return mu.get(u) + sigma.get(u) * sum;
	}
	@Override
	protected boolean isConverged(int iter) throws Exception {
		// early stopping on validation RMSE: stop once it starts increasing
		if (validationMatrix == null)
			return false;
		// compute current RMSE
		int numCount = 0;
		double sum = 0;
		for (MatrixEntry me : validationMatrix) {
			double rate = me.get();
			int u = me.row();
			int j = me.column();
			double pred = predict(u, j, true);
			if (Double.isNaN(pred))
				continue;
			double err = rate - pred;
			sum += err * err;
			numCount++;
		}
		double RMSE = Math.sqrt(sum / numCount);
		double delta = RMSE - preRMSE;
		if (verbose) {
			Logs.debug("{}{} iter {} achieves RMSE = {}, delta_RMSE = {}", algoName, foldInfo, iter, (float) RMSE,
					(float) (delta));
		}
		if (numStats > 1 && delta > 0)
			return true;
		preRMSE = RMSE;
		numStats++;
		return false;
	}
	@Override
	public String toString() {
		return Strings.toString(new Object[] { numFactors, q, b }) + ", " + super.toString();
	}
}
| 7,208 | 23.35473 | 118 | java |
librec | librec-master/librec/src/main/java/librec/rating/TrustSVD.java | // Copyright (C) 2014 Guibing Guo
//
// This file is part of LibRec.
//
// LibRec is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// LibRec is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with LibRec. If not, see <http://www.gnu.org/licenses/>.
//
package librec.rating;
import java.util.List;
import librec.data.DenseMatrix;
import librec.data.DenseVector;
import librec.data.MatrixEntry;
import librec.data.SparseMatrix;
import librec.intf.SocialRecommender;
/**
* Guo et al., <strong>TrustSVD: Collaborative Filtering with Both the Explicit and Implicit Influence of User Trust and
* of Item Ratings</strong>, AAAI 2015.
*
* @author guoguibing
*
*/
public class TrustSVD extends SocialRecommender {
	// W: implicit trust-feedback factors; Y: implicit rating-feedback factors
	private DenseMatrix W, Y;
	// weighted-lambda regularization: 1/sqrt(frequency) per item (wlr_j),
	// per trustee in-degree (wlr_tc) and per truster out-degree (wlr_tr)
	private DenseVector wlr_j, wlr_tc, wlr_tr;
	public TrustSVD(SparseMatrix trainMatrix, SparseMatrix testMatrix, int fold) {
		super(trainMatrix, testMatrix, fold);
	}
	@Override
	protected void initModel() throws Exception {
		super.initModel();
		userBias = new DenseVector(numUsers);
		itemBias = new DenseVector(numItems);
		W = new DenseMatrix(numUsers, numFactors);
		Y = new DenseMatrix(numItems, numFactors);
		if (initByNorm) {
			userBias.init(initMean, initStd);
			itemBias.init(initMean, initStd);
			W.init(initMean, initStd);
			Y.init(initMean, initStd);
		} else {
			userBias.init();
			itemBias.init();
			W.init();
			Y.init();
		}
		wlr_tc = new DenseVector(numUsers);
		wlr_tr = new DenseVector(numUsers);
		wlr_j = new DenseVector(numItems);
		userItemsCache = trainMatrix.rowColumnsCache(cacheSpec);
		userFriendsCache = socialMatrix.rowColumnsCache(cacheSpec);
		// precompute 1/sqrt(count) regularization weights; 1.0 when count == 0
		for (int u = 0; u < numUsers; u++) {
			int count = socialMatrix.columnSize(u);
			wlr_tc.set(u, count > 0 ? 1.0 / Math.sqrt(count) : 1.0);
			count = socialMatrix.rowSize(u);
			wlr_tr.set(u, count > 0 ? 1.0 / Math.sqrt(count) : 1.0);
		}
		for (int j = 0; j < numItems; j++) {
			int count = trainMatrix.columnSize(j);
			wlr_j.set(j, count > 0 ? 1.0 / Math.sqrt(count) : 1.0);
		}
	}
	@Override
	protected void buildModel() throws Exception {
		for (int iter = 1; iter <= numIters; iter++) {
			loss = 0;
			// P and W are updated in batch (via PS/WS) because they appear in both
			// the rating and the trust objective; all other parameters use plain SGD.
			DenseMatrix PS = new DenseMatrix(numUsers, numFactors);
			DenseMatrix WS = new DenseMatrix(numUsers, numFactors);
			for (MatrixEntry me : trainMatrix) {
				int u = me.row(); // user
				int j = me.column(); // item
				double ruj = me.get(); // rating
				// To speed up, directly access the prediction instead of invoking "pred = predict(u,j)"
				double bu = userBias.get(u);
				double bj = itemBias.get(j);
				double pred = globalMean + bu + bj + DenseMatrix.rowMult(P, u, Q, j);
				// Y
				// implicit feedback from the user's rated items
				List<Integer> nu = userItemsCache.get(u);
				if (nu.size() > 0) {
					double sum = 0;
					for (int i : nu)
						sum += DenseMatrix.rowMult(Y, i, Q, j);
					pred += sum / Math.sqrt(nu.size());
				}
				// W
				// implicit feedback from the user's trusted friends
				List<Integer> tu = userFriendsCache.get(u);
				if (tu.size() > 0) {
					double sum = 0.0;
					for (int v : tu)
						sum += DenseMatrix.rowMult(W, v, Q, j);
					pred += sum / Math.sqrt(tu.size());
				}
				double euj = pred - ruj;
				loss += euj * euj;
				double w_nu = Math.sqrt(nu.size());
				double w_tu = Math.sqrt(tu.size());
				// update factors
				// nu is non-empty here since (u,j) is an observed rating, so w_nu > 0
				double reg_u = 1.0 / w_nu;
				double reg_j = wlr_j.get(j);
				double sgd = euj + regB * reg_u * bu;
				userBias.add(u, -lRate * sgd);
				sgd = euj + regB * reg_j * bj;
				itemBias.add(j, -lRate * sgd);
				loss += regB * reg_u * bu * bu;
				loss += regB * reg_j * bj * bj;
				// normalized sums of Y rows (rated items) and W rows (friends),
				// cached before the per-factor updates below
				double[] sum_ys = new double[numFactors];
				for (int f = 0; f < numFactors; f++) {
					double sum = 0;
					for (int i : nu)
						sum += Y.get(i, f);
					sum_ys[f] = w_nu > 0 ? sum / w_nu : sum;
				}
				double[] sum_ts = new double[numFactors];
				for (int f = 0; f < numFactors; f++) {
					double sum = 0;
					for (int v : tu)
						sum += W.get(v, f);
					sum_ts[f] = w_tu > 0 ? sum / w_tu : sum;
				}
				for (int f = 0; f < numFactors; f++) {
					double puf = P.get(u, f);
					double qjf = Q.get(j, f);
					double delta_u = euj * qjf + regU * reg_u * puf;
					double delta_j = euj * (puf + sum_ys[f] + sum_ts[f]) + regI * reg_j * qjf;
					PS.add(u, f, delta_u);
					Q.add(j, f, -lRate * delta_j);
					loss += regU * reg_u * puf * puf + regI * reg_j * qjf * qjf;
					for (int i : nu) {
						double yif = Y.get(i, f);
						double reg_yi = wlr_j.get(i);
						double delta_y = euj * qjf / w_nu + regI * reg_yi * yif;
						Y.add(i, f, -lRate * delta_y);
						loss += regI * reg_yi * yif * yif;
					}
					// update wvf
					for (int v : tu) {
						double wvf = W.get(v, f);
						double reg_v = wlr_tc.get(v);
						double delta_t = euj * qjf / w_tu + regU * reg_v * wvf;
						WS.add(v, f, delta_t);
						loss += regU * reg_v * wvf * wvf;
					}
				}
			}
			// trust part: t_uv modeled as p_u . w_v
			for (MatrixEntry me : socialMatrix) {
				int u = me.row();
				int v = me.column();
				double tuv = me.get();
				if (tuv == 0)
					continue;
				double pred = DenseMatrix.rowMult(P, u, W, v);
				double eut = pred - tuv;
				loss += regS * eut * eut;
				double csgd = regS * eut;
				double reg_u = wlr_tr.get(u);
				for (int f = 0; f < numFactors; f++) {
					double puf = P.get(u, f);
					double wvf = W.get(v, f);
					PS.add(u, f, csgd * wvf + regS * reg_u * puf);
					WS.add(v, f, csgd * puf);
					loss += regS * reg_u * puf * puf;
				}
			}
			P = P.add(PS.scale(-lRate));
			W = W.add(WS.scale(-lRate));
			loss *= 0.5;
			if (isConverged(iter))
				break;
		}// end of training
	}
	@Override
	public double predict(int u, int j) throws Exception {
		// base biased-MF prediction plus rating- and trust-based implicit feedback
		double pred = globalMean + userBias.get(u) + itemBias.get(j) + DenseMatrix.rowMult(P, u, Q, j);
		// Y
		List<Integer> nu = userItemsCache.get(u);
		if (nu.size() > 0) {
			double sum = 0;
			for (int i : nu)
				sum += DenseMatrix.rowMult(Y, i, Q, j);
			pred += sum / Math.sqrt(nu.size());
		}
		// W
		List<Integer> tu = userFriendsCache.get(u);
		if (tu.size() > 0) {
			double sum = 0.0;
			for (int v : tu)
				sum += DenseMatrix.rowMult(W, v, Q, j);
			pred += sum / Math.sqrt(tu.size());
		}
		return pred;
	}
}
librec | librec-master/librec/src/main/java/librec/rating/BPMF.java | // Copyright (C) 2014 Guibing Guo
//
// This file is part of LibRec.
//
// LibRec is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// LibRec is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with LibRec. If not, see <http://www.gnu.org/licenses/>.
//
package librec.rating;
import librec.data.Configuration;
import librec.data.DenseMatrix;
import librec.data.DenseVector;
import librec.data.MatrixEntry;
import librec.data.SparseMatrix;
import librec.data.SparseVector;
import librec.intf.IterativeRecommender;
import librec.util.Randoms;
import librec.util.Strings;
/**
* Salakhutdinov and Mnih, <strong>Bayesian Probabilistic Matrix Factorization using Markov Chain Monte Carlo</strong>,
* ICML 2008.
*
* <p>
* Matlab version is provided by the authors via <a href="http://www.utstat.toronto.edu/~rsalakhu/BPMF.html">this
* link</a>. This implementation is modified from the BayesianPMF by the PREA package.
* </p>
*
* @author guoguibing
*
*/
@Configuration("factors, iters")
public class BPMF extends IterativeRecommender {
	/**
	 * @param trainMatrix training ratings
	 * @param testMatrix  test ratings
	 * @param fold        current fold of cross validation
	 */
	public BPMF(SparseMatrix trainMatrix, SparseMatrix testMatrix, int fold) {
		super(trainMatrix, testMatrix, fold);
		lRate = -1; // no learning rate: BPMF samples the posterior instead of doing gradient descent
	}
	/**
	 * Gibbs sampling: alternately (1) samples user/item hyper-parameters (mean and
	 * precision of the Gaussian priors) from their Normal-Wishart posteriors, and
	 * (2) samples every user and item feature vector from its Gaussian posterior.
	 */
	@Override
	protected void buildModel() throws Exception {
		// Initialize hierarchical priors
		int beta = 2; // observation noise (precision)
		DenseVector mu_u = new DenseVector(numFactors);
		DenseVector mu_m = new DenseVector(numFactors);
		// parameters of Inv-Whishart distribution
		DenseMatrix WI_u = DenseMatrix.eye(numFactors);
		int b0_u = 2;
		int df_u = numFactors;
		DenseVector mu0_u = new DenseVector(numFactors);
		DenseMatrix WI_m = DenseMatrix.eye(numFactors);
		int b0_m = 2;
		int df_m = numFactors;
		DenseVector mu0_m = new DenseVector(numFactors);
		// initializing Bayesian PMF using MAP solution found by PMF
		P = new DenseMatrix(numUsers, numFactors);
		Q = new DenseMatrix(numItems, numFactors);
		P.init(0, 1);
		Q.init(0, 1);
		for (int f = 0; f < numFactors; f++) {
			mu_u.set(f, P.columnMean(f));
			mu_m.set(f, Q.columnMean(f));
		}
		// alpha_u / alpha_m are the precision (inverse covariance) matrices of the priors
		DenseMatrix alpha_u = P.cov().inv();
		DenseMatrix alpha_m = Q.cov().inv();
		// Iteration:
		DenseVector x_bar = new DenseVector(numFactors);
		DenseVector normalRdn = new DenseVector(numFactors);
		DenseMatrix S_bar, WI_post, lam;
		DenseVector mu_temp;
		double df_upost, df_mpost;
		int M = numUsers, N = numItems;
		for (int iter = 1; iter <= numIters; iter++) {
			// Sample from user hyper parameters:
			for (int f = 0; f < numFactors; f++)
				x_bar.set(f, P.columnMean(f));
			S_bar = P.cov();
			DenseVector mu0_u_x_bar = mu0_u.minus(x_bar);
			DenseMatrix e1e2 = mu0_u_x_bar.outer(mu0_u_x_bar).scale(M * b0_u / (b0_u + M + 0.0));
			WI_post = WI_u.inv().add(S_bar.scale(M)).add(e1e2);
			WI_post = WI_post.inv();
			// symmetrize to guard against numerical asymmetry before sampling
			WI_post = WI_post.add(WI_post.transpose()).scale(0.5);
			df_upost = df_u + M;
			DenseMatrix wishrnd_u = wishart(WI_post, df_upost);
			if (wishrnd_u != null)
				alpha_u = wishrnd_u;
			mu_temp = mu0_u.scale(b0_u).add(x_bar.scale(M)).scale(1 / (b0_u + M + 0.0));
			lam = alpha_u.scale(b0_u + M).inv().cholesky();
			if (lam != null) {
				// draw mu_u ~ N(mu_temp, (b0_u + M) * alpha_u)^-1 via the Cholesky factor
				lam = lam.transpose();
				for (int f = 0; f < numFactors; f++)
					normalRdn.set(f, Randoms.gaussian(0, 1));
				mu_u = lam.mult(normalRdn).add(mu_temp);
			}
			// Sample from item hyper parameters:
			for (int f = 0; f < numFactors; f++)
				x_bar.set(f, Q.columnMean(f));
			S_bar = Q.cov();
			DenseVector mu0_m_x_bar = mu0_m.minus(x_bar);
			DenseMatrix e3e4 = mu0_m_x_bar.outer(mu0_m_x_bar).scale(N * b0_m / (b0_m + N + 0.0));
			WI_post = WI_m.inv().add(S_bar.scale(N)).add(e3e4);
			WI_post = WI_post.inv();
			WI_post = WI_post.add(WI_post.transpose()).scale(0.5);
			df_mpost = df_m + N;
			DenseMatrix wishrnd_m = wishart(WI_post, df_mpost);
			if (wishrnd_m != null)
				alpha_m = wishrnd_m;
			mu_temp = mu0_m.scale(b0_m).add(x_bar.scale(N)).scale(1 / (b0_m + N + 0.0));
			lam = alpha_m.scale(b0_m + N).inv().cholesky();
			if (lam != null) {
				lam = lam.transpose();
				for (int f = 0; f < numFactors; f++)
					normalRdn.set(f, Randoms.gaussian(0, 1));
				mu_m = lam.mult(normalRdn).add(mu_temp);
			}
			// Gibbs updates over user and item feature vectors given hyper parameters:
			// NOTE: in PREA, only 1 iter for gibbs where in the original Matlab code, 2 iters are used.
			for (int gibbs = 0; gibbs < 2; gibbs++) {
				// Infer posterior distribution over all user feature vectors
				for (int u = 0; u < numUsers; u++) {
					// list of items rated by user uu:
					SparseVector rv = trainMatrix.row(u);
					int count = rv.getCount();
					if (count == 0)
						continue;
					// features of items rated by user uu:
					DenseMatrix MM = new DenseMatrix(count, numFactors);
					DenseVector rr = new DenseVector(count);
					int idx = 0;
					for (int j : rv.getIndex()) {
						// ratings are centered around the global mean
						rr.set(idx, rv.get(j) - globalMean);
						for (int f = 0; f < numFactors; f++)
							MM.set(idx, f, Q.get(j, f));
						idx++;
					}
					// posterior covariance and mean of user u's feature vector
					DenseMatrix covar = alpha_u.add((MM.transpose().mult(MM)).scale(beta)).inv();
					DenseVector a = MM.transpose().mult(rr).scale(beta);
					DenseVector b = alpha_u.mult(mu_u);
					DenseVector mean_u = covar.mult(a.add(b));
					lam = covar.cholesky();
					if (lam != null) {
						lam = lam.transpose();
						for (int f = 0; f < numFactors; f++)
							normalRdn.set(f, Randoms.gaussian(0, 1));
						DenseVector w1_P1_u = lam.mult(normalRdn).add(mean_u);
						for (int f = 0; f < numFactors; f++)
							P.set(u, f, w1_P1_u.get(f));
					}
				}
				// Infer posterior distribution over all movie feature vectors
				for (int j = 0; j < numItems; j++) {
					// list of users who rated item ii:
					SparseVector jv = trainMatrix.column(j);
					int count = jv.getCount();
					if (count == 0)
						continue;
					// features of users who rated item ii:
					DenseMatrix MM = new DenseMatrix(count, numFactors);
					DenseVector rr = new DenseVector(count);
					int idx = 0;
					for (int u : jv.getIndex()) {
						rr.set(idx, jv.get(u) - globalMean);
						for (int f = 0; f < numFactors; f++)
							MM.set(idx, f, P.get(u, f));
						idx++;
					}
					DenseMatrix covar = alpha_m.add((MM.transpose().mult(MM)).scale(beta)).inv();
					DenseVector a = MM.transpose().mult(rr).scale(beta);
					DenseVector b = alpha_m.mult(mu_m);
					DenseVector mean_m = covar.mult(a.add(b));
					lam = covar.cholesky();
					if (lam != null) {
						lam = lam.transpose();
						for (int f = 0; f < numFactors; f++)
							normalRdn.set(f, Randoms.gaussian(0, 1));
						DenseVector w1_M1_j = lam.mult(normalRdn).add(mean_m);
						for (int f = 0; f < numFactors; f++)
							Q.set(j, f, w1_M1_j.get(f));
					}
				}
			} // end of gibbs
			// training loss (squared error) is tracked only for the convergence check
			loss = 0;
			for (MatrixEntry me : trainMatrix) {
				int u = me.row();
				int j = me.column();
				double ruj = me.get();
				double pred = predict(u, j);
				double euj = ruj - pred;
				loss += euj * euj;
			}
			loss *= 0.5;
			if (isConverged(iter))
				break;
		}
	}
	/**
	 * Randomly sample a matrix from Wishart Distribution with the given parameters.
	 *
	 * @param scale
	 *            scale parameter for Wishart Distribution.
	 * @param df
	 *            degree of freedom for Wishart Distribution.
	 * @return the sample randomly drawn from the given distribution, or {@code null} if
	 *         the Cholesky decomposition of the scale matrix fails.
	 */
	protected DenseMatrix wishart(DenseMatrix scale, double df) {
		DenseMatrix A = scale.cholesky();
		if (A == null)
			return null;
		int p = scale.numRows();
		DenseMatrix z = new DenseMatrix(p, p);
		for (int i = 0; i < p; i++) {
			for (int j = 0; j < p; j++) {
				z.set(i, j, Randoms.gaussian(0, 1));
			}
		}
		// chi-square draws on the diagonal via gamma samples (Bartlett decomposition)
		SparseVector y = new SparseVector(p);
		for (int i = 0; i < p; i++)
			y.set(i, Randoms.gamma((df - (i + 1)) / 2, 2));
		DenseMatrix B = new DenseMatrix(p, p);
		B.set(0, 0, y.get(0));
		if (p > 1) {
			// rest of diagonal:
			for (int j = 1; j < p; j++) {
				SparseVector zz = new SparseVector(j);
				for (int k = 0; k < j; k++)
					zz.set(k, z.get(k, j));
				B.set(j, j, y.get(j) + zz.inner(zz));
			}
			// first row and column:
			for (int j = 1; j < p; j++) {
				B.set(0, j, z.get(0, j) * Math.sqrt(y.get(0)));
				B.set(j, 0, B.get(0, j)); // mirror
			}
		}
		if (p > 2) {
			for (int j = 2; j < p; j++) {
				for (int i = 1; i <= j - 1; i++) {
					SparseVector zki = new SparseVector(i);
					SparseVector zkj = new SparseVector(i);
					for (int k = 0; k <= i - 1; k++) {
						zki.set(k, z.get(k, i));
						zkj.set(k, z.get(k, j));
					}
					B.set(i, j, z.get(i, j) * Math.sqrt(y.get(i)) + zki.inner(zkj));
					B.set(j, i, B.get(i, j)); // mirror
				}
			}
		}
		return A.transpose().mult(B).mult(A);
	}
	/**
	 * Predict as global mean plus latent-factor interaction (ratings were mean-centered in training).
	 */
	@Override
	public double predict(int u, int j) {
		return globalMean + DenseMatrix.rowMult(P, u, Q, j);
	}
	/** @return the configuration string: "factors, iters" */
	@Override
	public String toString() {
		return Strings.toString(new Object[] { numFactors, numIters });
	}
}
| 9,362 | 27.720859 | 119 | java |
librec | librec-master/librec/src/main/java/librec/rating/CPTF.java | // Copyright (C) 2014-2015 Guibing Guo
//
// This file is part of LibRec.
//
// LibRec is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// LibRec is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with LibRec. If not, see <http://www.gnu.org/licenses/>.
//
package librec.rating;
import librec.data.DenseMatrix;
import librec.data.SparseMatrix;
import librec.data.TensorEntry;
import librec.intf.TensorRecommender;
/**
* CANDECOMP/PARAFAC (CP) Tensor Factorization <br>
*
* Shao W., <strong>Tensor Completion</strong> (Section 3.2), Saarland University.
*
* @author Guo Guibing
*
*/
public class CPTF extends TensorRecommender {
	// dimension-feature matrices: M[d] holds the latent factors of tensor dimension d
	private DenseMatrix[] M;

	public CPTF(SparseMatrix trainMatrix, SparseMatrix testMatrix, int fold) throws Exception {
		super(trainMatrix, testMatrix, fold);
	}

	@Override
	protected void initModel() throws Exception {
		M = new DenseMatrix[numDimensions];
		for (int d = 0; d < numDimensions; d++) {
			M[d] = new DenseMatrix(dimensions[d], numFactors);
			M[d].init(smallValue); // random initialization with small values
			normalize(d); // start from unit-length columns
		}
	}

	/**
	 * Normalize each column of factor matrix M[d] to unit Euclidean length.
	 *
	 * @param d the tensor dimension whose factor matrix is normalized
	 */
	protected void normalize(int d) {
		// column-wise normalization
		for (int f = 0; f < numFactors; f++) {
			double norm = 0;
			for (int r = 0; r < M[d].numRows(); r++) {
				norm += Math.pow(M[d].get(r, f), 2);
			}
			norm = Math.sqrt(norm);
			for (int r = 0; r < M[d].numRows(); r++) {
				M[d].set(r, f, M[d].get(r, f) / norm);
			}
		}
	}

	/**
	 * Learn the factor matrices by (batch) gradient descent over the observed
	 * tensor entries with L2 regularization.
	 */
	@Override
	protected void buildModel() throws Exception {
		// FIX: use "<=" so that exactly numIters iterations run, consistent with
		// the other iterative recommenders; "<" previously ran one iteration less.
		for (int iter = 1; iter <= numIters; iter++) {
			// Ms[d] accumulates the gradient of M[d] over the whole epoch
			DenseMatrix[] Ms = new DenseMatrix[numDimensions];
			for (int d = 0; d < numDimensions; d++) {
				Ms[d] = new DenseMatrix(dimensions[d], numFactors);
			}
			// SGD Optimization
			loss = 0;
			// Step 1: compute gradients
			for (TensorEntry te : trainTensor) {
				int[] keys = te.keys();
				double rate = te.get();
				if (rate <= 0)
					continue;
				double pred = predict(keys);
				double e = rate - pred;
				loss += e * e;
				// compute gradients
				for (int d = 0; d < numDimensions; d++) {
					for (int f = 0; f < numFactors; f++) {
						// product of the f-th factor of all OTHER dimensions
						double sgd = 1;
						for (int dd = 0; dd < numDimensions; dd++) {
							if (dd == d)
								continue;
							sgd *= M[dd].get(keys[dd], f);
						}
						Ms[d].add(keys[d], f, sgd * e);
					}
				}
			}
			// Step 2: update variables
			for (int d = 0; d < numDimensions; d++) {
				// gradient step with L2 shrinkage on each M[d](r, c)
				for (int r = 0; r < M[d].numRows(); r++) {
					for (int c = 0; c < M[d].numColumns(); c++) {
						double Mrc = M[d].get(r, c);
						M[d].add(r, c, lRate * (Ms[d].get(r, c) - reg * Mrc));
						loss += reg * Mrc * Mrc;
					}
				}
			}
			loss *= 0.5;
			if (isConverged(iter))
				break;
		}
	}

	/**
	 * CP model prediction: sum over factors of the product of all dimension factors.
	 *
	 * @param keys indices of the tensor entry, one per dimension
	 * @return the predicted value
	 */
	protected double predict(int[] keys) {
		double pred = 0;
		for (int f = 0; f < numFactors; f++) {
			double prod = 1;
			for (int d = 0; d < numDimensions; d++) {
				prod *= M[d].get(keys[d], f);
			}
			pred += prod;
		}
		return pred;
	}
}
| 3,502 | 22.198675 | 92 | java |
librec | librec-master/librec/src/main/java/librec/rating/UserKNN.java | // Copyright (C) 2014 Guibing Guo
//
// This file is part of LibRec.
//
// LibRec is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// LibRec is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with LibRec. If not, see <http://www.gnu.org/licenses/>.
//
package librec.rating;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import librec.data.Configuration;
import librec.data.DenseVector;
import librec.data.SparseMatrix;
import librec.data.SparseVector;
import librec.data.SymmMatrix;
import librec.intf.Recommender;
import librec.util.Lists;
import librec.util.Stats;
import librec.util.Strings;
/**
* <h3>User-based Nearest Neighbors</h3>
*
* <p>
* It supports both recommendation tasks: (1) rating prediction; and (2) item ranking (by configuring
* {@code item.ranking=on} in the librec.conf). For item ranking, the returned score is the summation of the
* similarities of nearest neighbors.
* </p>
*
* <p>
* When the number of users is extremely large which makes it memory intensive to store/precompute all user-user
* correlations, a trick presented by (Jahrer and Toscher, Collaborative Filtering Ensemble, JMLR 2012) can be applied.
* Specifically, we can use a basic SVD model to obtain user-feature vectors, and then user-user correlations can be
* computed by Eqs (17, 15).
* </p>
*
* @author guoguibing
*
*/
@Configuration("knn, similarity, shrinkage")
public class UserKNN extends Recommender {
	// pairwise user-user correlation matrix
	private SymmMatrix userCorrs;
	// mean training rating per user (falls back to the global mean)
	private DenseVector userMeans;

	public UserKNN(SparseMatrix trainMatrix, SparseMatrix testMatrix, int fold) {
		super(trainMatrix, testMatrix, fold);
	}

	@Override
	protected void initModel() throws Exception {
		userCorrs = buildCorrs(true);
		userMeans = new DenseVector(numUsers);
		for (int u = 0; u < numUsers; u++) {
			SparseVector ratings = trainMatrix.row(u);
			double mean = ratings.getCount() > 0 ? ratings.mean() : globalMean;
			userMeans.set(u, mean);
		}
	}

	@Override
	public double predict(int u, int j) {
		// collect candidate neighbors: users correlated with u who rated item j;
		// for item ranking, neighbors with negative similarity are kept as well
		Map<Integer, Double> neighbors = new HashMap<>();
		SparseVector corrs = userCorrs.row(u);
		for (int v : corrs.getIndex()) {
			double sim = corrs.get(v);
			double rate = trainMatrix.get(v, j);
			if (rate > 0 && (isRankingPred || sim > 0))
				neighbors.put(v, sim);
		}
		// restrict to the k most similar neighbors
		if (knn > 0 && knn < neighbors.size()) {
			List<Map.Entry<Integer, Double>> ranked = Lists.sortMap(neighbors, true);
			List<Map.Entry<Integer, Double>> topK = ranked.subList(0, knn);
			neighbors.clear();
			for (Map.Entry<Integer, Double> kv : topK)
				neighbors.put(kv.getKey(), kv.getValue());
		}
		if (neighbors.isEmpty())
			return isRankingPred ? 0 : globalMean;
		// item ranking: score is the sum of neighbor similarities
		if (isRankingPred)
			return Stats.sum(neighbors.values());
		// rating prediction: mean-centered similarity-weighted average
		double weightedSum = 0, weightSum = 0;
		for (Entry<Integer, Double> en : neighbors.entrySet()) {
			int v = en.getKey();
			double sim = en.getValue();
			double rate = trainMatrix.get(v, j);
			weightedSum += sim * (rate - userMeans.get(v));
			weightSum += Math.abs(sim);
		}
		return weightSum > 0 ? userMeans.get(u) + weightedSum / weightSum : globalMean;
	}

	@Override
	public String toString() {
		return Strings.toString(new Object[] { knn, similarityMeasure, similarityShrinkage });
	}
}
| 3,829 | 28.236641 | 119 | java |
librec | librec-master/librec/src/main/java/librec/rating/RSTE.java | // Copyright (C) 2014 Guibing Guo
//
// This file is part of LibRec.
//
// LibRec is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// LibRec is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with LibRec. If not, see <http://www.gnu.org/licenses/>.
//
package librec.rating;
import librec.data.DenseMatrix;
import librec.data.SparseMatrix;
import librec.data.SparseVector;
import librec.data.VectorEntry;
import librec.intf.SocialRecommender;
import librec.util.Strings;
/**
* Hao Ma, Irwin King and Michael R. Lyu, <strong>Learning to Recommend with Social Trust Ensemble</strong>, SIGIR 2009.<br>
*
* <p>
* This method is quite time-consuming when dealing with the social influence part.
* </p>
*
* @author guoguibing
*
*/
public class RSTE extends SocialRecommender {
	// ensemble weight: alpha * own preference + (1 - alpha) * trusted friends' preference
	private float alpha;
	public RSTE(SparseMatrix trainMatrix, SparseMatrix testMatrix, int fold) {
		super(trainMatrix, testMatrix, fold);
		initByNorm = false;
		alpha = algoOptions.getFloat("-alpha");
	}
	/**
	 * Batch gradient descent: gradients from the rating term and the social
	 * ensemble term are accumulated in PS/QS and applied once per iteration.
	 */
	@Override
	protected void buildModel() throws Exception {
		for (int iter = 1; iter <= numIters; iter++) {
			loss = 0;
			DenseMatrix PS = new DenseMatrix(numUsers, numFactors);
			DenseMatrix QS = new DenseMatrix(numItems, numFactors);
			// ratings
			for (int u : trainMatrix.rows()) {
				SparseVector tu = socialMatrix.row(u);
				int[] tks = tu.getIndex();
				// ws = total trust weight of u's trustees (normalizer of the social part)
				double ws = 0;
				for (int k : tks)
					ws += tu.get(k);
				// sum_us[f] = trust-weighted sum of trustees' factors, reused for all items of u
				double[] sum_us = new double[numFactors];
				for (int f = 0; f < numFactors; f++) {
					for (int k : tks)
						sum_us[f] += tu.get(k) * P.get(k, f);
				}
				for (VectorEntry ve : trainMatrix.row(u)) {
					int j = ve.index();
					double rate = ve.get();
					double ruj = normalize(rate);
					// compute directly to speed up calculation
					double pred1 = DenseMatrix.rowMult(P, u, Q, j);
					double sum = 0.0;
					for (int k : tks)
						sum += tu.get(k) * DenseMatrix.rowMult(P, k, Q, j);
					double pred2 = ws > 0 ? sum / ws : 0;
					// ensemble of own preference and trustees' preference
					double pred = alpha * pred1 + (1 - alpha) * pred2;
					// prediction error
					double euj = g(pred) - ruj;
					loss += euj * euj;
					double csgd = gd(pred) * euj;
					for (int f = 0; f < numFactors; f++) {
						double puf = P.get(u, f);
						double qjf = Q.get(j, f);
						double usgd = alpha * csgd * qjf + regU * puf;
						double jd = ws > 0 ? sum_us[f] / ws : 0;
						double jsgd = csgd * (alpha * puf + (1 - alpha) * jd) + regI * qjf;
						PS.add(u, f, usgd);
						QS.add(j, f, jsgd);
						loss += regU * puf * puf + regI * qjf * qjf;
					}
				}
			}
			// social: gradient of u's factors through users p who trust u
			for (int u : socialMatrix.columns()) {
				SparseVector bu = socialMatrix.column(u);
				for (int p : bu.getIndex()) {
					if (p >= trainMatrix.numRows())
						continue;
					SparseVector pp = trainMatrix.row(p);
					SparseVector tp = socialMatrix.row(p);
					int[] tps = tp.getIndex();
					for (int j : pp.getIndex()) {
						// compute prediction for user-item (p, j)
						double pred1 = DenseMatrix.rowMult(P, p, Q, j);
						double sum = 0.0, ws = 0.0;
						for (int k : tps) {
							double tuk = tp.get(k);
							sum += tuk * DenseMatrix.rowMult(P, k, Q, j);
							ws += tuk;
						}
						double pred2 = ws > 0 ? sum / ws : 0;
						double pred = alpha * pred1 + (1 - alpha) * pred2;
						// double pred = predict(p, j, false);
						double epj = g(pred) - normalize(pp.get(j));
						double csgd = gd(pred) * epj * bu.get(p);
						for (int f = 0; f < numFactors; f++)
							PS.add(u, f, (1 - alpha) * csgd * Q.get(j, f));
					}
				}
			}
			loss *= 0.5;
			// apply the accumulated gradients in one batch step
			P = P.add(PS.scale(-lRate));
			Q = Q.add(QS.scale(-lRate));
			if (isConverged(iter))
				break;
		}
	}
	/**
	 * Predict the preference of user u on item j as the alpha-weighted ensemble
	 * of u's own factors and the trust-weighted average of u's trustees.
	 *
	 * @param bound whether to map the score through the logistic function and
	 *              denormalize it back into the rating range
	 */
	@Override
	public double predict(int u, int j, boolean bound) {
		double pred1 = DenseMatrix.rowMult(P, u, Q, j);
		double sum = 0.0, ws = 0.0;
		SparseVector tu = socialMatrix.row(u);
		for (int k : tu.getIndex()) {
			double tuk = tu.get(k);
			sum += tuk * DenseMatrix.rowMult(P, k, Q, j);
			ws += tuk;
		}
		double pred2 = ws > 0 ? sum / ws : 0;
		double pred = alpha * pred1 + (1 - alpha) * pred2;
		if (bound)
			return denormalize(g(pred));
		return pred;
	}
	@Override
	public String toString() {
		return Strings.toString(new Object[] { initLRate, maxLRate, regU, regI, numFactors, numIters, isBoldDriver,
				alpha }, ",");
	}
}
| 4,772 | 24.66129 | 124 | java |
librec | librec-master/librec/src/main/java/librec/rating/SoRec.java | // Copyright (C) 2014 Guibing Guo
//
// This file is part of LibRec.
//
// LibRec is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// LibRec is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with LibRec. If not, see <http://www.gnu.org/licenses/>.
//
package librec.rating;
import java.util.HashMap;
import java.util.Map;
import librec.data.AddConfiguration;
import librec.data.DenseMatrix;
import librec.data.MatrixEntry;
import librec.data.SparseMatrix;
import librec.intf.SocialRecommender;
/**
* Hao Ma, Haixuan Yang, Michael R. Lyu and Irwin King, <strong>SoRec: Social recommendation using probabilistic matrix
* factorization</strong>, ACM CIKM 2008.
*
* @author guoguibing
*
*/
@AddConfiguration(before = "regC, regZ")
public class SoRec extends SocialRecommender {
	// factor matrix of users in their "trustee" role (Z in the paper)
	private DenseMatrix Z;
	// regC: weight of the social (trust) factorization term; regZ: regularization on Z
	private float regC, regZ;
	// in-/out-degrees of each user in the trust network, precomputed for the weighting
	private Map<Integer, Integer> inDegrees, outDegrees;
	public SoRec(SparseMatrix trainMatrix, SparseMatrix testMatrix, int fold) {
		super(trainMatrix, testMatrix, fold);
		initByNorm = false;
	}
	@Override
	protected void initModel() throws Exception {
		super.initModel();
		Z = new DenseMatrix(numUsers, numFactors);
		Z.init();
		regC = algoOptions.getFloat("-c");
		regZ = algoOptions.getFloat("-z");
		inDegrees = new HashMap<>();
		outDegrees = new HashMap<>();
		for (int u = 0; u < numUsers; u++) {
			int in = socialMatrix.columnSize(u);
			int out = socialMatrix.rowSize(u);
			inDegrees.put(u, in);
			outDegrees.put(u, out);
		}
	}
	/**
	 * Batch gradient descent jointly factorizing the rating matrix (P, Q) and the
	 * trust matrix (P, Z); gradients are accumulated in PS/QS/ZS and applied once
	 * per iteration.
	 */
	@Override
	protected void buildModel() throws Exception {
		for (int iter = 1; iter <= numIters; iter++) {
			loss = 0;
			DenseMatrix PS = new DenseMatrix(numUsers, numFactors);
			DenseMatrix QS = new DenseMatrix(numItems, numFactors);
			DenseMatrix ZS = new DenseMatrix(numUsers, numFactors);
			// ratings
			for (MatrixEntry me : trainMatrix) {
				int u = me.row();
				int j = me.column();
				double ruj = me.get();
				double pred = predict(u, j, false);
				// error on the logistic-mapped prediction vs the normalized rating
				double euj = g(pred) - normalize(ruj);
				loss += euj * euj;
				for (int f = 0; f < numFactors; f++) {
					double puf = P.get(u, f);
					double qjf = Q.get(j, f);
					PS.add(u, f, gd(pred) * euj * qjf + regU * puf);
					QS.add(j, f, gd(pred) * euj * puf + regI * qjf);
					loss += regU * puf * puf + regI * qjf * qjf;
				}
			}
			// friends
			for (MatrixEntry me : socialMatrix) {
				int u = me.row();
				int v = me.column();
				double tuv = me.get(); // tuv ~ cik in the original paper
				if (tuv <= 0)
					continue;
				double pred = DenseMatrix.rowMult(P, u, Z, v);
				int vminus = inDegrees.get(v); // ~ d-(k)
				int uplus = outDegrees.get(u); // ~ d+(i)
				// degree-based confidence weighting of the trust value
				double weight = Math.sqrt(vminus / (uplus + vminus + 0.0));
				double euv = g(pred) - weight * tuv; // weight * tuv ~ cik*
				loss += regC * euv * euv;
				for (int f = 0; f < numFactors; f++) {
					double puf = P.get(u, f);
					double zvf = Z.get(v, f);
					PS.add(u, f, regC * gd(pred) * euv * zvf);
					ZS.add(v, f, regC * gd(pred) * euv * puf + regZ * zvf);
					loss += regZ * zvf * zvf;
				}
			}
			// apply the accumulated gradients in one batch step
			P = P.add(PS.scale(-lRate));
			Q = Q.add(QS.scale(-lRate));
			Z = Z.add(ZS.scale(-lRate));
			loss *= 0.5;
			if (isConverged(iter))
				break;
		}
	}
	/**
	 * Predict the preference of user u on item j.
	 *
	 * @param bounded whether to map the score through the logistic function and
	 *                denormalize it back into the rating range
	 */
	@Override
	public double predict(int u, int j, boolean bounded) {
		double pred = DenseMatrix.rowMult(P, u, Q, j);
		if (bounded)
			return denormalize(g(pred));
		return pred;
	}
	@Override
	public String toString() {
		return regC + ", " + regZ + ", " + super.toString();
	}
}
| 3,961 | 23.918239 | 119 | java |
librec | librec-master/librec/src/main/java/librec/rating/PMF.java | // Copyright (C) 2014 Guibing Guo
//
// This file is part of LibRec.
//
// LibRec is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// LibRec is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with LibRec. If not, see <http://www.gnu.org/licenses/>.
//
package librec.rating;
import librec.data.MatrixEntry;
import librec.data.SparseMatrix;
import librec.intf.IterativeRecommender;
/**
* <ul>
* <li><strong>PMF:</strong> Ruslan Salakhutdinov and Andriy Mnih, Probabilistic Matrix Factorization, NIPS 2008.</li>
* <li><strong>RegSVD:</strong> Arkadiusz Paterek, <strong>Improving Regularized Singular Value Decomposition
* Collaborative Filtering, Proceedings of KDD Cup and Workshop, 2007.</li>
* </ul>
*
* @author Guo Guibing
*
*/
public class PMF extends IterativeRecommender {
public PMF(SparseMatrix rm, SparseMatrix tm, int fold) {
super(rm, tm, fold);
}
@Override
protected void buildModel() throws Exception {
for (int iter = 1; iter <= numIters; iter++) {
loss = 0;
for (MatrixEntry me : trainMatrix) {
int u = me.row(); // user
int j = me.column(); // item
double ruj = me.get();
double puj = predict(u, j, false);
double euj = ruj - puj;
loss += euj * euj;
// update factors
for (int f = 0; f < numFactors; f++) {
double puf = P.get(u, f), qjf = Q.get(j, f);
P.add(u, f, lRate * (euj * qjf - regU * puf));
Q.add(j, f, lRate * (euj * puf - regI * qjf));
loss += regU * puf * puf + regI * qjf * qjf;
}
}
loss *= 0.5;
if (isConverged(iter))
break;
}// end of training
}
}
| 2,043 | 24.55 | 118 | java |
librec | librec-master/librec/src/main/java/librec/rating/RfRec.java | // Copyright (C) 2014-2015 Guibing Guo
//
// This file is part of LibRec.
//
// LibRec is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// LibRec is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with LibRec. If not, see <http://www.gnu.org/licenses/>.
//
package librec.rating;
import librec.data.DenseMatrix;
import librec.data.DenseVector;
import librec.data.MatrixEntry;
import librec.data.SparseMatrix;
import librec.intf.IterativeRecommender;
/**
* Gedikli et al., <strong>RF-Rec: Fast and Accurate Computation of Recommendations based on Rating
* Frequencies</strong>, IEEE (CEC) 2011, Luxembourg, 2011, pp. 50-57. <br>
*
* <strong>Remark:</strong> This implementation does not support half-star ratings.
*
* @author bin wu
*
*/
public class RfRec extends IterativeRecommender {
	/**
	 * The average ratings of users
	 */
	private DenseVector userAverages;
	/**
	 * The average ratings of items
	 */
	private DenseVector itemAverages;
	/**
	 * The number of ratings per rating value per user
	 */
	private DenseMatrix userRatingFrequencies;
	/**
	 * The number of ratings per rating value per item
	 */
	private DenseMatrix itemRatingFrequencies;
	/**
	 * User weights learned by the gradient solver
	 */
	private DenseVector userWeights;
	/** Item weights learned by the gradient solver. */
	private DenseVector itemWeights;

	public RfRec(SparseMatrix trainMatrix, SparseMatrix testMatrix, int fold) {
		super(trainMatrix, testMatrix, fold);
	}

	@Override
	protected void initModel() throws Exception {
		// Calculate the average ratings and initialize the combination weights
		// around the base values of the RF-Rec scheme (0.6 user / 0.4 item).
		userAverages = new DenseVector(numUsers);
		itemAverages = new DenseVector(numItems);
		userWeights = new DenseVector(numUsers);
		itemWeights = new DenseVector(numItems);
		for (int u = 0; u < numUsers; u++) {
			userAverages.set(u, trainMatrix.row(u).mean());
			userWeights.set(u, 0.6 + Math.random() * 0.01);
		}
		for (int j = 0; j < numItems; j++) {
			itemAverages.set(j, trainMatrix.column(j).mean());
			itemWeights.set(j, 0.4 + Math.random() * 0.01);
		}
		// Calculate the frequencies of each rating value per user and per item.
		userRatingFrequencies = new DenseMatrix(numUsers, ratingScale.size());
		itemRatingFrequencies = new DenseMatrix(numItems, ratingScale.size());
		for (MatrixEntry me : trainMatrix) {
			int u = me.row();
			int j = me.column();
			int ruj = (int) me.get();
			userRatingFrequencies.add(u, ruj - 1, 1);
			itemRatingFrequencies.add(j, ruj - 1, 1);
		}
		// FIX: the previous version re-instantiated userWeights/itemWeights here,
		// which reset all weights to zero and discarded the 0.6/0.4
		// initialization above. The vectors must be kept as initialized.
	}

	@Override
	protected void buildModel() throws Exception {
		for (int i = 1; i <= numIters; i++) {
			loss = 0;
			for (MatrixEntry me : trainMatrix) {
				int u = me.row();
				int j = me.column();
				double ruj = me.get();
				double pred = predict(u, j);
				double euj = ruj - pred;
				loss += euj * euj;
				// Gradient-Step on user weights.
				double userWeight = userWeights.get(u) + lRate * (euj - regU * userWeights.get(u));
				userWeights.set(u, userWeight);
				// Gradient-Step on item weights.
				double itemWeight = itemWeights.get(j) + lRate * (euj - regI * itemWeights.get(j));
				itemWeights.set(j, itemWeight);
				loss += regU * userWeights.get(u) * userWeights.get(u) + regI * itemWeights.get(j) * itemWeights.get(j);
			}
			loss *= 0.5;
			if (isConverged(i))
				break;
		}
	}

	/**
	 * Returns 1 if the rating is similar to the rounded average value
	 *
	 * @param avg
	 *            the average
	 * @param rating
	 *            the rating
	 * @return 1 when the values are equal
	 */
	private int isAvgRating(double avg, int rating) {
		return Math.round(avg) == rating ? 1 : 0;
	}

	/**
	 * Predict a rating as the weighted combination of the most frequent rating
	 * value of the user and of the item; falls back to item/user/global averages
	 * when the user or item is unknown.
	 */
	public double predict(int u, int j) {
		double estimate = globalMean;
		float enumeratorUser = 0;
		float denominatorUser = 0;
		float enumeratorItem = 0;
		float denominatorItem = 0;
		if (userRatingFrequencies.row(u).sum() > 0 && itemRatingFrequencies.row(j).sum() > 0 && userAverages.get(u) > 0
				&& itemAverages.get(j) > 0) {
			// Go through all the possible rating values
			for (int r = 0; r < ratingScale.size(); ++r) {
				int ratingValue = (int) Math.round(ratingScale.get(r));
				// user component: frequency + 1, plus a bonus when the value
				// matches the user's rounded average rating
				int tmpUser = 0;
				double frequencyInt = userRatingFrequencies.get(u, ratingValue - 1);
				int frequency = (int) frequencyInt;
				tmpUser = frequency + 1 + isAvgRating(userAverages.get(u), ratingValue);
				enumeratorUser += tmpUser * ratingValue;
				denominatorUser += tmpUser;
				// item component
				int tmpItem = 0;
				frequency = 0;
				frequencyInt = itemRatingFrequencies.get(j, ratingValue - 1);
				frequency = (int) frequencyInt;
				tmpItem = frequency + 1 + isAvgRating(itemAverages.get(j), ratingValue);
				enumeratorItem += tmpItem * ratingValue;
				denominatorItem += tmpItem;
			}
			double w_u = userWeights.get(u);
			double w_i = itemWeights.get(j);
			float pred_ui_user = enumeratorUser / denominatorUser;
			float pred_ui_item = enumeratorItem / denominatorItem;
			estimate = (float) w_u * pred_ui_user + (float) w_i * pred_ui_item;
		} else {
			// if the user or item weren't known in the training phase...
			if (userRatingFrequencies.row(u).sum() == 0 || userAverages.get(u) == 0) {
				double iavg = itemAverages.get(j);
				if (iavg != 0) {
					return iavg;
				} else {
					return globalMean;
				}
			}
			if (itemRatingFrequencies.row(j).sum() == 0 || itemAverages.get(j) == 0) {
				double uavg = userAverages.get(u);
				if (uavg != 0) {
					return uavg;
				} else {
					// Some heuristic -> a bit above the average rating
					return globalMean;
				}
			}
		}
		return estimate;
	}
}
| 6,020 | 28.806931 | 113 | java |
librec | librec-master/librec/src/main/java/librec/ext/AR.java | // Copyright (C) 2014 Guibing Guo
//
// This file is part of LibRec.
//
// LibRec is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// LibRec is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with LibRec. If not, see <http://www.gnu.org/licenses/>.
//
package librec.ext;
import java.util.Map.Entry;
import com.google.common.collect.HashBasedTable;
import com.google.common.collect.Table;
import librec.data.SparseMatrix;
import librec.data.SparseVector;
import librec.data.VectorEntry;
import librec.intf.Recommender;
/**
*
* Choonho Kim and Juntae Kim, <strong>A Recommendation Algorithm Using Multi-Level Association Rules</strong>, WI 2003.
*
* <p>
* Simple Association Rule Recommender: we do not consider the item categories (or multi levels) used in the original
* paper. Besides, we consider all association rules without ruling out weak ones (by setting high support and
* confidence threshold).
* </p>
*
* @author guoguibing
*
*/
public class AR extends Recommender {
	// confidence scores of the association rules: A(x, y) = conf(x => y)
	private Table<Integer, Integer, Double> A;

	public AR(SparseMatrix trainMatrix, SparseMatrix testMatrix, int fold) {
		super(trainMatrix, testMatrix, fold);
		// association rules can only order preferences, not predict rating values
		isRankingPred = true;
	}

	@Override
	protected void initModel() throws Exception {
		super.initModel();
		A = HashBasedTable.create(numItems, numItems);
		userCache = trainMatrix.rowCache(cacheSpec);
	}

	@Override
	protected void buildModel() throws Exception {
		// Each user's rating vector is one transaction. For every ordered item
		// pair (x, y): conf(x => y) = |users rating both x and y| / |users rating x|.
		for (int x = 0; x < numItems; x++) {
			SparseVector raters = trainMatrix.column(x);
			int total = raters.getCount();
			for (int y = 0; y < numItems; y++) {
				int both = 0;
				for (VectorEntry ve : raters) {
					if (trainMatrix.get(ve.index(), y) > 0)
						both++;
				}
				// only store rules that actually occurred
				if (both > 0)
					A.put(x, y, (both + 0.0) / total);
			}
		}
	}

	@Override
	public double ranking(int u, int j) throws Exception {
		// score j by summing, over all rules i => j, the user's rating of i
		// weighted by the rule's confidence
		SparseVector ratings = userCache.get(u);
		double score = 0;
		for (Entry<Integer, Double> rule : A.column(j).entrySet()) {
			score += ratings.get(rule.getKey()) * rule.getValue();
		}
		return score;
	}
}
| 2,879 | 25.422018 | 120 | java |
librec | librec-master/librec/src/main/java/librec/ext/External.java | package librec.ext;
import librec.data.SparseMatrix;
import librec.intf.Recommender;
/**
* <p>
* Suppose that you have some predictive ratings (in "pred.txt") generated by an external recommender (e.g., some
* recommender of MyMediaLite). The predictions are in the format of user-item-prediction. These predictions are
* corresponding to a test set "test.txt" (user-item-held_out_rating). This class (ExternalRecommender) provides you
* with the ability to compute predictive performance by setting the training set as "pred.txt" and the test set as
* "test.txt". <br>
* <br>
*
* <strong>NOTE:</strong> This approach is not applicable to item recommendation. Thank {@literal Marcel Ackermann} for
* bringing this demand to my attention.
* </p>
*
* @author Guo Guibing
*
*/
public class External extends Recommender {

	public External(SparseMatrix trainMatrix, SparseMatrix testMatrix, int fold) {
		super(trainMatrix, testMatrix, fold);

		// externally produced predictions can only be evaluated as ratings;
		// item recommendation (ranking) is not applicable
		isRankingPred = false;
	}

	@Override
	public double predict(int u, int j, boolean bound) throws Exception {
		// the "training" matrix actually holds the external recommender's
		// predictions, so prediction is a plain lookup
		double externalPrediction = trainMatrix.get(u, j);
		return externalPrediction;
	}
}
| 1,149 | 30.944444 | 119 | java |
librec | librec-master/librec/src/main/java/librec/ext/PD.java | // Copyright (C) 2014 Guibing Guo
//
// This file is part of LibRec.
//
// LibRec is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// LibRec is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with LibRec. If not, see <http://www.gnu.org/licenses/>.
//
package librec.ext;
import java.util.HashMap;
import java.util.Map;
import librec.data.SparseMatrix;
import librec.data.SparseVector;
import librec.data.VectorEntry;
import librec.intf.Recommender;
import librec.util.Lists;
/**
* Pennock et al., <strong>Collaborative Filtering by Personality Diagnosis: A Hybrid Memory- and Model-based
* Approach</strong>, UAI 2000.
*
* <p>
* Related Work:
* <ul>
* <a href= "http://www.cs.carleton.edu/cs_comps/0607/recommend/recommender/pd.html">A brief introduction to Personality
* Diagnosis</a></li>
* </p>
*
* @author guoguibing
*
*/
public class PD extends Recommender {

	// standard deviation of the Gaussian rating noise; 2.5 suggested in the paper
	private float sigma;

	// uniform prior probability of each candidate personality (user)
	private double prior;

	public PD(SparseMatrix trainMatrix, SparseMatrix testMatrix, int fold) {
		super(trainMatrix, testMatrix, fold);
	}

	@Override
	protected void initModel() throws Exception {
		prior = 1.0 / numUsers;
		sigma = algoOptions.getFloat("-sigma");
	}

	@Override
	public double predict(int a, int j) {
		SparseVector activeRatings = trainMatrix.row(a);
		SparseVector ratersOfJ = trainMatrix.column(j);

		// posterior (up to normalization) of each rating value h on the scale
		Map<Double, Double> scaleProbs = new HashMap<>();
		for (double h : ratingScale) {
			double prob = 0.0;

			// accumulate evidence from every user i who rated item j
			for (VectorEntry entry : ratersOfJ) {
				int i = entry.index();
				double rij = entry.get();
				SparseVector otherRatings = trainMatrix.row(i);

				// likelihood that user i shares the active user's personality:
				// product of Gaussian kernels over their co-rated items
				double likelihood = 1.0;
				for (VectorEntry ae : activeRatings) {
					double ril = otherRatings.get(ae.index());
					if (ril > 0)
						likelihood *= gaussian(ae.get(), ril, sigma);
				}

				prob += gaussian(h, rij, sigma) * likelihood;
			}

			scaleProbs.put(h, prob * prior);
		}

		// predict the rating value with the highest (unnormalized) posterior
		return Lists.sortMap(scaleProbs, true).get(0).getKey();
	}

	@Override
	public String toString() {
		return super.toString() + "," + sigma;
	}
}
| 2,553 | 24.287129 | 120 | java |
librec | librec-master/librec/src/main/java/librec/ext/Hybrid.java | // Copyright (C) 2014 Guibing Guo
//
// This file is part of LibRec.
//
// LibRec is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// LibRec is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with LibRec. If not, see <http://www.gnu.org/licenses/>.
//
package librec.ext;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import com.google.common.collect.HashBasedTable;
import com.google.common.collect.Table;

import librec.data.SparseMatrix;
import librec.data.SparseVector;
import librec.intf.Recommender;
import librec.util.Lists;
/**
* Zhou et al., <strong>Solving the apparent diversity-accuracy dilemma of recommender systems</strong>, Proceedings of
* the National Academy of Sciences, 2010.
*
* @author guoguibing
*
*/
public class Hybrid extends Recommender {

	// cached ranking scores (user, item) of the most recently queried user
	Table<Integer, Integer, Double> userItemRanks = HashBasedTable.create();
	Table<Integer, Integer, Double> heatScores = HashBasedTable.create();
	Table<Integer, Integer, Double> probScores = HashBasedTable.create();

	// hybridization parameter: lambda = 0 gives pure HeatS, lambda = 1 pure ProbS
	protected float lambda;

	// node degrees (rating counts) in the user-item bipartite graph
	Map<Integer, Integer> userDegrees = new HashMap<>();
	Map<Integer, Integer> itemDegrees = new HashMap<>();

	// normalizers for the separately-normalized basic hybrid
	double maxProb = Double.MIN_VALUE, maxHeat = Double.MIN_VALUE;

	public Hybrid(SparseMatrix trainMatrix, SparseMatrix testMatrix, int fold) {
		super(trainMatrix, testMatrix, fold);

		algoName = "Hybrid (HeatS+ProbS)";
		isRankingPred = true;
		lambda = algoOptions.getFloat("-lambda");
	}

	@Override
	protected void initModel() throws Exception {
		// BUG FIX: userDegrees was declared but never populated, so
		// ranking_basic() and ProbS() threw a NullPointerException when
		// dividing by userDegrees.get(user).
		for (int u = 0; u < numUsers; u++)
			userDegrees.put(u, trainMatrix.row(u).getCount());

		for (int j = 0; j < numItems; j++)
			itemDegrees.put(j, trainMatrix.columnSize(j));
	}

	/**
	 * Basic hybrid: HeatS and ProbS scores are computed independently,
	 * normalized by their respective maxima and linearly combined with weight
	 * (1 - lambda) on HeatS and lambda on ProbS.
	 */
	protected double ranking_basic(int u, int j) {
		// Note that in ranking, we first check a user u, and then check the
		// ranking score of each candidate item; scores are computed lazily,
		// once per user, and cached until a different user is queried.
		if (!heatScores.containsRow(u)) {
			// new user
			heatScores.clear();
			probScores.clear();

			SparseVector uv = trainMatrix.row(u);
			// set for O(1) membership tests (was a List with O(n) contains)
			Set<Integer> items = new HashSet<>(Lists.toList(uv.getIndex()));

			// HeatS step 1: distribute resources to users, including user u
			Map<Integer, Double> userResources = new HashMap<>();
			for (int v = 0; v < numUsers; v++) {
				SparseVector vv = trainMatrix.row(v);
				double sum = 0.0;
				int kv = vv.getCount();
				for (int item : vv.getIndex())
					sum += items.contains(item) ? 1.0 : 0.0;

				userResources.put(v, kv > 0 ? sum / kv : 0.0);
			}

			// HeatS step 2: redistribute resources back to items
			maxHeat = Double.MIN_VALUE;
			for (int i = 0; i < numItems; i++) {
				SparseVector iv = trainMatrix.column(i);
				double sum = 0;
				int ki = iv.getCount();
				for (int user : iv.getIndex())
					sum += userResources.get(user);

				double score = ki > 0 ? sum / ki : 0.0;
				heatScores.put(u, i, score);

				if (score > maxHeat)
					maxHeat = score;
			}

			// ProbS step 1: distribute resources, weighted by item degree
			userResources.clear();
			for (int v = 0; v < numUsers; v++) {
				SparseVector vv = trainMatrix.row(v);
				double sum = 0.0;
				for (int item : vv.getIndex())
					sum += items.contains(item) ? 1.0 / itemDegrees.get(item) : 0.0;

				userResources.put(v, sum);
			}

			// ProbS step 2: redistribute resources, weighted by user degree
			maxProb = Double.MIN_VALUE;
			for (int i = 0; i < numItems; i++) {
				SparseVector iv = trainMatrix.column(i);
				double score = 0;
				for (int user : iv.getIndex())
					score += userResources.get(user) / userDegrees.get(user);

				probScores.put(u, i, score);

				if (score > maxProb)
					maxProb = score;
			}
		}

		return heatScores.contains(u, j) ? heatScores.get(u, j) / maxHeat * (1 - lambda) + probScores.get(u, j)
				/ maxProb * lambda : 0.0;
	}

	/**
	 * Hybrid ranking score of item j for user u: a single resource-spreading
	 * pass whose degree normalizations are blended by lambda (HeatS and ProbS
	 * are the lambda = 0 and lambda = 1 extremes).
	 */
	@Override
	public double ranking(int u, int j) {
		// Note that in ranking, we first check a user u, and then check the
		// ranking score of each candidate item; results are cached per user.
		if (!userItemRanks.containsRow(u)) {
			// new user
			userItemRanks.clear();

			SparseVector uv = trainMatrix.row(u);
			// set for O(1) membership tests (was a List with O(n) contains)
			Set<Integer> items = new HashSet<>(Lists.toList(uv.getIndex()));

			// step 1: distribute resources to users, including user u
			Map<Integer, Double> userResources = new HashMap<>();
			for (int v = 0; v < numUsers; v++) {
				SparseVector vv = trainMatrix.row(v);
				double sum = 0;
				int kv = vv.getCount();
				for (int item : vv.getIndex()) {
					if (items.contains(item))
						sum += 1.0 / Math.pow(itemDegrees.get(item), lambda);
				}

				if (kv > 0)
					userResources.put(v, sum / kv);
			}

			// step 2: redistribute resources to candidate (unrated) items
			for (int i = 0; i < numItems; i++) {
				if (items.contains(i))
					continue;

				int ki = itemDegrees.get(i);
				if (ki == 0) {
					// item unseen in training: no resource can flow to it;
					// avoid the 0/0 = NaN the unguarded normalization produced
					userItemRanks.put(u, i, 0.0);
					continue;
				}

				SparseVector iv = trainMatrix.column(i);
				double sum = 0;
				for (int user : iv.getIndex())
					sum += userResources.containsKey(user) ? userResources.get(user) : 0.0;

				double score = sum / Math.pow(ki, 1 - lambda);
				userItemRanks.put(u, i, score);
			}
		}

		return userItemRanks.contains(u, j) ? userItemRanks.get(u, j) : 0.0;
	}

	/**
	 * Pure ProbS ranking, kept for validity checks against ranking().
	 */
	protected double ProbS(int u, int j) {
		if (!userItemRanks.containsRow(u)) {
			userItemRanks.clear();

			SparseVector uv = trainMatrix.row(u);
			Set<Integer> items = new HashSet<>(Lists.toList(uv.getIndex()));

			// distribute resources to users, weighted by item degree
			Map<Integer, Double> userResources = new HashMap<>();
			for (int v = 0; v < numUsers; v++) {
				SparseVector vv = trainMatrix.row(v);
				double sum = 0;
				for (int item : vv.getIndex()) {
					if (items.contains(item))
						sum += 1.0 / itemDegrees.get(item);
				}

				userResources.put(v, sum);
			}

			// redistribute resources to unrated items, weighted by user degree
			for (int i = 0; i < numItems; i++) {
				if (items.contains(i))
					continue;

				SparseVector iv = trainMatrix.column(i);
				double sum = 0;
				for (int user : iv.getIndex())
					sum += userResources.get(user) / userDegrees.get(user);

				userItemRanks.put(u, i, sum);
			}
		}

		return userItemRanks.contains(u, j) ? userItemRanks.get(u, j) : 0.0;
	}

	@Override
	public String toString() {
		return super.toString() + "," + lambda;
	}
}
| 6,324 | 26.863436 | 119 | java |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.