Example usage for org.apache.mahout.cf.taste.impl.recommender.svd SVDRecommender SVDRecommender

List of usage examples for org.apache.mahout.cf.taste.impl.recommender.svd SVDRecommender SVDRecommender

Introduction

On this page you can find example usage for org.apache.mahout.cf.taste.impl.recommender.svd SVDRecommender SVDRecommender.

Prototype

public SVDRecommender(DataModel dataModel, Factorizer factorizer) throws TasteException 

Source Link

Usage

From source file:de.apaxo.bedcon.AnimalFoodRecommender.java

License:Open Source License

/**
 * Demonstrates two Mahout recommenders over the animal/food data model:
 * a Pearson-based user-user recommender and an SVD++ factorization recommender.
 * Results are written to stdout; TasteExceptions are dumped to stderr.
 */
public void initRecommender() {
    try {
        // Pearson correlation between users' rating vectors.
        PearsonCorrelationSimilarity similarity = new PearsonCorrelationSimilarity(model);

        // Sanity-check a few pairwise similarities; expected values were
        // cross-checked against R's cor() on the same rating vectors:
        //   Wolf/Bear:    cor(c(8,3,1), c(8,7,2))  =  0.8196562
        //   Wolf/Rabbit:  cor(c(8,3,1), c(2,1,10)) = -0.6465846
        //   Wolf/Pinguin: cor(c(8,3,1), c(2,10,2)) = -0.2401922
        System.out.println("Similarity between Wolf and Bear: "
                + similarity.userSimilarity(id2thing.toLongID("Wolf"), id2thing.toLongID("Bear")));
        System.out.println("Similarity between Wolf and Rabbit: "
                + similarity.userSimilarity(id2thing.toLongID("Wolf"), id2thing.toLongID("Rabbit")));
        System.out.println("Similarity between Wolf and Pinguin: "
                + similarity.userSimilarity(id2thing.toLongID("Wolf"), id2thing.toLongID("Pinguin")));

        // User-based CF over the 3 nearest neighbours; estimates are the
        // similarity-weighted averages of the neighbours' ratings.
        GenericUserBasedRecommender userBased = new GenericUserBasedRecommender(model,
                new NearestNUserNeighborhood(3, similarity, model), similarity);
        for (RecommendedItem item : userBased.recommend(id2thing.toLongID("Wolf"), 3)) {
            System.out.println("UserBased: Wolf should eat: " + id2thing.toStringID(item.getItemID()) + " Rating: "
                    + item.getValue());
        }

        // Matrix-factorization alternative: SVD++ with 4 features, 1000 iterations.
        SVDRecommender svd = new SVDRecommender(model, new SVDPlusPlusFactorizer(model, 4, 1000));
        for (RecommendedItem item : svd.recommend(id2thing.toLongID("Sheep"), 3)) {
            System.out.println("SVD: Sheep should eat: " + id2thing.toStringID(item.getItemID()) + " Rating: "
                    + item.getValue());
        }
    } catch (TasteException e) {
        // Example code: just report the failure.
        e.printStackTrace();
    }
}

From source file:edu.carleton.comp4601.cf.dao.SimpleDataRecommender.java

License:Open Source License

/**
 * Demonstrates a Pearson-based user-user recommender and an SVD++
 * recommender over the simple data model, printing similarities and
 * recommendations to stdout. TasteExceptions are dumped to stderr.
 */
public void initRecommender() {
    try {
        // Pearson correlation between users' rating vectors.
        PearsonCorrelationSimilarity similarity = new PearsonCorrelationSimilarity(model);

        // Print Alice's similarity to each of the other users.
        System.out.println("Similarity between Alice and User1: "
                + similarity.userSimilarity(id2thing.toLongID("Alice"), id2thing.toLongID("User1")));
        System.out.println("Similarity between Alice and User2: "
                + similarity.userSimilarity(id2thing.toLongID("Alice"), id2thing.toLongID("User2")));
        System.out.println("Similarity between Alice and User3: "
                + similarity.userSimilarity(id2thing.toLongID("Alice"), id2thing.toLongID("User3")));

        // User-based CF over the 3 nearest neighbours.
        GenericUserBasedRecommender userBased = new GenericUserBasedRecommender(model,
                new NearestNUserNeighborhood(3, similarity, model), similarity);
        for (RecommendedItem item : userBased.recommend(id2thing.toLongID("Alice"), 3)) {
            System.out.println("UserBased: Alice should like: " + id2thing.toStringID(item.getItemID())
                    + " Rating: " + item.getValue());
        }

        // Matrix-factorization alternative: SVD++ with 4 features, 1000 iterations.
        SVDRecommender svd = new SVDRecommender(model, new SVDPlusPlusFactorizer(model, 4, 1000));
        for (RecommendedItem item : svd.recommend(id2thing.toLongID("User1"), 3)) {
            System.out.println("SVD: User1 should like: " + id2thing.toStringID(item.getItemID()) + " Rating: "
                    + item.getValue());
        }
    } catch (TasteException e) {
        // Example code: just report the failure.
        e.printStackTrace();
    }
}

From source file:edu.nudt.c6.datasetlinking.mahout.MyRecommenderBuilder.java

License:Apache License

/**
 * Builds a Mahout {@link Recommender} configured by this builder's fields:
 * {@code recommenderType} selects the family (item-based, user-based, SVD,
 * collaborative ranking, or a baseline), while {@code similarityType},
 * {@code neighborhoodType} and the factorizer-type fields select the
 * concrete strategy within that family.
 *
 * @param dataModel the preference data to recommend from
 * @return the configured recommender, or {@code null} for unhandled types
 *         (including RECOMMENDER.LINKDOCUMENT, whose branch is empty)
 * @throws TasteException propagated from Mahout constructors
 */
@Override
public Recommender buildRecommender(DataModel dataModel) throws TasteException {
    if (recommenderType == RECOMMENDER.ITEM) {
        // Item-item similarity selection.
        ItemSimilarity itemSimilarity = null;
        switch (similarityType) {
        case PEARSON:
            itemSimilarity = new PearsonCorrelationSimilarity(dataModel);
            break;
        case PEARSON_WEIGHTED:
            itemSimilarity = new PearsonCorrelationSimilarity(dataModel, Weighting.WEIGHTED);
            break;
        case COSINE:
            itemSimilarity = new UncenteredCosineSimilarity(dataModel);
            break;
        case TANIMOTO:
            itemSimilarity = new TanimotoCoefficientSimilarity(dataModel);
            break;
        case LOGLIKELIHOOD:
            itemSimilarity = new LogLikelihoodSimilarity(dataModel);
            break;
        case CITYBLOCK:
            itemSimilarity = new CityBlockSimilarity(dataModel);
            break;
        case EUCLIDEAN:
            itemSimilarity = new EuclideanDistanceSimilarity(dataModel);
            break;
        case EUCLIDEAN_WEIGHTED:
            itemSimilarity = new EuclideanDistanceSimilarity(dataModel, Weighting.WEIGHTED);
            break;
        case DATASET_VOCABULARY_COSINE:
            try {
                itemSimilarity = new DatasetVocabularySimilarity(dataModel);
            } catch (ClassNotFoundException | IOException e) {
                // NOTE(review): swallowing this leaves itemSimilarity null, which
                // causes an NPE in the recommender constructor below — consider
                // rethrowing (e.g. wrapped in a TasteException) instead.
                e.printStackTrace();
            }
            break;
        case DATASET_CLASS_COSINE:
            try {
                itemSimilarity = new DatasetClassSimilarity(dataModel);
            } catch (ClassNotFoundException | IOException e) {
                // NOTE(review): same null-similarity/NPE risk as above.
                e.printStackTrace();
            }
            break;
        case DATASET_PPROPERTY_COSINE_SUBJECTS:
            try {
                itemSimilarity = new DatasetPropertySubjectsSimilarity(dataModel);
            } catch (ClassNotFoundException | IOException e) {
                // NOTE(review): same null-similarity/NPE risk as above.
                e.printStackTrace();
            }
            break;
        case DATASET_PPROPERTY_COSINE_TRIPLES:
            try {
                itemSimilarity = new DatasetPropertyTriplesSimilarity(dataModel);
            } catch (ClassNotFoundException | IOException e) {
                // NOTE(review): same null-similarity/NPE risk as above.
                e.printStackTrace();
            }
            break;
        default:
            // Fallback similarity for unrecognized types.
            itemSimilarity = new EuclideanDistanceSimilarity(dataModel);
        }

        // pref == true means explicit preference values exist; otherwise use
        // the boolean (presence-only) variant.
        if (pref) {
            return new GenericItemBasedRecommender(dataModel, itemSimilarity);
        } else {
            return new GenericBooleanPrefItemBasedRecommender(dataModel, itemSimilarity);
        }

    } else if (recommenderType == RECOMMENDER.USER) {
        // User-user similarity selection (mirrors the item branch, plus SPEARMAN).
        UserSimilarity userSimilarity = null;
        switch (similarityType) {
        case PEARSON:
            userSimilarity = new PearsonCorrelationSimilarity(dataModel);
            break;
        case PEARSON_WEIGHTED:
            userSimilarity = new PearsonCorrelationSimilarity(dataModel, Weighting.WEIGHTED);
            break;
        case COSINE:
            userSimilarity = new UncenteredCosineSimilarity(dataModel);
            break;
        case SPEARMAN:
            userSimilarity = new SpearmanCorrelationSimilarity(dataModel);
            break;
        case TANIMOTO:
            userSimilarity = new TanimotoCoefficientSimilarity(dataModel);
            break;
        case LOGLIKELIHOOD:
            userSimilarity = new LogLikelihoodSimilarity(dataModel);
            break;
        case CITYBLOCK:
            userSimilarity = new CityBlockSimilarity(dataModel);
            break;
        case EUCLIDEAN:
            userSimilarity = new EuclideanDistanceSimilarity(dataModel);
            break;
        case EUCLIDEAN_WEIGHTED:
            userSimilarity = new EuclideanDistanceSimilarity(dataModel, Weighting.WEIGHTED);
            break;
        case DATASET_VOCABULARY_COSINE:
            try {
                userSimilarity = new DatasetVocabularySimilarity(dataModel);
            } catch (ClassNotFoundException | IOException e) {
                // NOTE(review): swallowing this leaves userSimilarity null, which
                // causes an NPE below — consider rethrowing instead.
                e.printStackTrace();
            }
            break;
        case DATASET_CLASS_COSINE:
            try {
                userSimilarity = new DatasetClassSimilarity(dataModel);
            } catch (ClassNotFoundException | IOException e) {
                // NOTE(review): same null-similarity/NPE risk as above.
                e.printStackTrace();
            }
            break;
        case DATASET_PPROPERTY_COSINE_SUBJECTS:
            try {
                userSimilarity = new DatasetPropertySubjectsSimilarity(dataModel);
            } catch (ClassNotFoundException | IOException e) {
                // NOTE(review): same null-similarity/NPE risk as above.
                e.printStackTrace();
            }
            break;
        case DATASET_PPROPERTY_COSINE_TRIPLES:
            try {
                userSimilarity = new DatasetPropertyTriplesSimilarity(dataModel);
            } catch (ClassNotFoundException | IOException e) {
                // NOTE(review): same null-similarity/NPE risk as above.
                e.printStackTrace();
            }
            break;
        default:
            // Fallback similarity for unrecognized types.
            userSimilarity = new EuclideanDistanceSimilarity(dataModel);
        }

        // Neighborhood: top-N neighbours, or everyone above a similarity threshold.
        UserNeighborhood userNeighborhood = null;
        switch (neighborhoodType) {
        case NEAREST:
            userNeighborhood = new NearestNUserNeighborhood(this.nearestNum, userSimilarity, dataModel);
            break;
        case THRESHOLD:
        default:
            userNeighborhood = new ThresholdUserNeighborhood(this.neighborThreshold, userSimilarity, dataModel);
        }

        if (pref) {
            return new GenericUserBasedRecommender(dataModel, userNeighborhood, userSimilarity);
        } else {
            return new GenericBooleanPrefUserBasedRecommender(dataModel, userNeighborhood, userSimilarity);
        }
    } else if (recommenderType == RECOMMENDER.SVD) {
        // Matrix-factorization recommender; the factorizer variant is configurable.
        AbstractFactorizer factorizer = null;

        switch (SVDfactorizerType) {
        case RatingSGD:
            factorizer = new RatingSGDFactorizer(dataModel, factorNum, iterationNum);
            break;
        case ALSWR:
            factorizer = new ALSWRFactorizer(dataModel, factorNum, lambda, iterationNum);
            break;
        case SVDPlusPlus:
            factorizer = new SVDPlusPlusFactorizer(dataModel, factorNum, iterationNum);
            break;
        case ParallelSGD:
            factorizer = new ParallelSGDFactorizer(dataModel, factorNum, lambda, iterationNum);
            break;
        case MyRatingSGD:
            factorizer = new MyRatingSGDFactorizer(dataModel, factorNum, iterationNum);
            break;
        }

        // NOTE(review): if SVDfactorizerType matches none of the cases above,
        // factorizer is still null here and this constructor will fail.
        return new SVDRecommender(dataModel, factorizer);

    } else if (recommenderType == RECOMMENDER.LINKDOCUMENT) {
        // NOTE(review): intentionally(?) unimplemented — falls through to return null.
    } else if (recommenderType == RECOMMENDER.CollaborativeRanking) {
        AbstractCRFactorizer factorizer = null;

        switch (CRFactorizerType) {
        case BasicLFM:
            try {
                factorizer = new BasicLFMFactorizer(dataModel, factorNum, iterationNum);
            } catch (ClassNotFoundException | IOException e) {
                // NOTE(review): swallowing this leaves factorizer null — NPE risk below.
                e.printStackTrace();
            }
            break;
        case LFMTrans:
            try {
                factorizer = new LFMTransFactorizer(dataModel, factorNum, iterationNum, learningRate);
            } catch (ClassNotFoundException | IOException e) {
                // NOTE(review): same null-factorizer risk as above.
                e.printStackTrace();
            }
            break;
        }
        return new CollaborativeRankingRecommender(dataModel, factorizer);
    } else if (recommenderType == RECOMMENDER.Random) {
        // Baselines for evaluation comparisons.
        return new RandomRecommender(dataModel);
    } else if (recommenderType == RECOMMENDER.ItemAverage) {
        return new ItemAverageRecommender(dataModel);
    } else if (recommenderType == RECOMMENDER.ItemUserAverage) {
        return new ItemUserAverageRecommender(dataModel);
    }

    // Unhandled recommender type.
    return null;

}

From source file:norbert.mynemo.core.recommendation.recommender.SvdBasedRecommender.java

License:Apache License

/**
 * Builds an SVD-based recommender. When factorization reuse is allowed by the
 * configuration, the factorizer is created once, its factorization persisted
 * in RAM, and both are shared by subsequent calls; otherwise a fresh
 * factorizer is built per call.
 *
 * @param dataModel the preference data to recommend from
 * @return an {@link SVDRecommender} over {@code dataModel}
 * @throws TasteException propagated from factorizer/recommender construction
 */
@Override
public Recommender buildRecommender(DataModel dataModel) throws TasteException {
    if (!configuration.allowCachedFactorizationReuse()) {

        // create a new factorizer each time
        return new SVDRecommender(dataModel, createFactorizer(dataModel));

    }

    if (cachedFactorizer == null) {
        // Lazy initialization of the cached factorization.
        // NOTE(review): not thread-safe — two concurrent callers can both see
        // null and factorize twice; confirm this builder is single-threaded.
        // NOTE(review): the cached factorizer uses configuration.getDataModel()
        // rather than the dataModel parameter — verify they are always the same.
        cachedFactorizer = createFactorizer(configuration.getDataModel());
        try {
            persistenceStrategy = new RamPersistenceStrategy();
            persistenceStrategy.maybePersist(cachedFactorizer.factorize());
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }

    return new SVDRecommender(dataModel, cachedFactorizer, persistenceStrategy);
}

From source file:recommender.MyRecommender.java

/**
 * Lazily initializes the SVD recommender: loads settings and profile data,
 * builds an in-memory copy of the MySQL preference model, factorizes it with
 * ALS-WR, and constructs the recommender. Safe to call repeatedly; work is
 * done only while {@code svdRecommender} is null.
 */
public void init() {
    if (svdRecommender == null) {
        try {
            // Load database settings (url, username, password) and the connection
            // pool's maximum connection count (used by ConnectionPool).
            // try-with-resources closes the classpath stream — Properties.load
            // does not close it, so the original code leaked it.
            prop = new Properties();
            try (java.io.InputStream settings = Recommender.class.getResourceAsStream("settings.properties")) {
                prop.load(settings);
            }

            // load settings from MySQL database
            loadSettings();

            // load macro classes and sub classes into bidirectional map
            loadCategories();

            // load users profiles
            loadUsersProfiles();

            // load groups with their priorities
            loadGroups();

            // load groups with their translations
            loadGroupsLangs();

            // JDBC data model backed by the "preferences" table.
            mysql_datasource = new MysqlDataSource();

            mysql_datasource.setServerName(prop.getProperty("db_hostname"));
            mysql_datasource.setUser(prop.getProperty("db_username"));
            mysql_datasource.setPassword(prop.getProperty("db_password"));
            mysql_datasource.setDatabaseName("recommender");

            dm = new MySQLJDBCDataModel(mysql_datasource, "preferences", "user_id", "item_id", "preference",
                    "timestamp");

            // Switching to MEMORY mode: ReloadFromJDBCDataModel loads everything
            // from the database once, so no ConnectionPool is needed and only a
            // single connection is used.
            rdm = new ReloadFromJDBCDataModel((JDBCDataModel) dm);

            // Factorize the rating matrix with Alternating-Least-Squares with
            // Weighted-Regularization (2 features, lambda 0.025, 3 iterations),
            // as described in "Large-scale Collaborative Filtering for the
            // Netflix Prize".
            factorizer = new ALSWRFactorizer(rdm, 2, 0.025, 3);

            // Configure SVD algorithm
            svdRecommender = new SVDRecommender(rdm, factorizer);
        } catch (IOException | TasteException ex) {
            java.util.logging.Logger.getLogger(Recommender.class.getName()).log(Level.SEVERE, null, ex);
        }
    }
}

From source file:smartcityrecommender.Recommender.java

License:Open Source License

/**
 * Lazily initializes the SVD recommender: loads settings and profile data,
 * starts the daily stats logger, builds an in-memory copy of the MySQL
 * preference model, factorizes it with ALS-WR, and constructs the
 * recommender. Safe to call repeatedly; work is done only while
 * {@code svdRecommender} is null.
 */
public static void init() {
    if (svdRecommender == null) {
        try {
            // Load database settings (url, username, password) and the connection
            // pool's maximum connection count (used by ConnectionPool).
            // try-with-resources closes the classpath stream — Properties.load
            // does not close it, so the original code leaked it.
            prop = new Properties();
            try (java.io.InputStream settings = Recommender.class.getResourceAsStream("settings.properties")) {
                prop.load(settings);
            }

            // load settings from MySQL database
            loadSettings();

            // Start logging recommender stats to the database on a schedule.
            scheduledThreadPool = Executors.newScheduledThreadPool(1);
            RecommenderLoggerStatus logger = new RecommenderLoggerStatus();

            scheduledThreadPool.scheduleAtFixedRate(logger, 1, 86400, TimeUnit.SECONDS); // the logging period is 1 day

            // load macro classes and sub classes into bidirectional map
            loadCategories();

            // load users profiles
            loadUsersProfiles();

            // load groups with their priorities
            loadGroups();

            // load groups with their translations
            loadGroupsLangs();

            // JDBC data model backed by the "preferences" table.
            mysql_datasource = new MysqlDataSource();

            mysql_datasource.setServerName(prop.getProperty("db_hostname"));
            mysql_datasource.setUser(prop.getProperty("db_username"));
            mysql_datasource.setPassword(prop.getProperty("db_password"));
            mysql_datasource.setDatabaseName("recommender");

            dm = new MySQLJDBCDataModel(mysql_datasource, "preferences", "user_id", "item_id", "preference",
                    "timestamp");

            // Switching to MEMORY mode: ReloadFromJDBCDataModel loads everything
            // from the database once, so no ConnectionPool is needed and only a
            // single connection is used.
            rdm = new ReloadFromJDBCDataModel((JDBCDataModel) dm);

            // Factorize the rating matrix with Alternating-Least-Squares with
            // Weighted-Regularization (2 features, lambda 0.025, 3 iterations),
            // as described in "Large-scale Collaborative Filtering for the
            // Netflix Prize".
            factorizer = new ALSWRFactorizer(rdm, 2, 0.025, 3);

            // Configure SVD algorithm
            svdRecommender = new SVDRecommender(rdm, factorizer);
        } catch (IOException | TasteException ex) {
            Logger.getLogger(Recommender.class.getName()).log(Level.SEVERE, null, ex);
        }
    }
}

From source file:tv.icntv.recommend.algorithm.test.RecommendFactory.java

License:Apache License

/**
 * Wraps a pre-built {@link Factorizer} in a {@link RecommenderBuilder} so
 * evaluation code can construct an {@link SVDRecommender} per data model.
 *
 * @param factorizer the factorizer to use for every built recommender
 * @return a builder producing SVD recommenders over the supplied model
 * @throws TasteException declared for interface symmetry; not thrown here
 */
public static RecommenderBuilder svdRecommender(final Factorizer factorizer) throws TasteException {
    RecommenderBuilder builder = new RecommenderBuilder() {
        @Override
        public Recommender buildRecommender(DataModel model) throws TasteException {
            // Each build pairs the shared factorizer with the given model.
            return new SVDRecommender(model, factorizer);
        }
    };
    return builder;
}

From source file:uit.tkorg.pr.method.cf.SVDCF.java

/**
 *
 * @param inputFile/*from   ww w . j  a  va  2 s  .  c o  m*/
 * @param n
 * @param numFeatures
 * @param lamda
 * @param numIterations
 * @param outputFile
 * @throws IOException
 * @throws TasteException
 */
/**
 * Recommends the top {@code n} items for every user in the input rating file
 * using an ALS-WR-factorized SVD recommender, writing one
 * "userId,itemId,value" CRLF-terminated line per recommendation.
 *
 * @param inputFile     path to the Mahout FileDataModel ratings file
 * @param n             number of recommendations per user
 * @param numFeatures   number of latent features for ALS-WR
 * @param lamda         regularization parameter for ALS-WR
 * @param numIterations number of ALS-WR iterations
 * @param outputFile    path of the CSV output file (overwritten)
 * @throws IOException    on read/write failure
 * @throws TasteException on recommender failure
 */
public static void SVDRecommendation(String inputFile, int n, int numFeatures, double lamda, int numIterations,
        String outputFile) throws IOException, TasteException {
    DataModel dataModel = new FileDataModel(new File(inputFile));
    Factorizer factorizer = new ALSWRFactorizer(dataModel, numFeatures, lamda, numIterations);
    Recommender svdRecommender = new SVDRecommender(dataModel, factorizer);

    // try-with-resources so the writer is closed (and its buffer flushed) even
    // if recommend() throws — the original leaked it on exception. This also
    // matches the sibling method's resource handling.
    try (BufferedWriter bw = new BufferedWriter(new FileWriter(outputFile))) {
        // Recommend n items for each user
        for (LongPrimitiveIterator iterator = dataModel.getUserIDs(); iterator.hasNext();) {
            long userId = iterator.nextLong();

            // Generate and emit a list of n recommendations for the user
            // (iterating an empty list is a no-op, so no isEmpty guard needed).
            List<RecommendedItem> topItems = svdRecommender.recommend(userId, n);
            for (RecommendedItem recommendedItem : topItems) {
                bw.write(
                        userId + "," + recommendedItem.getItemID() + "," + recommendedItem.getValue() + "\r\n");
            }
        }
    }
}

From source file:uit.tkorg.pr.method.cf.SVDCF.java

/**
 * Computes SVD (ALS-WR) CF rating estimates for every author in the test set,
 * stores each estimate for test-set papers into the author's CF-rating map,
 * and mirrors it to a "userId,itemId,value" CRLF line in the output file.
 *
 * @param inputFile         path to the Mahout FileDataModel ratings file
 * @param numFeatures       number of latent features for ALS-WR
 * @param lamda             regularization parameter for ALS-WR
 * @param numIterations     number of ALS-WR iterations
 * @param authorTestSet     authors to score, keyed by trimmed author id
 * @param paperIdsInTestSet paper ids whose estimates should be kept
 * @param outputFile        CSV output path (deleted first, then rewritten)
 * @throws IOException    on read/write failure
 * @throws TasteException on recommender failure
 */
public static void computeCFRatingAndPutIntoModelForAuthorList(String inputFile, int numFeatures, double lamda,
        int numIterations, HashMap<String, Author> authorTestSet, HashSet<String> paperIdsInTestSet,
        String outputFile) throws IOException, TasteException {
    DataModel dataModel = new FileDataModel(new File(inputFile));
    Factorizer factorizer = new ALSWRFactorizer(dataModel, numFeatures, lamda, numIterations);

    Recommender recommender = new SVDRecommender(dataModel, factorizer);

    // Start from a clean output file; the writer is closed automatically.
    FileUtils.deleteQuietly(new File(outputFile));
    try (BufferedWriter bw = new BufferedWriter(new FileWriter(outputFile))) {
        System.out.println("Number of users:" + authorTestSet.size());
        int processed = 0;
        LongPrimitiveIterator userIds = dataModel.getUserIDs();
        while (userIds.hasNext()) {
            long userId = userIds.nextLong();
            String authorId = String.valueOf(userId).trim();
            // Only score users that belong to the author test set.
            if (!authorTestSet.containsKey(authorId)) {
                continue;
            }
            System.out.println("Computing CF rating value for user no. " + processed);
            // Ask for estimates over the entire item space.
            List<RecommendedItem> ratings = recommender.recommend(userId, dataModel.getNumItems());
            for (RecommendedItem rated : ratings) {
                String paperId = String.valueOf(rated.getItemID()).trim();
                // Keep only papers that are part of the test set.
                if (paperIdsInTestSet.contains(paperId)) {
                    authorTestSet.get(authorId).getCfRatingHM().put(paperId,
                            Float.valueOf(rated.getValue()));
                    bw.write(userId + "," + rated.getItemID() + ","
                            + rated.getValue() + "\r\n");
                }
            }
            processed++;
        }
    }
}