List of usage examples for org.apache.mahout.cf.taste.recommender RecommendedItem getItemID
long getItemID();
From source file:CollaborativeRecommender.java
License:Apache License
/** * Method that creates a list of recommendations. * Gets lists of recommendations from ItemRecommender.java and UserbasedRecommender.java and merges these two list to create one * list of recommendations. This list of recommendations is inserted into the database. * Already rated items are excluded. Depending on the value of the field "add", items viewed at front end may also be excluded. * /* w ww.ja v a 2 s .c om*/ * @return the size of the list of recommendations * @throws TasteException thrown if there is a exception from Mahout */ public int runCollaborativeRecommender() throws TasteException { /* itembased and userbased recommendations arrays initialized */ ArrayList<CollaborativeRecommendation> itembased = new ArrayList<CollaborativeRecommendation>(); ArrayList<CollaborativeRecommendation> userbased = new ArrayList<CollaborativeRecommendation>(); ArrayList<CollaborativeRecommendation> itemremoved = new ArrayList<CollaborativeRecommendation>(); ArrayList<CollaborativeRecommendation> userremoved = new ArrayList<CollaborativeRecommendation>(); /* Both itembased and userbased will be collected to this arraylist */ ArrayList<CollaborativeRecommendation> collaborativeRecommendations = new ArrayList<CollaborativeRecommendation>(); /* Database setup */ DatabaseConnection db = new DatabaseConnection("collaborative_view"); db.setConnection(); db.setDataModel(); DataModel model = db.getDataModel(); /* run the item and user recommenders */ ItemRecommender IR = new ItemRecommender(userId); itembased = IR.RunItemRecommender(model); UserbasedRecommender UR = new UserbasedRecommender(userId); userbased = UR.RunUserbasedRecommender(model); /* Loop through all recommendations average result from user and item based, remove duplicates */ for (CollaborativeRecommendation itemrecommendation : itembased) { float average_recommender_value = 0; for (CollaborativeRecommendation userrecommendation : userbased) { if (itemrecommendation.getItem().getItemID() == 
userrecommendation.getItem().getItemID()) { /* Find the average value if both user and item based has the recommendation */ average_recommender_value = (itemrecommendation.getItem().getValue() + userrecommendation.getItem().getValue()) / 2; /* Add to collaborative list and remove the recommendation from both lists */ collaborativeRecommendations.add(new CollaborativeRecommendation( new GenericRecommendedItem(itemrecommendation.getItem().getItemID(), average_recommender_value), itemrecommendation.getUserId(), "item and user based")); itemremoved.add(itemrecommendation); userremoved.add(userrecommendation); } } } /* remove duplicates present in both lists */ for (CollaborativeRecommendation recommendation : itemremoved) { itembased.remove(recommendation); } for (CollaborativeRecommendation recommendation : userremoved) { userbased.remove(recommendation); } /* add results unique to each list */ for (CollaborativeRecommendation recommendation : itembased) { collaborativeRecommendations.add(recommendation); } for (CollaborativeRecommendation recommendation : userbased) { collaborativeRecommendations.add(recommendation); } /* Sort the final results list */ Collections.sort(collaborativeRecommendations, new CompareCollaborative()); /*Find the stories that the user have rated*/ HashMap<Integer, Integer> ratedStories = db.getRated((int) userId); ArrayList<Integer> frontendStories = new ArrayList<>(); /*Find the stories already present in the recommendations list at front end * These stories should not be recommended again*/ if (add.equals("true")) { frontendStories = db.getStoriesInFrontendArray((int) userId); } /* Take the top 10 recommendations and and prepare to insert them into database */ ArrayList<DatabaseInsertObject> itemsToBeInserted = new ArrayList<>(); ArrayList<Long> idsToBeInserted = new ArrayList<>(); int ranking = 1; Random rand = new Random(); int randomDislikedRanking = rand.nextInt(6) + 5; for (CollaborativeRecommendation recommendation : 
collaborativeRecommendations) { /* To get a story outside of the users preferences, finds the least recommended story */ if (randomDislikedRanking == ranking) { /*Make sure the false recommendation is not already in the front end array or already among the top ten recommendation (may happen if the user doesn't have many not seen/not rated stories left) */ for (int i = 1; i < collaborativeRecommendations.size(); i++) { long dislikedStoryId = collaborativeRecommendations.get(collaborativeRecommendations.size() - i) .getItem().getItemID(); if (!frontendStories.contains((int) dislikedStoryId) && !idsToBeInserted.contains(dislikedStoryId) && ratedStories.get((int) dislikedStoryId) == null) { itemsToBeInserted.add(new DatabaseInsertObject((int) userId, "DF." + dislikedStoryId, "FalseRecommendation", 1, 0, ranking, collaborativeRecommendations .get(collaborativeRecommendations.size() - i).getItem().getValue())); idsToBeInserted.add(dislikedStoryId); System.out.print("False recommend: "); System.out.println(dislikedStoryId); break; } } ranking++; if (ranking > 10) { break; } continue; } /*If the item has not been rated,is not already in the recommendation list at front end or already a false recommendation we insert it*/ if ((ratedStories.get((int) recommendation.getItem().getItemID()) == null) && !frontendStories.contains((int) recommendation.getItem().getItemID()) && !idsToBeInserted.contains(recommendation.getItem().getItemID())) { /*Get the 30 items that had most influence on the recommendation*/ if (recommendation.getExplanation().equals("item")) { List<RecommendedItem> becauseItems = IR.getRecommender().recommendedBecause(userId, recommendation.getItem().getItemID(), 30); int counter = 1; ArrayList<RecommendedItem> explanationItems = new ArrayList<>(); for (RecommendedItem because : becauseItems) { /*Add story to explanation if this story has been rated and the rating is good*/ if (!explanationItems.contains(because) && ratedStories.get((int) because.getItemID()) != 
null && ratedStories.get((int) because.getItemID()) > 2) { explanationItems.add(because); counter++; } if (counter > 3) { break; } } String explanation = db.createExplanation(explanationItems); itemsToBeInserted.add(new DatabaseInsertObject((int) this.userId, "DF." + recommendation.getItem().getItemID(), explanation, 0, 1, ranking, recommendation.getItem().getValue())); idsToBeInserted.add(recommendation.getItem().getItemID()); System.out.println(recommendation.getItem()); ranking++; } else { itemsToBeInserted.add(new DatabaseInsertObject((int) this.userId, "DF." + recommendation.getItem().getItemID(), recommendation.getExplanation(), 0, 1, ranking, recommendation.getItem().getValue())); System.out.println(recommendation.getItem()); ranking++; } if (ranking > 10) { break; } } } /* Put the list of all possible recommendations in the model */ this.recommendations = collaborativeRecommendations; /*Delete the current recommendations stored in stored_story that has not been seen by the user*/ db.deleteRecommendations((int) userId); /* Insert new recommendations into the database */ db.insertUpdateRecommendValues(itemsToBeInserted); /* Close connection */ db.closeConnection(); /* Return number of recommendations possible */ return collaborativeRecommendations.size(); }
From source file:DatabaseConnection.java
License:Apache License
/** * Gets the title of the explanation stories and creates an explanation string. * The string consists of storyId:title-pair, each pair separated by commas * //from w ww.java2 s. c o m * @param explanationItems a list of the items that is the explanation of the recommendation * @return a string with the story-IDs and their title. */ public String createExplanation(ArrayList<RecommendedItem> explanationItems) { String explanation = ""; try { PreparedStatement stmt = connection.prepareStatement("SELECT title FROM story WHERE storyId=?"); for (RecommendedItem item : explanationItems) { stmt.setString(1, "DF." + item.getItemID()); ResultSet rs = stmt.executeQuery(); while (rs.next()) { explanation += "DF." + item.getItemID() + ":" + rs.getString("title") + ","; } } stmt.close(); } catch (SQLException e) { e.printStackTrace(); } /*Remove the last comma*/ explanation = explanation.replaceAll(",$", ""); return explanation; }
From source file:ContentBasedRecommender.java
License:Apache License
/**
 * Creates a list of content-based recommendations and stores the top ones in the database.
 *
 * Builds a GenericItemBasedRecommender from precomputed item-item similarities
 * (similarities.csv via getStorySimilarities()), asks it for a ranked list over all items,
 * then inserts up to 10 not-yet-rated stories. Already rated stories are excluded, as are
 * stories present in the front end array if add="true". One "false recommendation" (a story
 * from the bottom of the ranking) is injected at a random ranking between 5 and 10.
 *
 * @throws TasteException thrown if something went wrong with Mahout
 */
public void runContentBasedRecommender() throws TasteException {
    /* Find out where this file is located */
    try {
        fileLocation = new File(this.getClass().getProtectionDomain().getCodeSource().getLocation().toURI());
    } catch (URISyntaxException e) {
        e.printStackTrace();
    }
    /* "content"+userId is the name of the view we shall create */
    conn.setConnection();
    /* Create a temporary view that includes all preference values for this user */
    conn.createView((int) userId);
    /* Sets the dataModel based on the data in the created view */
    conn.setDataModel();
    DataModel model = conn.getDataModel();
    /* Gets all the info from the similarities.csv file into a list of objects accepted by Mahout */
    Collection<ItemItemSimilarity> sim = getStorySimilarities();
    /* GenericItemBasedRecommender needs an ItemSimilarity-object as input, so create an instance of this class. */
    ItemSimilarity similarity = new GenericItemSimilarity(sim);
    /* Create a new Recommender-instance with our datamodel and story similarities */
    GenericItemBasedRecommender recommender = new GenericItemBasedRecommender(model, similarity);
    /* Compute the recommendations.
     * model.getNumItems() is the number of recommendations we want (we don't really want that many,
     * but we don't know how many of the top items the user already have rated), don't worry about the null,
     * and true tells the recommender that we want to include already known items */
    List<RecommendedItem> recommendations = recommender.recommend(userId, model.getNumItems(), null, true);
    /* Find the stories that the user have rated */
    HashMap<Integer, Integer> ratedStories = conn.getRated((int) userId);
    ArrayList<Integer> frontendStories = new ArrayList<>();
    /* Find the stories already present in the recommendations list at front end.
     * These stories should not be recommended again */
    if (add.equals("true")) {
        frontendStories = conn.getStoriesInFrontendArray((int) userId);
    }
    int ranking = 1;
    Random rand = new Random();
    /* Ranking slot (5..10) where the deliberate "false recommendation" is injected */
    int randomDislikedRanking = rand.nextInt(6) + 5;
    ArrayList<DatabaseInsertObject> itemsToBeInserted = new ArrayList<>();
    ArrayList<Long> idsToBeInserted = new ArrayList<>();
    for (RecommendedItem recommendation : recommendations) {
        /* To get a story outside of the users preferences, finds the least recommended story */
        if (randomDislikedRanking == ranking) {
            /* Make sure the false recommendation is not already in the front end array or already
             * among the top ten recommendations (may happen if the user doesn't have many
             * not seen/not rated stories left) */
            for (int i = 1; i < recommendations.size(); i++) {
                /* Walk backwards from the bottom of the ranked list */
                long dislikedStoryId = recommendations.get(recommendations.size() - i).getItemID();
                if (!frontendStories.contains((int) dislikedStoryId) && !idsToBeInserted.contains(dislikedStoryId)
                        && ratedStories.get((int) dislikedStoryId) == null) {
                    itemsToBeInserted.add(new DatabaseInsertObject((int) userId, "DF." + dislikedStoryId,
                            "FalseRecommendation", 1, 0, ranking,
                            recommendations.get(recommendations.size() - i).getValue()));
                    idsToBeInserted.add(dislikedStoryId);
                    System.out.print("False recommend: ");
                    System.out.println(dislikedStoryId);
                    break;
                }
            }
            ranking++;
            if (ranking > 10) {
                break;
            }
            continue;
        }
        /* If the item has not been rated, is not already in the recommendation list at front end
         * or already a false recommendation we insert it */
        if ((ratedStories.get((int) recommendation.getItemID()) == null)
                && !frontendStories.contains((int) recommendation.getItemID())
                && !idsToBeInserted.contains(recommendation.getItemID())) {
            /* Get the 30 items that had most influence on the recommendation */
            List<RecommendedItem> becauseItems = recommender.recommendedBecause(userId,
                    recommendation.getItemID(), 30);
            int counter = 1;
            ArrayList<RecommendedItem> explanationItems = new ArrayList<>();
            for (RecommendedItem because : becauseItems) {
                /* Add story to explanation if this story has been rated and the rating is good (> 2) */
                if (!explanationItems.contains(because) && ratedStories.get((int) because.getItemID()) != null
                        && ratedStories.get((int) because.getItemID()) > 2) {
                    explanationItems.add(because);
                    counter++;
                }
                /* At most 3 explanation stories */
                if (counter > 3) {
                    break;
                }
            }
            /* Gets the titles of the explanation-stories and creates a string */
            String explanation = conn.createExplanation(explanationItems);
            itemsToBeInserted.add(new DatabaseInsertObject((int) userId, "DF." + recommendation.getItemID(),
                    explanation, 0, 0, ranking, recommendation.getValue()));
            idsToBeInserted.add(recommendation.getItemID());
            System.out.println(recommendation);
            ranking++;
        }
        /* When we got 10 new recommendations, we're happy */
        if (ranking > 10) {
            break;
        }
    }
    this.recommendations = recommendations;
    /* Delete the current recommendations stored in stored_story that has not been seen by the user */
    conn.deleteRecommendations((int) userId);
    /* Insert the 10 items we found */
    conn.insertUpdateRecommendValues(itemsToBeInserted);
    /* Drop our temporary view */
    conn.dropView();
    conn.closeConnection();
}
From source file:be.ugent.tiwi.sleroux.newsrec.newsreccollaborativefiltering.App.java
public static void main(String[] args) throws DaoException, IOException, TasteException { IRatingsDao ratingsDao = new JDBCRatingsDao(); MahoutDataFileWriter fileWriter = new MahoutDataFileWriter(ratingsDao, mahoutInputFile); String[] ids = fileWriter.writeOutputFile(); MahoutTermRecommender recommender = new MahoutTermRecommender(mahoutInputFile); Map<Long, List<RecommendedItem>> recommendations = recommender.makeRecommendations(10); for (Long user : recommendations.keySet()) { List<RecommendedItem> items = recommendations.get(user); System.out.println(user); for (RecommendedItem item : items) { System.out.println(ids[(int) item.getItemID()] + "\t" + item.getValue()); }/* w ww . jav a2 s . c o m*/ System.out.println(""); } RecommendationsToDatabase r2db = new RecommendationsToDatabase(ratingsDao); r2db.store(ids, recommendations); }
From source file:be.ugent.tiwi.sleroux.newsrec.newsreccollaborativefiltering.RecommendationsToDatabase.java
public void store(String[] terms, Map<Long, List<RecommendedItem>> recommendations) throws RatingsDaoException { for (Long user : recommendations.keySet()) { List<RecommendedItem> items = recommendations.get(user); if (items.size() > 0) { Map<String, Double> scoreMap = new HashMap<>(); for (RecommendedItem item : items) { String term = terms[(int) item.getItemID()]; double score = item.getValue(); scoreMap.put(term, score); }/*from w w w .j a va2 s . c o m*/ ratingsDao.giveRating(user, scoreMap); } } }
From source file:businessreco.BusinessReco.java
public static void main(String args[]) { try {/* w ww . j ava 2 s . c om*/ //Loading the DATA; DataModel dm = new FileDataModel(new File( "C:\\Users\\bryce\\Course Work\\3. Full Summer\\Big Data\\Final Project\\Yelp\\FINAL CODE\\Mahout\\data\\busirec_new.csv")); // We use the below line to relate businesses. //ItemSimilarity sim = new LogLikelihoodSimilarity(dm); TanimotoCoefficientSimilarity sim = new TanimotoCoefficientSimilarity((dm)); //Using the below line get recommendations GenericItemBasedRecommender recommender = new GenericItemBasedRecommender(dm, sim); //Looping through every business. for (LongPrimitiveIterator items = dm.getItemIDs(); items.hasNext();) { long itemId = items.nextLong(); // For each business we recommend 3 businesses. List<RecommendedItem> recommendations = recommender.mostSimilarItems(itemId, 2); for (RecommendedItem recommendation : recommendations) { System.out.println(itemId + "," + recommendation.getItemID() + "," + recommendation.getValue()); } } } catch (IOException | TasteException e) { System.out.println(e); } }
From source file:com.anjuke.romar.http.rest.BaseResource.java
License:Apache License
List<Object> wrapRecommendItem(RecommendResultResponse recommendResponse) { List<RecommendedItem> list = recommendResponse.getList(); List<Object> result = new ArrayList<Object>(); for (RecommendedItem item : list) { if (_allowItemStringID) { RecommendStringBean bean = new RecommendStringBean(); bean.setItem(getItemString(item.getItemID())); bean.setValue(item.getValue()); result.add(bean);//w w w. j ava 2s . c o m } else { RecommendBean bean = new RecommendBean(); bean.setItem(item.getItemID()); bean.setValue(item.getValue()); result.add(bean); } } return result; }
From source file:com.buddycloud.channeldirectory.search.handler.common.mahout.ChannelRecommender.java
License:Apache License
/** * Recommends a list of jids of channels that are * related to the user taste.//from w w w.ja v a2 s . c o m * * @param userJid The user jid * @param howMany The number of recommendations * @return A list of recommended channels' jids * @throws TasteException * @throws SQLException */ public RecommendationResponse recommend(String userJid, int howMany) throws TasteException, SQLException { Long userId = recommenderDataModel.toUserId(userJid); if (userId == null) { return new RecommendationResponse(new LinkedList<ChannelData>(), 0); } List<RecommendedItem> recommended = userRecommender.recommend(userId, howMany); List<ChannelData> recommendedChannels = new LinkedList<ChannelData>(); for (RecommendedItem recommendedItem : recommended) { recommendedChannels.add(recommenderDataModel.toChannelData(recommendedItem.getItemID())); } return new RecommendationResponse(recommendedChannels, getPreferenceCount(userId)); }
From source file:com.buddycloud.channeldirectory.search.handler.common.mahout.ChannelRecommender.java
License:Apache License
/** * Recommends a list of jids of channels that are * similar to a given channel.//from w ww. jav a2s.c om * * @param channelJid The channel jid * @param howMany The number of recommendations * @return A list of similar channels' jids * @throws TasteException * @throws SQLException */ public RecommendationResponse getSimilarChannels(String channelJid, int howMany) throws TasteException, SQLException { Long itemId = recommenderDataModel.toChannelId(channelJid); if (itemId == null) { return new RecommendationResponse(new LinkedList<ChannelData>(), 0); } TopItems.Estimator<Long> estimator = new MostSimilarEstimator(itemId, itemSimilarity, null); MostSimilarItemsCandidateItemsStrategy candidateStrategy = new PreferredItemsNeighborhoodCandidateItemsStrategy(); FastIDSet possibleItemIDs = candidateStrategy.getCandidateItems(new long[] { itemId }, recommenderDataModel.getDataModel()); List<RecommendedItem> recommended = TopItems.getTopItems(howMany, possibleItemIDs.iterator(), null, estimator); List<ChannelData> recommendedChannels = new LinkedList<ChannelData>(); for (RecommendedItem recommendedItem : recommended) { recommendedChannels.add(recommenderDataModel.toChannelData(recommendedItem.getItemID())); } return new RecommendationResponse(recommendedChannels, possibleItemIDs.size()); }
From source file:com.msiiplab.recsys.rwr.GLRecommenderIRStatsEvaluator.java
License:Apache License
/**
 * Runs an IR-style evaluation over pre-built cross-validation folds and returns the
 * averaged statistics (precision, R-precision, recall, fall-out, nDCG, rating-nDCG,
 * reach, and macro/micro DOA and inner-DOA) across all folds.
 *
 * All items present in a user's testing fold are treated as that user's relevant items.
 *
 * NOTE(review): relevanceThreshold is accepted but never referenced in this body, and
 * evaluationPercentage is only validated, never applied — confirm whether that is intended.
 *
 * @param recommenderBuilder builds a Recommender from each training fold
 * @param trainingDataModels one training DataModel per fold
 * @param testingDataModels one testing DataModel per fold (same size as training list)
 * @param rescorer optional rescorer passed through to recommend(); may be null
 * @param at cutoff rank for top-N metrics (must be >= 1)
 * @param relevanceThreshold unused in this implementation (see note above)
 * @param evaluationPercentage must be in (0, 1]; validated but otherwise unused here
 * @throws TasteException propagated from the underlying Mahout data models/recommender
 */
public GLIRStatisticsImpl evaluate(RecommenderBuilder recommenderBuilder, List<DataModel> trainingDataModels,
        List<DataModel> testingDataModels, IDRescorer rescorer, int at, double relevanceThreshold,
        double evaluationPercentage) throws TasteException {
    Preconditions.checkArgument(recommenderBuilder != null, "recommenderBuilder is null");
    Preconditions.checkArgument(trainingDataModels != null, "trainingDataModels is null");
    Preconditions.checkArgument(testingDataModels != null, "testingDataModels is null");
    Preconditions.checkArgument(testingDataModels.size() == trainingDataModels.size(),
            "trainingDataModels.size must equals testingDataModels.size");
    Preconditions.checkArgument(at >= 1, "at must be at least 1");
    Preconditions.checkArgument(evaluationPercentage > 0.0 && evaluationPercentage <= 1.0,
            "Invalid evaluationPercentage: %s", evaluationPercentage);
    // num of train/test pair: num of cross validation folds
    int numFolds = trainingDataModels.size();
    // Cross-fold accumulators: each receives one per-fold average per fold.
    RunningAverage CrossValidationPrecision = new GLRunningAverage();
    RunningAverage CrossValidationRPrecision = new GLRunningAverage();
    RunningAverage CrossValidationRecall = new GLRunningAverage();
    RunningAverage CrossValidationFallOut = new GLRunningAverage();
    RunningAverage CrossValidationNDCG = new GLRunningAverage();
    RunningAverage CrossValidationRNDCG = new GLRunningAverage(); // rating-nDCG
    RunningAverage CrossValidationReach = new GLRunningAverage();
    RunningAverage CrossValidationMacroDOA = new GLRunningAverage();
    RunningAverage CrossValidationMicroDOA = new GLRunningAverage();
    RunningAverage CrossValidationMacroInnerDOA = new GLRunningAverage();
    RunningAverage CrossValidationMicroInnerDOA = new GLRunningAverage();
    for (int i_folds = 0; i_folds < numFolds; i_folds++) {
        log.info("fold {}", i_folds);
        DataModel trainDataModel = trainingDataModels.get(i_folds);
        DataModel testDataModel = testingDataModels.get(i_folds);
        // Union of all item ids seen in this fold's train and test models.
        FastIDSet MovieIDs = new FastIDSet();
        LongPrimitiveIterator it_train_temp = trainDataModel.getItemIDs();
        LongPrimitiveIterator it_test_temp = testDataModel.getItemIDs();
        while (it_train_temp.hasNext()) {
            MovieIDs.add(it_train_temp.nextLong());
        }
        while (it_test_temp.hasNext()) {
            MovieIDs.add(it_test_temp.nextLong());
        }
        // NOTE(review): numItems sums the two counts, so items present in both
        // train and test are counted twice here — confirm this is intended for fall-out.
        int numTrainItems = trainDataModel.getNumItems();
        int numTestItems = testDataModel.getNumItems();
        int numItems = numTestItems + numTrainItems;
        // Per-fold, per-user accumulators.
        RunningAverage precision = new GLRunningAverage();
        RunningAverage rPrecision = new GLRunningAverage();
        RunningAverage recall = new GLRunningAverage();
        RunningAverage fallOut = new GLRunningAverage();
        RunningAverage nDCG = new GLRunningAverage();
        RunningAverage rNDCG = new GLRunningAverage();
        RunningAverage macroDOA = new GLRunningAverage();
        RunningAverage microDOA1 = new GLRunningAverage();
        RunningAverage microDOA2 = new GLRunningAverage();
        RunningAverage macroInnerDOA = new GLRunningAverage();
        RunningAverage microInnerDOA1 = new GLRunningAverage();
        RunningAverage microInnerDOA2 = new GLRunningAverage();
        int numUsersRecommendedFor = 0;
        int numUsersWithRecommendations = 0;
        long start = System.currentTimeMillis();
        // Build recommender
        Recommender recommender = recommenderBuilder.buildRecommender(trainDataModel);
        LongPrimitiveIterator it_user = testDataModel.getUserIDs();
        while (it_user.hasNext()) {
            long userID = it_user.nextLong();
            log.info("user {}", userID);
            // Use all in testDataModel as relevant
            FastIDSet learnedItemIDs;
            FastIDSet relevantItemIDs;
            try {
                learnedItemIDs = trainDataModel.getItemIDsFromUser(userID);
                relevantItemIDs = testDataModel.getItemIDsFromUser(userID);
            } catch (NoSuchUserException e1) {
                continue;
            }
            // We excluded zero relevant items situation
            int numRelevantItems = relevantItemIDs.size();
            if (numRelevantItems <= 0) {
                continue;
            }
            // We excluded all prefs for the user that has no pref record in
            // training set
            try {
                trainDataModel.getPreferencesFromUser(userID);
            } catch (NoSuchUserException nsee) {
                continue; // Oops we excluded all prefs for the user -- just move on
            }
            // Recommend items
            List<RecommendedItem> recommendedItems = recommender.recommend(userID, at, rescorer);
            List<RecommendedItem> recommendedItemsAtRelNum = recommender.recommend(userID, numRelevantItems,
                    rescorer);
            PreferenceArray userPreferences = testDataModel.getPreferencesFromUser(userID);
            FastByIDMap<Preference> userPreferenceMap = getPrefereceMap(userPreferences);
            userPreferences.sortByValueReversed();
            // relevantItemIDsAtN only consider top N items as relevant items
            FastIDSet relevantItemIDsAtN = new FastIDSet();
            Iterator<Preference> it_pref = userPreferences.iterator();
            int num_pref = 0;
            while (it_pref.hasNext()) {
                relevantItemIDsAtN.add(it_pref.next().getItemID());
                num_pref++;
                if (num_pref >= at) {
                    break;
                }
            }
            // Compute intersection between recommended items and relevant
            // items
            int intersectionSize = 0;
            int numRecommendedItems = recommendedItems.size();
            for (RecommendedItem recommendedItem : recommendedItems) {
                if (relevantItemIDs.contains(recommendedItem.getItemID())) {
                    intersectionSize++;
                }
            }
            // Precision
            double prec = 0;
            if (numRecommendedItems > 0) {
                prec = (double) intersectionSize / (double) numRecommendedItems;
            }
            precision.addDatum(prec);
            log.info("Precision for user {} is {}", userID, prec);
            // Recall
            double rec = (double) intersectionSize / (double) numRelevantItems;
            recall.addDatum(rec);
            log.info("Recall for user {} is {}", userID, rec);
            // R-precision: precision when recommending exactly |relevant| items
            double rprec = 0;
            int intersectionSizeAtRelNum = 0;
            int numRecommendedItemsAtRelNum = recommendedItemsAtRelNum.size();
            for (RecommendedItem recommendedItem : recommendedItemsAtRelNum) {
                if (relevantItemIDs.contains(recommendedItem.getItemID())) {
                    intersectionSizeAtRelNum++;
                }
            }
            if (numRecommendedItemsAtRelNum > 0) {
                rprec = (double) intersectionSizeAtRelNum / (double) numRelevantItems;
            }
            rPrecision.addDatum(rprec);
            log.info("RPrecision for user {} is {}", userID, rprec);
            // F1 is logged per user but not accumulated into any average.
            double F1 = 0;
            if (prec + rec > 0) {
                F1 = 2 * prec * rec / (prec + rec);
            }
            log.info("F1 for user {} is {}", userID, F1);
            // Fall-out
            double fall = 0;
            int size = numRelevantItems + trainDataModel.getItemIDsFromUser(userID).size();
            if (numRelevantItems < size) {
                fall = (double) (numRecommendedItems - intersectionSize) / (double) (numItems - numRelevantItems);
            }
            fallOut.addDatum(fall);
            log.info("Fallout for user {} is {}", userID, fall);
            // nDCG
            // In computing, assume relevant IDs have relevance ${rating} and others 0
            PreferenceArray userPredictions = getPreferenceArray(recommendedItems, userID);
            double userNDCG = computeNDCG(userPreferences, userPredictions, relevantItemIDs, userPreferenceMap,
                    at);
            double userRNDCG = computeRNDCG(userPreferences, userPredictions, relevantItemIDs, userPreferenceMap,
                    at);
            nDCG.addDatum(userNDCG);
            rNDCG.addDatum(userRNDCG);
            log.info("NDCG for user {} is {}", userID, userNDCG);
            log.info("RNDCG for user {} is {}", userID, userRNDCG);
            // Reach
            numUsersRecommendedFor++;
            if (numRecommendedItems > 0) {
                numUsersWithRecommendations++;
            }
            // DOA (degree of agreement)
            // [Siegel and Castellan, 1988] and [Gori and Pucci, 2007]
            // LongPrimitiveIterator it_movies = MovieIDs.iterator();
            LongPrimitiveIterator it_movies = trainDataModel.getItemIDs();
            long numNW = 0;
            long sumCheckOrder = 0;
            while (it_movies.hasNext()) {
                long itemID = it_movies.nextLong();
                if (!learnedItemIDs.contains(itemID) && !relevantItemIDs.contains(itemID)) {
                    // itemID is in NW_{u_i}: neither trained on nor relevant for this user
                    numNW++;
                    LongPrimitiveIterator it_test = relevantItemIDs.iterator();
                    while (it_test.hasNext()) {
                        long testItemID = it_test.nextLong();
                        // Unknown items default to estimate 0 (exception deliberately ignored).
                        float itemPref = 0;
                        float testItemPref = 0;
                        try {
                            itemPref = recommender.estimatePreference(userID, itemID);
                        } catch (NoSuchItemException e) {
                        }
                        try {
                            testItemPref = recommender.estimatePreference(userID, testItemID);
                        } catch (NoSuchItemException e) {
                        }
                        if (itemPref <= testItemPref) {
                            sumCheckOrder++;
                        }
                    }
                }
            }
            if (numNW > 0 && relevantItemIDs.size() > 0) {
                macroDOA.addDatum((double) sumCheckOrder / (double) (relevantItemIDs.size() * numNW));
                microDOA1.addDatum((double) sumCheckOrder);
                microDOA2.addDatum((double) (relevantItemIDs.size() * numNW));
            }
            // log.info("sumCheckOrder / (numNW * numRelevant) = {} / ({} * {})",
            //         sumCheckOrder, numNW, relevantItemIDs.size());
            // InnerDOA: only check the agreement of order in test set
            LongPrimitiveIterator it_test1 = relevantItemIDs.iterator();
            long sumCheckInnerOrder = 0;
            long sumAll = 0;
            while (it_test1.hasNext()) {
                long itemID1 = it_test1.nextLong();
                LongPrimitiveIterator it_test2 = relevantItemIDs.iterator();
                while (it_test2.hasNext()) {
                    long itemID2 = it_test2.nextLong();
                    if (itemID1 != itemID2) {
                        try {
                            float pref_v1 = testDataModel.getPreferenceValue(userID, itemID1);
                            float pref_v2 = testDataModel.getPreferenceValue(userID, itemID2);
                            float predict_v1 = recommender.estimatePreference(userID, itemID1);
                            float predict_v2 = recommender.estimatePreference(userID, itemID2);
                            // Count pairs where prediction order agrees with true preference order.
                            if ((pref_v1 >= pref_v2 && predict_v1 >= predict_v2)
                                    || (pref_v1 <= pref_v2 && predict_v1 <= predict_v2)) {
                                sumCheckInnerOrder++;
                            }
                            sumAll++;
                        } catch (NoSuchItemException e) {
                            // do nothing, just ignore
                        }
                    }
                }
            }
            if (relevantItemIDs.size() > 1) {
                macroInnerDOA.addDatum((double) sumCheckInnerOrder / (double) sumAll);
                microInnerDOA1.addDatum((double) sumCheckInnerOrder);
                microInnerDOA2.addDatum((double) sumAll);
            }
            // log.info("sumCheckInnerOrder / (|T| * (|T|-1) ) = {} / ({} * {}) = ",
            //         sumCheckInnerOrder, relevantItemIDs.size(), relevantItemIDs.size()-1);
        }
        long end = System.currentTimeMillis();
        // Fold finished: fold averages into the cross-validation accumulators.
        CrossValidationPrecision.addDatum(precision.getAverage());
        CrossValidationRPrecision.addDatum(rPrecision.getAverage());
        CrossValidationRecall.addDatum(recall.getAverage());
        CrossValidationFallOut.addDatum(fallOut.getAverage());
        CrossValidationNDCG.addDatum(nDCG.getAverage());
        CrossValidationRNDCG.addDatum(rNDCG.getAverage());
        CrossValidationReach.addDatum((double) numUsersWithRecommendations / (double) numUsersRecommendedFor);
        CrossValidationMacroDOA.addDatum(macroDOA.getAverage());
        CrossValidationMicroDOA.addDatum(microDOA1.getAverage() / microDOA2.getAverage());
        CrossValidationMacroInnerDOA.addDatum(macroInnerDOA.getAverage());
        CrossValidationMicroInnerDOA.addDatum(microInnerDOA1.getAverage() / microInnerDOA2.getAverage());
        log.info("Evaluated with training/testing set # {} in {}ms", i_folds, end - start);
        System.out.printf("Evaluated with training/testing set # %d in %d ms \n", i_folds, end - start);
        log.info(
                "Precision/R-Precision/recall/fall-out/nDCG/rNDCG/reach/macroDOA/microDOA/macroInnerDOA/microInnerDOA: {} / {} / {} / {} / {} / {} / {} / {} / {} / {} / {}",
                precision.getAverage(), rPrecision.getAverage(), recall.getAverage(), fallOut.getAverage(),
                nDCG.getAverage(), rNDCG.getAverage(),
                (double) numUsersWithRecommendations / (double) numUsersRecommendedFor, macroDOA.getAverage(),
                microDOA1.getAverage() / microDOA2.getAverage(), macroInnerDOA.getAverage(),
                microInnerDOA1.getAverage() / microInnerDOA2.getAverage());
        System.out.printf(
                "Precision/R-Precision/recall/fall-out/nDCG/rNDCG/reach/macroDOA/microDOA/macroInnerDOA/microInnerDOA: %f / %f / %f / %f / %f / %f / %f / %f / %f / %f / %f \n",
                precision.getAverage(), rPrecision.getAverage(), recall.getAverage(), fallOut.getAverage(),
                nDCG.getAverage(), rNDCG.getAverage(),
                (double) numUsersWithRecommendations / (double) numUsersRecommendedFor, macroDOA.getAverage(),
                microDOA1.getAverage() / microDOA2.getAverage(), macroInnerDOA.getAverage(),
                microInnerDOA1.getAverage() / microInnerDOA2.getAverage());
    }
    log.info(
            "Cross Validation Precision/R-Precision/recall/fall-out/nDCG/rNDCG/reach/macroDOA/microDOA: {} / {} / {} / {} / {} / {} / {} / {} / {} / {} / {}",
            CrossValidationPrecision.getAverage(), CrossValidationRPrecision.getAverage(),
            CrossValidationRecall.getAverage(), CrossValidationFallOut.getAverage(),
            CrossValidationNDCG.getAverage(), CrossValidationRNDCG.getAverage(), CrossValidationReach.getAverage(),
            CrossValidationMacroDOA.getAverage(), CrossValidationMicroDOA.getAverage(),
            CrossValidationMacroInnerDOA.getAverage(), CrossValidationMicroInnerDOA.getAverage());
    System.out.printf(
            "Cross Validation: \nPrecision/R-Precision/recall/fall-out/nDCG/rNDCG/reach/macroDOA/microDOA: %f / %f / %f / %f / %f / %f / %f / %f / %f / %f / %f\n",
            CrossValidationPrecision.getAverage(), CrossValidationRPrecision.getAverage(),
            CrossValidationRecall.getAverage(), CrossValidationFallOut.getAverage(),
            CrossValidationNDCG.getAverage(), CrossValidationRNDCG.getAverage(), CrossValidationReach.getAverage(),
            CrossValidationMacroDOA.getAverage(), CrossValidationMicroDOA.getAverage(),
            CrossValidationMacroInnerDOA.getAverage(), CrossValidationMicroInnerDOA.getAverage());
    return new GLIRStatisticsImpl(CrossValidationPrecision.getAverage(), CrossValidationRPrecision.getAverage(),
            CrossValidationRecall.getAverage(), CrossValidationFallOut.getAverage(),
            CrossValidationNDCG.getAverage(), CrossValidationRNDCG.getAverage(), CrossValidationReach.getAverage(),
            CrossValidationMacroDOA.getAverage(), CrossValidationMicroDOA.getAverage(),
            CrossValidationMacroInnerDOA.getAverage(), CrossValidationMicroInnerDOA.getAverage());
}