Example usage for org.apache.mahout.cf.taste.hadoop.item AggregateAndRecommendReducer ITEMS_FILE

List of usage examples for org.apache.mahout.cf.taste.hadoop.item AggregateAndRecommendReducer ITEMS_FILE

Introduction

In this page you can find the example usage for org.apache.mahout.cf.taste.hadoop.item AggregateAndRecommendReducer ITEMS_FILE.

Prototype

String ITEMS_FILE

To view the source code for org.apache.mahout.cf.taste.hadoop.item AggregateAndRecommendReducer ITEMS_FILE, use the Source Link below.

Click Source Link

Usage

From source file:hadoop.api.RecommenderJob.java

License:Apache License

/**
 * Runs the recommendation phase: optionally filters out explicitly excluded
 * items, then runs the aggregate-and-recommend job that turns the partial
 * multiply output into the final per-user recommendation lists.
 *
 * @param args information about the input paths: partialMultiply,
 *             explicitFilterPath, and options such as numRecommendations,
 *             itemsFile, filterFile and booleanData
 * @return 0 on success, -1 if any job fails to be set up or to complete
 */
public int recommender(String[] args) {
    try {
        prepareRecommender(args);
    } catch (IOException e) {
        // Without a successful prepare step the paths derived below are
        // meaningless, so fail fast instead of continuing with bad state.
        e.printStackTrace();
        return -1;
    }
    Path explicitFilterPath = new Path(prepPath, "explicitFilterPath");
    Path partialMultiplyPath = new Path(prepPath, "partialMultiply");
    Path outputPath = getOutputPath();
    String itemsFile = getOption("itemsFile");
    String filterFile = getOption("filterFile");
    // parseBoolean avoids the needless Boolean boxing of Boolean.valueOf(...).
    boolean booleanData = Boolean.parseBoolean(getOption("booleanData"));
    int numRecommendations = Integer.parseInt(getOption("numRecommendations"));

    if (shouldRunNextPhase(parsedArgs, currentPhase)) {
        // Filter out any users we don't care about.
        if (filterFile != null) {
            Job itemFiltering;
            try {
                itemFiltering = prepareJob(new Path(filterFile), explicitFilterPath, TextInputFormat.class,
                        ItemFilterMapper.class, VarLongWritable.class, VarLongWritable.class,
                        ItemFilterAsVectorAndPrefsReducer.class, VarIntWritable.class,
                        VectorAndPrefsWritable.class, SequenceFileOutputFormat.class);
            } catch (IOException e) {
                // The original code continued with a null Job here and crashed
                // with an NPE on waitForCompletion; fail fast instead.
                e.printStackTrace();
                return -1;
            }
            boolean succeeded = false;
            try {
                succeeded = itemFiltering.waitForCompletion(true);
            } catch (IOException | ClassNotFoundException e) {
                e.printStackTrace();
            } catch (InterruptedException e) {
                // Restore the interrupt flag so callers can observe it.
                Thread.currentThread().interrupt();
                e.printStackTrace();
            }
            if (!succeeded) {
                return -1;
            }
        }

        String aggregateAndRecommendInput = partialMultiplyPath.toString();
        if (filterFile != null) {
            aggregateAndRecommendInput += "," + explicitFilterPath;
        }

        Class<? extends OutputFormat> outputFormat = parsedArgs.containsKey("--sequencefileOutput")
                ? SequenceFileOutputFormat.class
                : TextOutputFormat.class;

        // Extract the recommendations.
        Job aggregateAndRecommend;
        try {
            aggregateAndRecommend = prepareJob(new Path(aggregateAndRecommendInput), outputPath,
                    SequenceFileInputFormat.class, PartialMultiplyMapper.class, VarLongWritable.class,
                    PrefAndSimilarityColumnWritable.class,
                    org.apache.mahout.cf.taste.hadoop.item.AggregateAndRecommendReducer.class,
                    VarLongWritable.class, RecommendedItemsWritable.class, outputFormat);
        } catch (IOException e) {
            // Fail fast: getConfiguration() below would NPE on a null Job.
            e.printStackTrace();
            return -1;
        }
        Configuration aggregateAndRecommendConf = aggregateAndRecommend.getConfiguration();
        if (itemsFile != null) {
            aggregateAndRecommendConf.set(hadoop.api.AggregateAndRecommendReducer.ITEMS_FILE, itemsFile);
        }

        if (filterFile != null) {
            try {
                setS3SafeCombinedInputPath(aggregateAndRecommend, getTempPath(), partialMultiplyPath,
                        explicitFilterPath);
            } catch (IOException e) {
                // Best-effort input-path combination; keep the original
                // behavior of logging and continuing with the job as prepared.
                e.printStackTrace();
            }
        }
        setIOSort(aggregateAndRecommend);
        aggregateAndRecommendConf.set(hadoop.api.AggregateAndRecommendReducer.ITEMID_INDEX_PATH,
                new Path(prepPath, PreparePreferenceMatrixJob.ITEMID_INDEX).toString());
        aggregateAndRecommendConf.setInt(hadoop.api.AggregateAndRecommendReducer.NUM_RECOMMENDATIONS,
                numRecommendations);
        aggregateAndRecommendConf.setBoolean(BOOLEAN_DATA, booleanData);
        boolean succeeded = false;
        try {
            succeeded = aggregateAndRecommend.waitForCompletion(true);
        } catch (IOException | ClassNotFoundException e) {
            e.printStackTrace();
        } catch (InterruptedException e) {
            // Restore the interrupt flag so callers can observe it.
            Thread.currentThread().interrupt();
            e.printStackTrace();
        }
        if (!succeeded) {
            return -1;
        }
    }

    return 0;
}