Example usage for org.apache.commons.configuration PropertiesConfiguration PropertiesConfiguration

Introduction

On this page you can find example usage for the no-argument constructor org.apache.commons.configuration.PropertiesConfiguration().

Prototype

public PropertiesConfiguration() 

Document

Creates an empty PropertiesConfiguration object which can be used to synthesize a new properties file by adding values and then saving it.
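
For example, a new configuration can be built up in memory and then written out to a file. The following is a minimal sketch against the Commons Configuration 1.x API; the key names and output path are illustrative, not taken from the examples below.

import java.io.File;

import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.configuration.PropertiesConfiguration;

public class PropertiesConfigurationExample {
    public static void main(String[] args) throws ConfigurationException {
        // Start with an empty configuration and add values in memory.
        PropertiesConfiguration config = new PropertiesConfiguration();
        config.setProperty("db.host", "localhost"); // illustrative keys
        config.setProperty("db.port", 5432);

        // Write the accumulated properties to a new properties file.
        config.save(new File("example.properties"));
    }
}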

Usage

From source file:ch.epfl.lsir.xin.test.UserBasedCFTest.java

/**
 * @param args
 */
public static void main(String[] args) throws Exception {
    // TODO Auto-generated method stub

    PrintWriter logger = new PrintWriter(".//results//UserBasedCF");
    PropertiesConfiguration config = new PropertiesConfiguration();
    config.setFile(new File(".//conf//UserBasedCF.properties"));
    try {
        config.load();
    } catch (ConfigurationException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }

    logger.println(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + " Read rating data...");
    DataLoaderFile loader = new DataLoaderFile(".//data//MoveLens100k.txt");
    loader.readSimple();
    DataSetNumeric dataset = loader.getDataset();
    System.out.println("Number of ratings: " + dataset.getRatings().size() + " Number of users: "
            + dataset.getUserIDs().size() + " Number of items: " + dataset.getItemIDs().size());
    logger.println("Number of ratings: " + dataset.getRatings().size() + ", Number of users: "
            + dataset.getUserIDs().size() + ", Number of items: " + dataset.getItemIDs().size());
    logger.flush();

    double totalMAE = 0;
    double totalRMSE = 0;
    double totalPrecision = 0;
    double totalRecall = 0;
    double totalMAP = 0;
    double totalNDCG = 0;
    double totalMRR = 0;
    double totalAUC = 0;
    int F = 5;
    logger.println(F + "- folder cross validation.");
    ArrayList<ArrayList<NumericRating>> folders = new ArrayList<ArrayList<NumericRating>>();
    for (int i = 0; i < F; i++) {
        folders.add(new ArrayList<NumericRating>());
    }
    while (dataset.getRatings().size() > 0) {
        int index = new Random().nextInt(dataset.getRatings().size());
        int r = new Random().nextInt(F);
        folders.get(r).add(dataset.getRatings().get(index));
        dataset.getRatings().remove(index);
    }

    for (int folder = 1; folder <= F; folder++) {
        logger.println("Folder: " + folder);
        System.out.println("Folder: " + folder);
        ArrayList<NumericRating> trainRatings = new ArrayList<NumericRating>();
        ArrayList<NumericRating> testRatings = new ArrayList<NumericRating>();
        for (int i = 0; i < folders.size(); i++) {
            if (i == folder - 1)//test data
            {
                testRatings.addAll(folders.get(i));
            } else {//training data
                trainRatings.addAll(folders.get(i));
            }
        }

        //create rating matrix
        HashMap<String, Integer> userIDIndexMapping = new HashMap<String, Integer>();
        HashMap<String, Integer> itemIDIndexMapping = new HashMap<String, Integer>();
        for (int i = 0; i < dataset.getUserIDs().size(); i++) {
            userIDIndexMapping.put(dataset.getUserIDs().get(i), i);
        }
        for (int i = 0; i < dataset.getItemIDs().size(); i++) {
            itemIDIndexMapping.put(dataset.getItemIDs().get(i), i);
        }
        RatingMatrix trainRatingMatrix = new RatingMatrix(dataset.getUserIDs().size(),
                dataset.getItemIDs().size());
        for (int i = 0; i < trainRatings.size(); i++) {
            trainRatingMatrix.set(userIDIndexMapping.get(trainRatings.get(i).getUserID()),
                    itemIDIndexMapping.get(trainRatings.get(i).getItemID()), trainRatings.get(i).getValue());
        }
        trainRatingMatrix.calculateGlobalAverage();
        trainRatingMatrix.calculateUsersMean();
        RatingMatrix testRatingMatrix = new RatingMatrix(dataset.getUserIDs().size(),
                dataset.getItemIDs().size());
        for (int i = 0; i < testRatings.size(); i++) {
            //            if( testRatings.get(i).getValue() < 5 )
            //               continue;
            testRatingMatrix.set(userIDIndexMapping.get(testRatings.get(i).getUserID()),
                    itemIDIndexMapping.get(testRatings.get(i).getItemID()), testRatings.get(i).getValue());
        }
        logger.println("Initialize a user based collaborative filtering recommendation model.");
        UserBasedCF algo = new UserBasedCF(trainRatingMatrix, false,
                ".//localModels//" + config.getString("NAME"));
        algo.setLogger(logger);
        algo.build();//if read local model, no need to build the model
        algo.saveModel(".//localModels//" + config.getString("NAME"));
        logger.println("Save the model.");
        System.out.println(trainRatings.size() + " vs. " + testRatings.size());
        logger.flush();

        //rating prediction accuracy
        double RMSE = 0;
        double MAE = 0;
        double precision = 0;
        double recall = 0;
        double map = 0;
        double ndcg = 0;
        double mrr = 0;
        double auc = 0;
        int count = 0;
        for (int i = 0; i < testRatings.size(); i++) {
            NumericRating rating = testRatings.get(i);
            double prediction = algo.predict(userIDIndexMapping.get(rating.getUserID()),
                    itemIDIndexMapping.get(rating.getItemID()), false);

            if (Double.isNaN(prediction)) {
                System.out.println("no prediction");
                continue;
            }
            if (prediction > algo.getMaxRating())
                prediction = algo.getMaxRating();
            if (prediction < algo.getMinRating())
                prediction = algo.getMinRating();
            MAE = MAE + Math.abs(rating.getValue() - prediction);
            RMSE = RMSE + Math.pow((rating.getValue() - prediction), 2);
            count++;
        }
        MAE = MAE / count;
        RMSE = Math.sqrt(RMSE / count);
        totalMAE = totalMAE + MAE;
        totalRMSE = totalRMSE + RMSE;
        System.out.println("Folder --- MAE: " + MAE + " RMSE: " + RMSE);
        logger.println(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + " Folder --- MAE: "
                + MAE + " RMSE: " + RMSE);
        logger.flush();
        //ranking accuracy
        if (algo.getTopN() > 0) {
            HashMap<Integer, ArrayList<ResultUnit>> results = new HashMap<Integer, ArrayList<ResultUnit>>();
            for (int i = 0; i < testRatingMatrix.getRow(); i++) {
                ArrayList<ResultUnit> rec = algo.getRecommendationList(i);
                if (rec == null)
                    continue;
                int total = testRatingMatrix.getUserRatingNumber(i);
                if (total == 0)//this user is ignored
                    continue;
                results.put(i, rec);
                //               for( Map.Entry<Integer, Double> entry : testRatingMatrix.getRatingMatrix().get(i).entrySet() )
                //               {
                //                  System.out.print( entry.getKey() + "(" + entry.getValue() + ") , ");
                //               }
                //               System.out.println();
                //               for( int j = 0 ; j < rec.size() ; j++ )
                //               {
                //                  System.out.print(rec.get(j).getItemIndex() + "(" + rec.get(j).getPrediciton() +
                //                        ") , ");
                //               }
                //               System.out.println("**********");
            }
            RankResultGenerator generator = new RankResultGenerator(results, algo.getTopN(), testRatingMatrix,
                    trainRatingMatrix);
            precision = generator.getPrecisionN();
            totalPrecision = totalPrecision + precision;
            recall = generator.getRecallN();
            totalRecall = totalRecall + recall;
            map = generator.getMAPN();
            totalMAP = totalMAP + map;
            ndcg = generator.getNDCGN();
            totalNDCG = totalNDCG + ndcg;
            mrr = generator.getMRRN();
            totalMRR = totalMRR + mrr;
            auc = generator.getAUC();
            totalAUC = totalAUC + auc;
            System.out.println("Folder --- precision: " + precision + " recall: " + recall + " map: " + map
                    + " ndcg: " + ndcg + " mrr: " + mrr + " auc: " + auc);
            logger.println("Folder --- precision: " + precision + " recall: " + recall + " map: " + map
                    + " ndcg: " + ndcg + " mrr: " + mrr + " auc: " + auc);
        }
    }

    System.out.println("MAE: " + totalMAE / F + " RMSE: " + totalRMSE / F);
    System.out.println("Precision@N: " + totalPrecision / F);
    System.out.println("Recall@N: " + totalRecall / F);
    System.out.println("MAP@N: " + totalMAP / F);
    System.out.println("MRR@N: " + totalMRR / F);
    System.out.println("NDCG@N: " + totalNDCG / F);
    System.out.println("AUC@N: " + totalAUC / F);
    // MovieLens100k
    //MAE: 0.7343907480119425 RMSE: 0.9405808357192891 (MovieLens 100K, shrinkage 25, neighbor size 60, PCC)
    //MAE: 0.7522376630596646 RMSE: 0.9520931265724659 (MovieLens 100K, no shrinkage , neighbor size 40, COSINE)
    logger.println(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + "\n" + "MAE: "
            + totalMAE / F + " RMSE: " + totalRMSE / F + "\n" + "Precision@N: " + totalPrecision / F + "\n"
            + "Recall@N: " + totalRecall / F + "\n" + "MAP@N: " + totalMAP / F + "\n" + "MRR@N: " + totalMRR / F
            + "\n" + "NDCG@N: " + totalNDCG / F + "\n" + "AUC@N: " + totalAUC / F);
    logger.flush();
    logger.close();

}

From source file:com.linkedin.pinot.perf.FilterOperatorBenchmark.java

public static void main(String[] args) throws Exception {
    String rootDir = args[0];
    File[] segmentDirs = new File(rootDir).listFiles();
    String query = args[1];
    AtomicInteger totalDocsMatched = new AtomicInteger(0);
    Pql2Compiler pql2Compiler = new Pql2Compiler();
    BrokerRequest brokerRequest = pql2Compiler.compileToBrokerRequest(query);
    List<Callable<Void>> segmentProcessors = new ArrayList<>();
    long[] timesSpent = new long[segmentDirs.length];
    for (int i = 0; i < segmentDirs.length; i++) {
        File indexSegmentDir = segmentDirs[i];
        System.out.println("Loading " + indexSegmentDir.getName());
        Configuration tableDataManagerConfig = new PropertiesConfiguration();
        List<String> invertedColumns = new ArrayList<>();
        FilenameFilter filter = new FilenameFilter() {

            @Override
            public boolean accept(File dir, String name) {
                return name.endsWith(".bitmap.inv");
            }
        };
        String[] indexFiles = indexSegmentDir.list(filter);
        for (String indexFileName : indexFiles) {
            invertedColumns.add(indexFileName.replace(".bitmap.inv", ""));
        }
        tableDataManagerConfig.setProperty(IndexLoadingConfigMetadata.KEY_OF_LOADING_INVERTED_INDEX,
                invertedColumns);
        IndexLoadingConfigMetadata indexLoadingConfigMetadata = new IndexLoadingConfigMetadata(
                tableDataManagerConfig);
        IndexSegmentImpl indexSegmentImpl = (IndexSegmentImpl) Loaders.IndexSegment.load(indexSegmentDir,
                ReadMode.heap, indexLoadingConfigMetadata);
        segmentProcessors
                .add(new SegmentProcessor(i, indexSegmentImpl, brokerRequest, totalDocsMatched, timesSpent));
    }
    ExecutorService executorService = Executors.newCachedThreadPool();
    for (int run = 0; run < 5; run++) {
        System.out.println("START RUN:" + run);
        totalDocsMatched.set(0);
        long start = System.currentTimeMillis();
        List<Future<Void>> futures = executorService.invokeAll(segmentProcessors);
        for (int i = 0; i < futures.size(); i++) {
            futures.get(i).get();
        }
        long end = System.currentTimeMillis();
        System.out.println("Total docs matched:" + totalDocsMatched + " took:" + (end - start));
        System.out.println("Times spent:" + Arrays.toString(timesSpent));
        System.out.println("END RUN:" + run);
    }
    System.exit(0);
}

From source file:edu.usc.goffish.gofs.tools.GoFSFormat.java

public static void main(String[] args) throws IOException {
    if (args.length < REQUIRED_ARGS) {
        PrintUsageAndQuit(null);
    }

    if (args.length == 1 && args[0].equals("-help")) {
        PrintUsageAndQuit(null);
    }

    Path executableDirectory;
    try {
        executableDirectory = Paths
                .get(GoFSFormat.class.getProtectionDomain().getCodeSource().getLocation().toURI()).getParent();
    } catch (URISyntaxException e) {
        throw new RuntimeException("Unexpected error retrieving executable location", e);
    }
    Path configPath = executableDirectory.resolve(DEFAULT_CONFIG).normalize();

    boolean copyBinaries = false;

    // parse optional arguments
    int i = 0;
    OptArgLoop: for (i = 0; i < args.length - REQUIRED_ARGS; i++) {
        switch (args[i]) {
        case "-config":
            i++;

            try {
                configPath = Paths.get(args[i]);
            } catch (InvalidPathException e) {
                PrintUsageAndQuit("Config file - " + e.getMessage());
            }

            break;
        case "-copyBinaries":
            copyBinaries = true;
            break;
        default:
            break OptArgLoop;
        }
    }

    if (args.length - i < REQUIRED_ARGS) {
        PrintUsageAndQuit(null);
    }

    // finished parsing args
    if (i < args.length) {
        PrintUsageAndQuit("Unrecognized argument \"" + args[i] + "\"");
    }

    // parse config

    System.out.println("Parsing config...");

    PropertiesConfiguration config = new PropertiesConfiguration();
    config.setDelimiterParsingDisabled(true);
    try {
        config.load(Files.newInputStream(configPath));
    } catch (ConfigurationException e) {
        throw new IOException(e);
    }

    // retrieve data nodes
    ArrayList<URI> dataNodes;
    {
        String[] dataNodesArray = config.getStringArray(GOFS_DATANODES_KEY);
        if (dataNodesArray.length == 0) {
            throw new ConversionException("Config must contain key " + GOFS_DATANODES_KEY
                    + " and define at least one data node");
        }

        dataNodes = new ArrayList<>(dataNodesArray.length);

        try {
            for (String node : dataNodesArray) {
                URI dataNodeURI = new URI(node);

                if (!"file".equalsIgnoreCase(dataNodeURI.getScheme())) {
                    throw new ConversionException("config key " + GOFS_DATANODES_KEY + " value \"" + dataNodeURI
                            + "\" has invalid format - data node urls must have 'file' scheme");
                } else if (dataNodeURI.getPath() == null || dataNodeURI.getPath().isEmpty()) {
                    throw new ConversionException("config key " + GOFS_DATANODES_KEY + " value \"" + dataNodeURI
                            + "\" has invalid format - data node urls must have an absolute path specified");
                }

                // ensure uri ends with a slash, so we know it is a directory
                if (!dataNodeURI.getPath().endsWith("/")) {
                    dataNodeURI = dataNodeURI.resolve(dataNodeURI.getPath() + "/");
                }

                dataNodes.add(dataNodeURI);
            }
        } catch (URISyntaxException e) {
            throw new ConversionException(
                    "Config key " + GOFS_DATANODES_KEY + " has invalid format - " + e.getMessage());
        }
    }

    // validate serializer type
    Class<? extends ISliceSerializer> serializerType;
    {
        String serializerTypeName = config.getString(GOFS_SERIALIZER_KEY);
        if (serializerTypeName == null) {
            throw new ConversionException("Config must contain key " + GOFS_SERIALIZER_KEY);
        }

        try {
            serializerType = SliceSerializerProvider.loadSliceSerializerType(serializerTypeName);
        } catch (ReflectiveOperationException e) {
            throw new ConversionException(
                    "Config key " + GOFS_SERIALIZER_KEY + " has invalid format - " + e.getMessage());
        }
    }

    // retrieve name node
    IInternalNameNode nameNode;
    try {
        nameNode = NameNodeProvider.loadNameNodeFromConfig(config, GOFS_NAMENODE_TYPE_KEY,
                GOFS_NAMENODE_LOCATION_KEY);
    } catch (ReflectiveOperationException e) {
        throw new RuntimeException("Unable to load name node", e);
    }

    System.out.println("Contacting name node...");

    // validate name node
    if (!nameNode.isAvailable()) {
        throw new IOException("Name node at " + nameNode.getURI() + " is not available");
    }

    System.out.println("Contacting data nodes...");

    // validate data nodes
    for (URI dataNode : dataNodes) {
        // only attempt ssh if host exists
        if (dataNode.getHost() != null) {
            try {
                SSHHelper.SSH(dataNode, "true");
            } catch (IOException e) {
                throw new IOException("Data node at " + dataNode + " is not available", e);
            }
        }
    }

    // create temporary directory
    Path workingDir = Files.createTempDirectory("gofs_format");
    try {
        // create deploy directory
        Path deployDirectory = Files.createDirectory(workingDir.resolve(DATANODE_DIR_NAME));

        // create empty slice directory
        Files.createDirectory(deployDirectory.resolve(DataNode.DATANODE_SLICE_DIR));

        // copy binaries
        if (copyBinaries) {
            System.out.println("Copying binaries...");
            FileUtils.copyDirectory(executableDirectory.toFile(),
                    deployDirectory.resolve(executableDirectory.getFileName()).toFile());
        }

        // write config file
        Path dataNodeConfigFile = deployDirectory.resolve(DataNode.DATANODE_CONFIG);
        try {
            // create config for every data node and scp deploy folder into place
            for (URI dataNodeParent : dataNodes) {
                URI dataNode = dataNodeParent.resolve(DATANODE_DIR_NAME);

                PropertiesConfiguration dataNodeConfig = new PropertiesConfiguration();
                dataNodeConfig.setDelimiterParsingDisabled(true);
                dataNodeConfig.setProperty(DataNode.DATANODE_INSTALLED_KEY, true);
                dataNodeConfig.setProperty(DataNode.DATANODE_NAMENODE_TYPE_KEY,
                        config.getString(GOFS_NAMENODE_TYPE_KEY));
                dataNodeConfig.setProperty(DataNode.DATANODE_NAMENODE_LOCATION_KEY,
                        config.getString(GOFS_NAMENODE_LOCATION_KEY));
                dataNodeConfig.setProperty(DataNode.DATANODE_LOCALHOSTURI_KEY, dataNode.toString());

                try {
                    dataNodeConfig.save(Files.newOutputStream(dataNodeConfigFile));
                } catch (ConfigurationException e) {
                    throw new IOException(e);
                }

                System.out.println("Formatting data node " + dataNode.toString() + "...");

                // scp everything into place on the data node
                SCPHelper.SCP(deployDirectory, dataNodeParent);

                // update name node
                nameNode.addDataNode(dataNode);
            }

            // update name node
            nameNode.setSerializer(serializerType);
        } catch (Exception e) {
            System.out.println(
                    "ERROR: data node formatting interrupted - name node and data nodes are in an inconsistent state and require clean up");
            throw e;
        }

        System.out.println("GoFS format complete");

    } finally {
        FileUtils.deleteQuietly(workingDir.toFile());
    }
}

From source file:edu.berkeley.sparrow.examples.ProtoFrontendAsync.java

public static void main(String[] args) {
    try {
        OptionParser parser = new OptionParser();
        parser.accepts("c", "configuration file").withRequiredArg().ofType(String.class);
        parser.accepts("help", "print help statement");
        OptionSet options = parser.parse(args);

        if (options.has("help")) {
            parser.printHelpOn(System.out);
            System.exit(-1);
        }

        // Logger configuration: log to the console
        BasicConfigurator.configure();
        LOG.setLevel(Level.DEBUG);

        Configuration conf = new PropertiesConfiguration();

        if (options.has("c")) {
            String configFile = (String) options.valueOf("c");
            conf = new PropertiesConfiguration(configFile);
        }

        Random r = new Random();
        double lambda = conf.getDouble("job_arrival_rate_s", DEFAULT_JOB_ARRIVAL_RATE_S);
        int tasksPerJob = conf.getInt("tasks_per_job", DEFAULT_TASKS_PER_JOB);
        int benchmarkIterations = conf.getInt("benchmark.iterations", DEFAULT_BENCHMARK_ITERATIONS);
        int benchmarkId = conf.getInt("benchmark.id", DEFAULT_TASK_BENCHMARK);

        int schedulerPort = conf.getInt("scheduler_port", SchedulerThrift.DEFAULT_SCHEDULER_THRIFT_PORT);

        TProtocolFactory factory = new TBinaryProtocol.Factory();
        TAsyncClientManager manager = new TAsyncClientManager();

        long lastLaunch = System.currentTimeMillis();
        // Loop and generate tasks launches
        while (true) {
            // Lambda is the arrival rate in S, so we need to multiply the result here by
            // 1000 to convert to ms.
            long delay = (long) (generateInterarrivalDelay(r, lambda) * 1000);
            long curLaunch = lastLaunch + delay;
            long toWait = Math.max(0, curLaunch - System.currentTimeMillis());
            lastLaunch = curLaunch;
            if (toWait == 0) {
                LOG.warn("Generated workload not keeping up with real time.");
            }
            // Sleep until the next scheduled launch; without this the loop
            // would ignore the generated interarrival delays.
            Thread.sleep(toWait);
            List<TTaskSpec> tasks = generateJob(tasksPerJob, benchmarkId, benchmarkIterations);
            TUserGroupInfo user = new TUserGroupInfo();
            user.setUser("*");
            user.setGroup("*");
            TSchedulingRequest req = new TSchedulingRequest();
            req.setApp("testApp");
            req.setTasks(tasks);
            req.setUser(user);

            TNonblockingTransport tr = new TNonblockingSocket("localhost", schedulerPort);
            SchedulerService.AsyncClient client = new SchedulerService.AsyncClient(factory, manager, tr);
            //client.registerFrontend("testApp", new RegisterCallback());
            client.submitJob(req, new SubmitCallback(req, tr));
        }
    } catch (Exception e) {
        LOG.error("Fatal exception", e);
    }
}

From source file:net.sf.mpaxs.test.ImpaxsExecution.java

/**
 *
 * @param args
 */
public static void main(String[] args) {
    Options options = new Options();
    Option[] optionArray = new Option[] {
            OptionBuilder.withArgName("nhosts").hasArg()
                    .withDescription("Number of hosts for parallel processing").create("n"),
            OptionBuilder.withArgName("mjobs").hasArg().withDescription("Number of jobs to run in parallel")
                    .create("m"),
            OptionBuilder.withArgName("runmode").hasArg()
                    .withDescription("The mode in which to operate: one of <ALL,LOCAL,DISTRIBUTED>")
                    .create("r"), //            OptionBuilder.withArgName("gui").
            //            withDescription("Create gui for distributed execution").create("g")
    };
    for (Option opt : optionArray) {
        options.addOption(opt);
    }
    if (args.length == 0) {
        HelpFormatter hf = new HelpFormatter();
        hf.printHelp(StartUp.class.getCanonicalName(), options, true);
        System.exit(1);
    }
    GnuParser gp = new GnuParser();
    int nhosts = 1;
    int mjobs = 10;
    boolean gui = false;
    Mode mode = Mode.ALL;
    try {
        CommandLine cl = gp.parse(options, args);
        if (cl.hasOption("n")) {
            nhosts = Integer.parseInt(cl.getOptionValue("n"));
        }
        if (cl.hasOption("m")) {
            mjobs = Integer.parseInt(cl.getOptionValue("m"));
        }
        if (cl.hasOption("r")) {
            mode = Mode.valueOf(cl.getOptionValue("r"));
        }
        //            if (cl.hasOption("g")) {
        //                gui = true;
        //            }
    } catch (Exception ex) {
        Logger.getLogger(StartUp.class.getName()).log(Level.SEVERE, null, ex);
        HelpFormatter hf = new HelpFormatter();
        hf.printHelp(StartUp.class.getCanonicalName(), options, true);
        System.exit(1);
    }

    String version;
    try {
        version = net.sf.mpaxs.api.Version.getVersion();
        System.out.println("Running mpaxs " + version);
        File computeHostJarLocation = new File(System.getProperty("user.dir"), "mpaxs.jar");
        if (!computeHostJarLocation.exists() || !computeHostJarLocation.isFile()) {
            throw new IOException("Could not locate mpaxs.jar in " + System.getProperty("user.dir"));
        }
        final PropertiesConfiguration cfg = new PropertiesConfiguration();
        //set default execution type
        cfg.setProperty(ConfigurationKeys.KEY_EXECUTION_MODE, ExecutionType.DRMAA);
        //set location of compute host jar
        cfg.setProperty(ConfigurationKeys.KEY_PATH_TO_COMPUTEHOST_JAR, computeHostJarLocation);
        //do not exit to console when master server shuts down
        cfg.setProperty(ConfigurationKeys.KEY_MASTER_SERVER_EXIT_ON_SHUTDOWN, false);
        //limit the number of used compute hosts
        cfg.setProperty(ConfigurationKeys.KEY_MAX_NUMBER_OF_CHOSTS, nhosts);
        cfg.setProperty(ConfigurationKeys.KEY_NATIVE_SPEC, "");
        cfg.setProperty(ConfigurationKeys.KEY_GUI_MODE, gui);
        cfg.setProperty(ConfigurationKeys.KEY_SILENT_MODE, true);
        cfg.setProperty(ConfigurationKeys.KEY_SCHEDULE_WAIT_TIME, "500");
        final int maxJobs = mjobs;
        final int maxThreads = nhosts;
        final Mode runMode = mode;
        printMessage("Run mode: " + runMode);
        Executors.newSingleThreadExecutor().submit(new Runnable() {
            @Override
            public void run() {
                if (runMode == Mode.ALL || runMode == Mode.LOCAL) {
                    printMessage("Running Within VM Execution");
                    /*
                     * LOCAL within VM execution
                     */
                    WithinVmExecution lhe = new WithinVmExecution(maxJobs, maxThreads);
                    try {
                        Logger.getLogger(ImpaxsExecution.class.getName()).log(Level.INFO,
                                "Sum is: " + lhe.call());
                    } catch (Exception ex) {
                        Logger.getLogger(ImpaxsExecution.class.getName()).log(Level.SEVERE, null, ex);
                    }
                }

                if (runMode == Mode.ALL || runMode == Mode.DISTRIBUTED) {
                    printMessage("Running Distributed Host RMI Execution");
                    /*
                     * Grid Engine (DRMAA API) or local host distributed RMI execution
                     */
                    DistributedRmiExecution de = new DistributedRmiExecution(cfg, maxJobs);
                    try {
                        Logger.getLogger(ImpaxsExecution.class.getName()).log(Level.INFO,
                                "Sum is: " + de.call());
                    } catch (Exception ex) {
                        Logger.getLogger(ImpaxsExecution.class.getName()).log(Level.SEVERE, null, ex);
                    }
                }
                System.exit(0);
            }
        });
    } catch (IOException ex) {
        Logger.getLogger(ImpaxsExecution.class.getName()).log(Level.SEVERE, null, ex);
    }
}

From source file:edu.berkeley.sparrow.examples.SimpleBackend.java

public static void main(String[] args) throws IOException, TException {
    OptionParser parser = new OptionParser();
    parser.accepts("c", "configuration file").withRequiredArg().ofType(String.class);
    parser.accepts("help", "print help statement");
    OptionSet options = parser.parse(args);

    if (options.has("help")) {
        parser.printHelpOn(System.out);
        System.exit(-1);
    }

    // Logger configuration: log to the console
    BasicConfigurator.configure();
    LOG.setLevel(Level.DEBUG);
    LOG.debug("debug logging on");

    Configuration conf = new PropertiesConfiguration();

    if (options.has("c")) {
        String configFile = (String) options.valueOf("c");
        try {
            conf = new PropertiesConfiguration(configFile);
        } catch (ConfigurationException e) {
            // Fall back to the empty default configuration if the file cannot be read.
        }
    }
    // Start backend server
    SimpleBackend protoBackend = new SimpleBackend();
    BackendService.Processor<BackendService.Iface> processor = new BackendService.Processor<BackendService.Iface>(
            protoBackend);

    int listenPort = conf.getInt(LISTEN_PORT, DEFAULT_LISTEN_PORT);
    int nodeMonitorPort = conf.getInt(NODE_MONITOR_PORT, NodeMonitorThrift.DEFAULT_NM_THRIFT_PORT);
    String nodeMonitorHost = conf.getString(NODE_MONITOR_HOST, DEFAULT_NODE_MONITOR_HOST);
    TServers.launchSingleThreadThriftServer(listenPort, processor);
    protoBackend.initialize(listenPort, nodeMonitorHost, nodeMonitorPort);
}

From source file:edu.berkeley.sparrow.examples.BBackend.java

public static void main(String[] args) throws IOException, TException {
    ipAddress = InetAddress.getLocalHost().toString();
    OptionParser parser = new OptionParser();
    parser.accepts("c", "configuration file").withRequiredArg().ofType(String.class);
    parser.accepts("help", "print help statement");
    OptionSet options = parser.parse(args);

    if (options.has("help")) {
        parser.printHelpOn(System.out);
        System.exit(-1);
    }

    // Logger configuration: log to the console
    BasicConfigurator.configure();

    Configuration conf = new PropertiesConfiguration();

    if (options.has("c")) {
        String configFile = (String) options.valueOf("c");
        try {
            conf = new PropertiesConfiguration(configFile);
        } catch (ConfigurationException e) {
            // Fall back to the empty default configuration if the file cannot be read.
        }
    }
    // Start backend server
    LOG.setLevel(Level.toLevel(conf.getString(LOG_LEVEL, DEFAULT_LOG_LEVEL)));
    LOG.debug("debug logging on");
    int listenPort = conf.getInt(LISTEN_PORT, DEFAULT_LISTEN_PORT);
    int nodeMonitorPort = conf.getInt(NODE_MONITOR_PORT, NodeMonitorThrift.DEFAULT_NM_THRIFT_PORT);
    batchingDelay = conf.getLong(BATCHING_DELAY, DEFAULT_BATCHING_DELAY);
    String nodeMonitorHost = conf.getString(NODE_MONITOR_HOST, DEFAULT_NODE_MONITOR_HOST);
    int workerThread = conf.getInt(WORKER_THREADS, DEFAULT_WORKER_THREADS);
    appClientAdress = InetAddress.getByName(conf.getString(APP_CLIENT_IP));
    appClientPortNumber = conf.getInt(APP_CLIENT_PORT_NUMBER, DEFAULT_APP_CLIENT_PORT_NUMBER);
    executor = Executors.newFixedThreadPool(workerThread);
    // Starting logging of results
    resultLog = new SynchronizedWrite("ResultsBackend.txt");
    Thread resultLogTh = new Thread(resultLog);
    resultLogTh.start();

    BBackend protoBackend = new BBackend();
    BackendService.Processor<BackendService.Iface> processor = new BackendService.Processor<BackendService.Iface>(
            protoBackend);

    TServers.launchSingleThreadThriftServer(listenPort, processor);
    protoBackend.initialize(listenPort, nodeMonitorHost, nodeMonitorPort);

}

From source file:com.netflix.bdp.inviso.history.TraceJobHistoryLoader.java

public static void main(String[] args) throws Exception {
    FileSystem fs = FileSystem.newInstanceLocal(new Configuration());

    JobHistoryParser parser = new JobHistoryParser(fs, "/tmp/job_1405808155709_124465.history");

    //JobInfo jobInfo = parser.parse();
    TraceJobHistoryLoader loader = new TraceJobHistoryLoader(new PropertiesConfiguration());
    parser.parse(loader);

    ObjectMapper mapper = new ObjectMapper();
    mapper.configure(Feature.INDENT_OUTPUT, true);
    mapper.writeValue(new File("/tmp/mr2-hist.json"), loader.getJob());
}

From source file:com.splout.db.engine.BaseTest.java

public static Configuration getBaseConfiguration() {
    Configuration conf = new PropertiesConfiguration();
    if (System.getenv("REDIS_HOME") == null) {
        throw new RuntimeException("REDIS_HOME must be defined to run splout-redis unit tests");
    }
    String redisServerPath = System.getenv("REDIS_HOME") + "/src/redis-server";
    conf.setProperty(RedisManager.REDIS_EXECUTABLE_CONF, redisServerPath);
    return conf;
}

From source file:com.linkedin.pinot.broker.broker.helix.DefaultHelixBrokerConfig.java

public static Configuration getDefaultBrokerConf() {
    Configuration brokerConf = new PropertiesConfiguration();

    // config based routing
    brokerConf.addProperty("pinot.broker.transport.routingMode", "HELIX");

    brokerConf.addProperty("pinot.broker.routing.table.builder.default.offline.class", "balanced");
    brokerConf.addProperty("pinot.broker.routing.table.builder.default.offline.numOfRoutingTables", "10");
    brokerConf.addProperty("pinot.broker.routing.table.builder.default.realtime.class",
            "Kafkahighlevelconsumerbased");
    brokerConf.addProperty("pinot.broker.routing.table.builder.tables", "");

    //client properties
    brokerConf.addProperty("pinot.broker.client.enableConsole", "true");
    brokerConf.addProperty("pinot.broker.client.queryPort", "8099");
    brokerConf.addProperty("pinot.broker.client.consolePath", "../webapp");

    // [PINOT-2435] setting to 0 so it doesn't disconnect from zk
    brokerConf.addProperty("pinot.broker.helix.flappingTimeWindowMs", "0");

    return brokerConf;
}