Example usage for weka.core Utils toCommandLine

List of usage examples for weka.core Utils toCommandLine

Introduction

On this page you can find example usage of the weka.core Utils method toCommandLine.

Prototype

public static String toCommandLine(Object obj) 

Source Link

Document

Generates a commandline of the given object.

Usage

From source file:com.github.fracpete.multisearch.optimize.J48ConfidenceFactor.java

License:Open Source License

/**
 * The first parameter must be dataset,/*from w  w  w.ja  va 2 s. c  o  m*/
 * the (optional) second the class index (1-based, 'first' and 'last'
 * also supported).
 *
 * @param args   the commandline options
 * @throws Exception   if optimization fails for some reason
 */
public static void main(String[] args) throws Exception {
    if (args.length == 0) {
        System.err.println("\nUsage: J48ConfidenceFactor <dataset> [classindex]\n");
        System.exit(1);
    }

    // load data
    Instances data = ExampleHelper.loadData(args[0], (args.length > 1) ? args[1] : null);

    // configure classifier we want to optimize
    J48 j48 = new J48();

    // configure multisearch
    MathParameter conf = new MathParameter();
    conf.setProperty("confidenceFactor");
    conf.setBase(10);
    conf.setMin(0.05);
    conf.setMax(0.75);
    conf.setStep(0.05);
    conf.setExpression("I");
    MultiSearch multi = new MultiSearch();
    multi.setClassifier(j48);
    multi.setSearchParameters(new AbstractParameter[] { conf });
    SelectedTag tag = new SelectedTag(DefaultEvaluationMetrics.EVALUATION_AUC,
            new DefaultEvaluationMetrics().getTags());
    multi.setEvaluation(tag);

    // output configuration
    System.out.println("\nMultiSearch commandline:\n" + Utils.toCommandLine(multi));

    // optimize
    System.out.println("\nOptimizing...\n");
    multi.buildClassifier(data);
    System.out.println("Best setup:\n" + Utils.toCommandLine(multi.getBestClassifier()));
    System.out.println("Best parameter: " + multi.getGenerator().evaluate(multi.getBestValues()));
}

From source file:com.github.fracpete.multisearch.optimize.PLSFilterAndLinearRegression.java

License:Open Source License

/**
 * Command-line entry point. The first argument must be the dataset file,
 * the optional second argument the class index (1-based; 'first' and
 * 'last' are also supported).
 *
 * @param args   the commandline options
 * @throws Exception   if optimization fails for some reason
 */
public static void main(String[] args) throws Exception {
    if (args.length == 0) {
        System.err.println("\nUsage: PLSFilterAndLinearRegression <dataset> [classindex]\n");
        System.exit(1);
    }

    // load data
    String classIndex = (args.length > 1) ? args[1] : null;
    Instances data = ExampleHelper.loadData(args[0], classIndex);

    // base classifier: linear regression behind a PLS filter
    PLSFilter pls = new PLSFilter();
    LinearRegression lr = new LinearRegression();
    FilteredClassifier fc = new FilteredClassifier();
    fc.setClassifier(lr);
    fc.setFilter(pls);
    // required for Weka > 3.7.13
    fc.setDoNotCheckForModifiedClassAttribute(true);

    // search space, part 1: number of PLS components
    ListParameter numComponents = new ListParameter();
    numComponents.setProperty("filter.numComponents");
    numComponents.setList("2 5 7");
    // search space, part 2: ridge as 10^I for I in -5 .. 1
    MathParameter ridgeParam = new MathParameter();
    ridgeParam.setProperty("classifier.ridge");
    ridgeParam.setBase(10);
    ridgeParam.setMin(-5);
    ridgeParam.setMax(1);
    ridgeParam.setStep(1);
    ridgeParam.setExpression("pow(BASE,I)");

    // assemble the multi-search meta-classifier, evaluated via RMSE
    MultiSearch search = new MultiSearch();
    search.setClassifier(fc);
    search.setSearchParameters(new AbstractParameter[] { numComponents, ridgeParam });
    search.setEvaluation(new SelectedTag(DefaultEvaluationMetrics.EVALUATION_RMSE,
            new DefaultEvaluationMetrics().getTags()));

    // output configuration
    System.out.println("\nMultiSearch commandline:\n" + Utils.toCommandLine(search));

    // optimize
    System.out.println("\nOptimizing...\n");
    search.buildClassifier(data);
    System.out.println("Best setup:\n" + Utils.toCommandLine(search.getBestClassifier()));
    System.out.println("Best parameters: " + search.getGenerator().evaluate(search.getBestValues()));
}

From source file:com.github.fracpete.multisearch.optimize.SMOKernels.java

License:Open Source License

/**
 * The first parameter must be dataset,/*from w  w w  . j a  v  a 2 s. c o  m*/
 * the (optional) second the class index (1-based, 'first' and 'last'
 * also supported).
 *
 * @param args   the commandline options
 * @throws Exception   if optimization fails for some reason
 */
public static void main(String[] args) throws Exception {
    if (args.length == 0) {
        System.err.println("\nUsage: SMOKernels <dataset> [classindex]\n");
        System.exit(1);
    }

    // load data
    Instances data = ExampleHelper.loadData(args[0], (args.length > 1) ? args[1] : null);

    // configure classifier we want to optimize
    SMO smo = new SMO();

    // configure multisearch
    // 1. RBFKernel
    ListParameter listRBF = new ListParameter();
    listRBF.setProperty("kernel");
    listRBF.setList(RBFKernel.class.getName());
    MathParameter gamma = new MathParameter();
    gamma.setProperty("kernel.gamma");
    gamma.setBase(10);
    gamma.setMin(-4);
    gamma.setMax(1);
    gamma.setStep(1);
    gamma.setExpression("pow(BASE,I)");
    ParameterGroup groupRBF = new ParameterGroup();
    groupRBF.setParameters(new AbstractParameter[] { listRBF, gamma });
    // 2. PolyKernel
    ListParameter listPoly = new ListParameter();
    listPoly.setProperty("kernel");
    listPoly.setList(PolyKernel.class.getName());
    MathParameter exp = new MathParameter();
    exp.setProperty("kernel.exponent");
    exp.setBase(10);
    exp.setMin(1);
    exp.setMax(5);
    exp.setStep(1);
    exp.setExpression("I");
    ParameterGroup groupPoly = new ParameterGroup();
    groupPoly.setParameters(new AbstractParameter[] { listPoly, exp });
    // assemble everything
    MultiSearch multi = new MultiSearch();
    multi.setClassifier(smo);
    multi.setSearchParameters(new AbstractParameter[] { groupRBF, groupPoly });
    SelectedTag tag = new SelectedTag(DefaultEvaluationMetrics.EVALUATION_ACC,
            new DefaultEvaluationMetrics().getTags());
    multi.setEvaluation(tag);

    // output configuration
    System.out.println("\nMultiSearch commandline:\n" + Utils.toCommandLine(multi));

    // optimize
    System.out.println("\nOptimizing...\n");
    multi.buildClassifier(data);
    System.out.println("Best setup:\n" + Utils.toCommandLine(multi.getBestClassifier()));
    System.out.println("Best parameters: " + multi.getGenerator().evaluate(multi.getBestValues()));
}

From source file:com.github.fracpete.multisearch.setupgenerator.J48ConfidenceFactor.java

License:Open Source License

/**
 * Generates and prints the commandlines of all J48 setups produced by the
 * setup generator (one per confidenceFactor value).
 *
 * @param args   the commandline options (not used)
 * @throws Exception   if setup generator fails for some reason
 */
public static void main(String[] args) throws Exception {
    // configure classifier we want to generate setups for
    J48 j48 = new J48();

    // configure generator: confidenceFactor from 0.05 to 0.75 in steps of 0.05
    MathParameter conf = new MathParameter();
    conf.setProperty("confidenceFactor");
    conf.setBase(10);
    conf.setMin(0.05);
    conf.setMax(0.75);
    conf.setStep(0.05);
    conf.setExpression("I");
    // NOTE: the original example also assembled a MultiSearch instance here,
    // but it was never used — only the SetupGenerator is required.
    SetupGenerator generator = new SetupGenerator();
    generator.setBaseObject(j48);
    generator.setParameters(new AbstractParameter[] { conf });

    // output configuration
    System.out.println("\nSetupgenerator commandline:\n" + Utils.toCommandLine(generator));

    // output commandlines
    System.out.println("\nCommandlines:\n");
    Enumeration<Serializable> enm = generator.setups();
    while (enm.hasMoreElements())
        System.out.println(Utils.toCommandLine(enm.nextElement()));
}

From source file:com.github.fracpete.multisearch.setupgenerator.PLSFilterAndLinearRegression.java

License:Open Source License

/**
 * Generates and prints the commandlines of all setups produced by the
 * setup generator.
 *
 * @param args   the commandline options
 * @throws Exception   if setup generator fails for some reason
 */
public static void main(String[] args) throws Exception {
    // base classifier: linear regression behind a PLS filter
    PLSFilter pls = new PLSFilter();
    LinearRegression lr = new LinearRegression();
    FilteredClassifier fc = new FilteredClassifier();
    fc.setClassifier(lr);
    fc.setFilter(pls);
    // required for Weka > 3.7.13
    fc.setDoNotCheckForModifiedClassAttribute(true);

    // search space, part 1: number of PLS components
    ListParameter numComponents = new ListParameter();
    numComponents.setProperty("filter.numComponents");
    numComponents.setList("2 5 7");
    // search space, part 2: ridge as 10^I for I in -5 .. 1
    MathParameter ridgeParam = new MathParameter();
    ridgeParam.setProperty("classifier.ridge");
    ridgeParam.setBase(10);
    ridgeParam.setMin(-5);
    ridgeParam.setMax(1);
    ridgeParam.setStep(1);
    ridgeParam.setExpression("pow(BASE,I)");

    // assemble the setup generator
    SetupGenerator generator = new SetupGenerator();
    generator.setBaseObject(fc);
    generator.setParameters(new AbstractParameter[] { numComponents, ridgeParam });

    // output configuration
    System.out.println("\nSetupgenerator commandline:\n" + Utils.toCommandLine(generator));

    // output commandlines
    System.out.println("\nCommandlines:\n");
    Enumeration<Serializable> setups = generator.setups();
    while (setups.hasMoreElements())
        System.out.println(Utils.toCommandLine(setups.nextElement()));
}

From source file:meka.core.OptionUtils.java

License:Open Source License

/**
 * Adds the OptionHandler to the options as a flag/value pair, serializing
 * the handler via its commandline representation.
 *
 * @param options   the current list of options to extend
 * @param option    the option flag (without the leading dash)
 * @param value     the current value
 */
public static void add(List<String> options, String option, OptionHandler value) {
    options.add("-" + option);
    // Utils.toCommandLine already returns a String, so the former
    // '"" + ...' concatenation was redundant
    options.add(Utils.toCommandLine(value));
}

From source file:meka.core.OptionUtils.java

License:Open Source License

/**
 * Returns the commandline string for the object.
 *
 * @param obj           the object to generate the commandline for
 * @return              the commandline/*from  w  w  w. j  a  v  a2s . c  om*/
 * @see                 Utils#toCommandLine(Object)
 */
public static String toCommandLine(Object obj) {
    return Utils.toCommandLine(obj);
}

From source file:meka.experiment.DefaultExperiment.java

License:Open Source License

/**
 * Runs the experiment: iterates over the datasets supplied by the dataset
 * provider and evaluates every configured classifier on each of them,
 * collecting the resulting evaluation statistics.
 *
 * @return          null if successfully run, otherwise error message
 */
public String run() {
    String result;
    Instances dataset;
    List<EvaluationStatistics> stats;
    boolean incremental;

    debug("pre: run");

    result = null;
    m_Running = true;
    // incremental mode: the statistics handler supports per-evaluation
    // updates and can be queried for results from previous runs
    incremental = (m_StatisticsHandler instanceof IncrementalEvaluationStatisticsHandler)
            && (((IncrementalEvaluationStatisticsHandler) m_StatisticsHandler).supportsIncrementalUpdate());
    debug("Incremental statistics? " + incremental);

    notifyExecutionStageListeners(ExecutionStageEvent.Stage.RUNNING);

    while (m_DatasetProvider.hasNext()) {
        // next dataset
        debug("pre: next-dataset");
        dataset = m_DatasetProvider.next();
        debug("post: next-dataset");
        if (dataset == null) {
            result = "Failed to obtain next dataset!";
            log(result);
            m_Running = false;
            break;
        }
        log("Using dataset: " + dataset.relationName());

        // iterate classifiers
        for (MultiLabelClassifier classifier : m_Classifiers) {
            // evaluation required? in incremental mode, reuse statistics from
            // a previous run if this classifier/dataset pair is already present
            if (incremental) {
                if (!((IncrementalEvaluationStatisticsHandler) m_StatisticsHandler).requires(classifier,
                        dataset)) {
                    log("Already present, skipping: " + Utils.toCommandLine(classifier) + " --> "
                            + dataset.relationName());
                    List<EvaluationStatistics> priorStats = ((IncrementalEvaluationStatisticsHandler) m_StatisticsHandler)
                            .retrieve(classifier, dataset);
                    m_Statistics.addAll(priorStats);
                    notifyStatisticsNotificationListeners(priorStats);
                    continue;
                }
            }

            // evaluate a copy so the configured template classifier stays untouched
            try {
                classifier = (MultiLabelClassifier) AbstractClassifier.makeCopy(classifier);
            } catch (Exception e) {
                result = handleException(
                        "Failed to create copy of classifier: " + classifier.getClass().getName(), e);
                log(result);
                m_Running = false;
                break;
            }

            if (m_Running && !m_Stopping) {
                // notify listeners
                notifyIterationNotificationListeners(classifier, dataset);
                log("Using classifier: " + OptionUtils.toCommandLine(classifier));

                // perform evaluation; a non-null result from initialize()
                // is an error message and aborts the experiment
                debug("pre: evaluator init");
                result = m_Evaluator.initialize();
                debug("post: evaluator init");
                if (result != null) {
                    m_Running = false;
                    break;
                }
                try {
                    debug("pre: evaluator evaluate");
                    stats = m_Evaluator.evaluate(classifier, dataset);
                    debug("post: evaluator evaluate");
                } catch (Exception e) {
                    result = handleException("Failed to evaluate dataset '" + dataset.relationName()
                            + "' with classifier: " + Utils.toCommandLine(classifier), e);
                    log(result);
                    m_Running = false;
                    break;
                }
                // collect new statistics; in incremental mode also hand them
                // to the handler right away
                if (stats != null) {
                    m_Statistics.addAll(stats);
                    if (incremental)
                        ((IncrementalEvaluationStatisticsHandler) m_StatisticsHandler).append(stats);
                    notifyStatisticsNotificationListeners(stats);
                }
            }

            if (!m_Running || m_Stopping)
                break;
        }
        if (!m_Running || m_Stopping)
            break;
    }

    // non-incremental handlers receive the collected statistics only at the end
    if (m_Running && !m_Stopping) {
        if (!incremental)
            m_StatisticsHandler.write(m_Statistics);
    }
    // m_Running == false at this point means the run was aborted or failed
    if (!m_Running) {
        if (result == null)
            result = "Experiment interrupted!";
        else
            result = "Experiment interrupted: " + result;
    }

    if (result != null)
        log(result);

    m_Running = false;
    m_Stopping = false;

    debug("post: run");

    return result;
}

From source file:meka.experiment.evaluationstatistics.EvaluationStatistics.java

License:Open Source License

/**
 * Returns the statistics as a string, prefixed with the classifier
 * commandline and the relation name.
 *
 * @return      the statistics
 */
public String toString() {
    return "Classifier=" + Utils.toCommandLine(m_Classifier) + ","
            + "Relation=" + m_Relation + ","
            + super.toString();
}

From source file:meka.experiment.evaluationstatistics.KeyValuePairs.java

License:Open Source License

/**
 * Checks whether the specified combination of classifier and dataset still
 * needs evaluating, i.e., is not already present from a previous evaluation.
 *
 * @param classifier    the classifier to check
 * @param dataset       the dataset to check
 * @return              true if it needs evaluating
 */
public boolean requires(MultiLabelClassifier classifier, Instances dataset) {
    // a classifier/dataset pair is identified by its commandline string
    // and the dataset's relation name
    String cmdline = Utils.toCommandLine(classifier);
    String relation = dataset.relationName();

    for (EvaluationStatistics stat : m_Statistics) {
        if (stat.getCommandLine().equals(cmdline) && stat.getRelation().equals(relation))
            return false;
    }

    return true;
}