List of usage examples for the weka.filters.supervised.attribute.PLSFilter constructor PLSFilter()
public PLSFilter()
From source file: adams.data.conversion.SwapPLS.java
License:Open Source License
/**
 * Adds options to the internal list of options.
 */
@Override
public void defineOptions() {
    super.defineOptions();

    // the old PLS filter setup (weka.filters.supervised.attribute.PLSFilter) to be swapped out
    m_OptionManager.add("old-filter", "oldFilter", new PLSFilter());

    // presumably controls whether the old filter must match exactly rather
    // than just by class -- TODO confirm against OptionManager semantics
    m_OptionManager.add("exact-match", "exactMatch", false);

    // the replacement PLS filter
    m_OptionManager.add("new-filter", "newFilter", new PLS());

    // presumably transfers the number of components from the old filter to
    // the new one when enabled -- verify against the conversion code
    m_OptionManager.add("keep-num-components", "keepNumComponents", true);
}
From source file: com.github.fracpete.multisearch.optimize.PLSFilterAndLinearRegression.java
License:Open Source License
/** * The first parameter must be dataset,/*from ww w .ja v a2 s. com*/ * the (optional) second the class index (1-based, 'first' and 'last' * also supported). * * @param args the commandline options * @throws Exception if optimization fails for some reason */ public static void main(String[] args) throws Exception { if (args.length == 0) { System.err.println("\nUsage: PLSFilterAndLinearRegression <dataset> [classindex]\n"); System.exit(1); } // load data Instances data = ExampleHelper.loadData(args[0], (args.length > 1) ? args[1] : null); // configure classifier we want to optimize PLSFilter pls = new PLSFilter(); LinearRegression lr = new LinearRegression(); FilteredClassifier fc = new FilteredClassifier(); fc.setClassifier(lr); fc.setFilter(pls); // required for Weka > 3.7.13 fc.setDoNotCheckForModifiedClassAttribute(true); // configure multisearch // 1. number of components ListParameter numComp = new ListParameter(); numComp.setProperty("filter.numComponents"); numComp.setList("2 5 7"); // 2. ridge MathParameter ridge = new MathParameter(); ridge.setProperty("classifier.ridge"); ridge.setBase(10); ridge.setMin(-5); ridge.setMax(1); ridge.setStep(1); ridge.setExpression("pow(BASE,I)"); // assemble everything MultiSearch multi = new MultiSearch(); multi.setClassifier(fc); multi.setSearchParameters(new AbstractParameter[] { numComp, ridge }); SelectedTag tag = new SelectedTag(DefaultEvaluationMetrics.EVALUATION_RMSE, new DefaultEvaluationMetrics().getTags()); multi.setEvaluation(tag); // output configuration System.out.println("\nMultiSearch commandline:\n" + Utils.toCommandLine(multi)); // optimize System.out.println("\nOptimizing...\n"); multi.buildClassifier(data); System.out.println("Best setup:\n" + Utils.toCommandLine(multi.getBestClassifier())); System.out.println("Best parameters: " + multi.getGenerator().evaluate(multi.getBestValues())); }
From source file: com.github.fracpete.multisearch.setupgenerator.PLSFilterAndLinearRegression.java
License:Open Source License
/** * Outputs the commandlines.//w w w .j ava2 s . co m * * @param args the commandline options * @throws Exception if setup generator fails for some reason */ public static void main(String[] args) throws Exception { // configure classifier we want to generate setups for PLSFilter pls = new PLSFilter(); LinearRegression lr = new LinearRegression(); FilteredClassifier fc = new FilteredClassifier(); fc.setClassifier(lr); fc.setFilter(pls); // required for Weka > 3.7.13 fc.setDoNotCheckForModifiedClassAttribute(true); // configure generator // 1. number of components ListParameter numComp = new ListParameter(); numComp.setProperty("filter.numComponents"); numComp.setList("2 5 7"); // 2. ridge MathParameter ridge = new MathParameter(); ridge.setProperty("classifier.ridge"); ridge.setBase(10); ridge.setMin(-5); ridge.setMax(1); ridge.setStep(1); ridge.setExpression("pow(BASE,I)"); // assemble everything SetupGenerator generator = new SetupGenerator(); generator.setBaseObject(fc); generator.setParameters(new AbstractParameter[] { numComp, ridge }); // output configuration System.out.println("\nSetupgenerator commandline:\n" + Utils.toCommandLine(generator)); // output commandlines System.out.println("\nCommandlines:\n"); Enumeration<Serializable> enm = generator.setups(); while (enm.hasMoreElements()) System.out.println(Utils.toCommandLine(enm.nextElement())); }
From source file: org.opentox.jaqpot3.qsar.trainer.PLSTrainer.java
License:Open Source License
/**
 * Trains a PLS (partial least squares) filter on the given instances and
 * assembles the corresponding model resource: independent/predicted
 * features, training parameters and evaluation statistics.
 *
 * @param data the training instances; the attribute matching
 *             {@code targetUri} is set as the class attribute
 * @return the assembled model resource
 * @throws JaqpotException if building or serializing the model fails
 */
@Override
public Model train(Instances data) throws JaqpotException {
    Model model = new Model(Configuration.getBaseUri().augment("model", getUuid().toString()));

    // the target attribute doubles as the class attribute
    data.setClass(data.attribute(targetUri.toString()));

    // ensure the target URI is listed among the independent features
    // (in PLS the target is not excluded -- see the note at the end)
    Boolean targetURIIncluded = false;
    for (Feature tempFeature : independentFeatures) {
        if (StringUtils.equals(tempFeature.getUri().toString(), targetUri.toString())) {
            targetURIIncluded = true;
            break;
        }
    }
    if (!targetURIIncluded) {
        independentFeatures.add(new Feature(targetUri));
    }
    model.setIndependentFeatures(independentFeatures);

    /*
     * Train the PLS filter
     */
    PLSFilter pls = new PLSFilter();
    try {
        pls.setInputFormat(data);
        pls.setOptions(new String[] { "-C", Integer.toString(numComponents), "-A", pls_algorithm, "-P",
                preprocessing, "-U", doUpdateClass });
        // called for its side effect of training 'pls'; the filtered output is discarded
        PLSFilter.useFilter(data, pls);
    } catch (Exception ex) {
        // NOTE(review): failure here is only logged and execution continues with a
        // possibly untrained filter -- confirm this best-effort behavior is intended
        Logger.getLogger(PLSTrainer.class.getName()).log(Level.SEVERE, null, ex);
    }

    PLSModel actualModel = new PLSModel(pls);
    try {
        // wrap the trained filter in a classifier so it can be evaluated
        PLSClassifier cls = new PLSClassifier();
        cls.setFilter(pls);
        cls.buildClassifier(data);

        // evaluate classifier on the training data and keep the summary statistics
        Evaluation eval = new Evaluation(data);
        eval.evaluateModel(cls, data);
        String stats = eval.toSummaryString("", false);

        ActualModel am = new ActualModel(actualModel);
        am.setStatistics(stats);
        model.setActualModel(am);
    } catch (NotSerializableException ex) {
        Logger.getLogger(PLSTrainer.class.getName()).log(Level.SEVERE, null, ex);
        throw new JaqpotException(ex);
    } catch (Exception ex) {
        Logger.getLogger(PLSTrainer.class.getName()).log(Level.SEVERE, null, ex);
        throw new JaqpotException(ex);
    }

    model.setDataset(datasetUri);
    model.setAlgorithm(Algorithms.plsFilter());
    model.getMeta().addTitle("PLS Model for " + datasetUri);

    // expose the training parameters on the model resource
    Set<Parameter> parameters = new HashSet<Parameter>();
    Parameter targetPrm = new Parameter(Configuration.getBaseUri().augment("parameter", RANDOM.nextLong()),
            "target", new LiteralValue(targetUri.toString(), XSDDatatype.XSDstring))
                    .setScope(Parameter.ParameterScope.MANDATORY);
    Parameter nComponentsPrm = new Parameter(Configuration.getBaseUri().augment("parameter", RANDOM.nextLong()),
            "numComponents", new LiteralValue(numComponents, XSDDatatype.XSDpositiveInteger))
                    .setScope(Parameter.ParameterScope.MANDATORY);
    Parameter preprocessingPrm = new Parameter(
            Configuration.getBaseUri().augment("parameter", RANDOM.nextLong()), "preprocessing",
            new LiteralValue(preprocessing, XSDDatatype.XSDstring)).setScope(Parameter.ParameterScope.OPTIONAL);
    Parameter algorithmPrm = new Parameter(Configuration.getBaseUri().augment("parameter", RANDOM.nextLong()),
            "algorithm", new LiteralValue(pls_algorithm, XSDDatatype.XSDstring))
                    .setScope(Parameter.ParameterScope.OPTIONAL);
    Parameter doUpdatePrm = new Parameter(Configuration.getBaseUri().augment("parameter", RANDOM.nextLong()),
            "doUpdateClass", new LiteralValue(doUpdateClass, XSDDatatype.XSDboolean))
                    .setScope(Parameter.ParameterScope.OPTIONAL);
    parameters.add(targetPrm);
    parameters.add(nComponentsPrm);
    parameters.add(preprocessingPrm);
    parameters.add(doUpdatePrm);
    parameters.add(algorithmPrm);
    model.setParameters(parameters);

    // publish one predicted feature per PLS component ("PLS-0" .. "PLS-<n-1>")
    for (int i = 0; i < numComponents; i++) {
        Feature f = publishFeature(model, "", "PLS-" + i, datasetUri, featureService);
        model.addPredictedFeatures(f);
    }

    //save the instances being predicted to abstract trainer for calculating DoA
    predictedInstances = data;
    //in pls target is not excluded
    return model;
}