List of usage examples for weka.classifiers.functions.supportVector PolyKernel setExponent
public void setExponent(double value)
From source file:activeSegmentation.learning.SMO.java
License:Open Source License
/** * Empty constructor//from w w w . jav a2 s . c o m */ public SMO() { super(); setBuildLogisticModels(true); //To be linear kernel, equals to used in the related papers setC(1.0); PolyKernel poly = new PolyKernel(); poly.setExponent(1.0); setKernel(poly); }
From source file:applications.FoodSpectrograms.java
public static void shapeletClassifier() { int nosExp = 3; ThreadedClassifierExperiment[] runs = new ThreadedClassifierExperiment[nosExp]; Thread[] threads = new Thread[nosExp]; for (int i = 0; i < nosExp; i++) { Classifier c = new SMO(); PolyKernel kernel = new PolyKernel(); kernel.setExponent(1); ((SMO) c).setKernel(kernel);/*from www . j a va2 s . c om*/ FullShapeletTransform s = new FullShapeletTransform(); s.setDebug(false); s.setNumberOfShapelets(train[i].numAttributes() / 2); int minLength = 5; int maxLength = train[i].numAttributes() / 4; // int maxLength=(train.numAttributes()-1)/10; s.setShapeletMinAndMax(minLength, maxLength); s.setQualityMeasure(QualityMeasures.ShapeletQualityChoice.F_STAT); s.turnOffLog(); runs[i] = new ThreadedClassifierExperiment(train[i], test[i], c); runs[i].setTransform(s); threads[i] = new Thread(runs[i]); } for (int i = 0; i < nosExp; i++) threads[i].start(); try { for (int i = 0; i < nosExp; i++) threads[i].join(); } catch (InterruptedException e) { System.out.println(" Interrupted!!"); } for (int i = 0; i < nosExp; i++) System.out.println(" ED Accuracy for " + names[i] + " is " + runs[i].getTestAccuracy()); }
From source file:development.CrossValidateShapelets.java
/**
 * Builds the list of benchmark classifiers and records their display names in
 * parallel (same index in {@code names} as in the returned list).
 *
 * <p>Classifiers: cross-validated kNN, C4.5, Naive Bayes, BayesNet, a
 * 200-tree RandomForest, a 30-iteration RotationForest, and linear/quadratic
 * SVMs (SMO with polynomial kernels of exponent 1 and 2).
 *
 * @param names output parameter, receives one human-readable name per classifier
 * @return the configured classifiers, in the same order as {@code names}
 */
public static ArrayList<Classifier> setSingleClassifiers(ArrayList<String> names) {
    ArrayList<Classifier> sc = new ArrayList<>();
    kNN n = new kNN(50);
    n.setCrossValidate(true);
    sc.add(n);
    names.add("kNN");
    sc.add(new J48());
    names.add("C45");
    sc.add(new NaiveBayes());
    names.add("NB");
    BayesNet bn = new BayesNet();
    sc.add(bn);
    names.add("BayesNet");
    RandomForest rf = new RandomForest();
    rf.setNumTrees(200);
    sc.add(rf);
    names.add("RandForest");
    RotationForest rot = new RotationForest();
    rot.setNumIterations(30);
    // BUG FIX: previously added `rf` a second time here, so the RandomForest
    // was registered under "RotForest" and the RotationForest was never used.
    sc.add(rot);
    names.add("RotForest");
    // Linear SVM: polynomial kernel with exponent 1.
    SMO svmL = new SMO();
    PolyKernel kernel = new PolyKernel();
    kernel.setExponent(1);
    svmL.setKernel(kernel);
    sc.add(svmL);
    names.add("SVML");
    // Quadratic SVM: polynomial kernel with exponent 2.
    kernel = new PolyKernel();
    kernel.setExponent(2);
    SMO svmQ = new SMO();
    svmQ.setKernel(kernel);
    sc.add(svmQ);
    names.add("SVMQ");
    return sc;
}
From source file:org.opentox.jaqpot3.qsar.trainer.SvmRegression.java
License:Open Source License
/**
 * Trains an SVM regression model (Weka {@code SVMreg}) on the given dataset,
 * evaluates it on the training set, publishes the prediction feature, and
 * returns a fully populated {@code Model} with its tuning parameters attached.
 */
@Override
public Model train(Instances data) throws JaqpotException {
    try {
        // The prediction (target) feature must exist in the dataset and be numeric.
        Attribute target = data.attribute(targetUri.toString());
        if (target == null) {
            throw new QSARException("The prediction feature you provided was not found in the dataset");
        } else {
            if (!target.isNumeric()) {
                throw new QSARException("The prediction feature you provided is not numeric.");
            }
        }
        data.setClass(target);
        //data.deleteAttributeAt(0);//remove the first attribute, i.e. 'compound_uri' or 'URI'
        /* Very important: place the target feature at the end! (target = last)*/
        int numAttributes = data.numAttributes();
        int classIndex = data.classIndex();
        Instances orderedTrainingSet = null;
        // Build the desired attribute order: every non-class attribute first, class last.
        List<String> properOrder = new ArrayList<String>(numAttributes);
        for (int j = 0; j < numAttributes; j++) {
            if (j != classIndex) {
                properOrder.add(data.attribute(j).name());
            }
        }
        properOrder.add(data.attribute(classIndex).name());
        try {
            orderedTrainingSet = InstancesUtil.sortByFeatureAttrList(properOrder, data, -1);
        } catch (JaqpotException ex) {
            // NOTE(review): this failure is only logged; orderedTrainingSet then
            // stays null and the setClass() call below would throw NPE — confirm
            // whether sortByFeatureAttrList can actually fail here.
            logger.error(null, ex);
        }
        orderedTrainingSet.setClass(orderedTrainingSet.attribute(targetUri.toString()));
        getTask().getMeta()
                .addComment("Dataset successfully retrieved and converted into a weka.core.Instances object");
        // Persist the task metadata/status update before the (potentially long) training run.
        UpdateTask firstTaskUpdater = new UpdateTask(getTask());
        firstTaskUpdater.setUpdateMeta(true);
        firstTaskUpdater.setUpdateTaskStatus(true); //TODO: Is this necessary?
        try {
            firstTaskUpdater.update();
        } catch (DbException ex) {
            throw new JaqpotException(ex);
        } finally {
            try {
                firstTaskUpdater.close();
            } catch (DbException ex) {
                throw new JaqpotException(ex);
            }
        }

        Model m = new Model(Configuration.getBaseUri().augment("model", getUuid().toString()));

        // INITIALIZE THE REGRESSOR regressor
        SVMreg regressor = new SVMreg();
        final String[] regressorOptions = { "-P", Double.toString(epsilon), "-T", Double.toString(tolerance) };
        // Pick the kernel implementation from the posted 'kernel' parameter.
        // NOTE(review): if 'kernel' matches none of rbf/polynomial/linear,
        // svm_kernel stays null — presumably validated upstream; confirm.
        Kernel svm_kernel = null;
        if (kernel.equalsIgnoreCase("rbf")) {
            RBFKernel rbf_kernel = new RBFKernel();
            rbf_kernel.setGamma(Double.parseDouble(Double.toString(gamma)));
            rbf_kernel.setCacheSize(Integer.parseInt(Integer.toString(cacheSize)));
            svm_kernel = rbf_kernel;
        } else if (kernel.equalsIgnoreCase("polynomial")) {
            PolyKernel poly_kernel = new PolyKernel();
            poly_kernel.setExponent(Double.parseDouble(Integer.toString(degree)));
            poly_kernel.setCacheSize(Integer.parseInt(Integer.toString(cacheSize)));
            poly_kernel.setUseLowerOrder(true);
            svm_kernel = poly_kernel;
        } else if (kernel.equalsIgnoreCase("linear")) {
            // A linear kernel is modelled as a degree-1 polynomial kernel.
            PolyKernel poly_kernel = new PolyKernel();
            poly_kernel.setExponent((double) 1.0);
            poly_kernel.setCacheSize(Integer.parseInt(Integer.toString(cacheSize)));
            poly_kernel.setUseLowerOrder(true);
            svm_kernel = poly_kernel;
        }
        try {
            regressor.setOptions(regressorOptions);
        } catch (final Exception ex) {
            throw new QSARException("Bad options in SVM trainer for epsilon = {" + epsilon + "} or "
                    + "tolerance = {" + tolerance + "}.", ex);
        }
        regressor.setKernel(svm_kernel);

        // START TRAINING & CREATE MODEL
        try {
            regressor.buildClassifier(orderedTrainingSet);
            // evaluate classifier and print some statistics
            Evaluation eval = new Evaluation(orderedTrainingSet);
            eval.evaluateModel(regressor, orderedTrainingSet);
            String stats = eval.toSummaryString("", false);
            ActualModel am = new ActualModel(regressor);
            am.setStatistics(stats);
            m.setActualModel(am);
            // m.setStatistics(stats);
        } catch (NotSerializableException ex) {
            String message = "Model is not serializable";
            logger.error(message, ex);
            throw new JaqpotException(message, ex);
        } catch (final Exception ex) {
            throw new QSARException("Unexpected condition while trying to train "
                    + "the model. Possible explanation : {" + ex.getMessage() + "}", ex);
        }

        m.setAlgorithm(getAlgorithm());
        m.setCreatedBy(getTask().getCreatedBy());
        m.setDataset(datasetUri);
        m.addDependentFeatures(dependentFeature);
        try {
            dependentFeature.loadFromRemote();
        } catch (ServiceInvocationException ex) {
            java.util.logging.Logger.getLogger(SvmRegression.class.getName()).log(Level.SEVERE, null, ex);
        }
        // NOTE(review): dependentFeature is added a second time here (also a few
        // lines above) — confirm addDependentFeatures() de-duplicates; otherwise
        // this looks like an accidental duplicate.
        m.addDependentFeatures(dependentFeature);
        m.setIndependentFeatures(independentFeatures);
        String predictionFeatureUri = null;
        Feature predictedFeature = publishFeature(m, dependentFeature.getUnits(),
                "Feature created as prediction feature for SVM model " + m.getUri(), datasetUri, featureService);
        m.addPredictedFeatures(predictedFeature);
        predictionFeatureUri = predictedFeature.getUri().toString();
        getTask().getMeta().addComment("Prediction feature " + predictionFeatureUri + " was created.");

        /* SET PARAMETERS FOR THE TRAINED MODEL */
        m.setParameters(new HashSet<Parameter>());
        Parameter<String> kernelParam = new Parameter("kernel", new LiteralValue<String>(kernel))
                .setScope(Parameter.ParameterScope.OPTIONAL);
        kernelParam.setUri(Services.anonymous().augment("parameter", RANDOM.nextLong()));
        Parameter<Double> costParam = new Parameter("cost", new LiteralValue<Double>(cost))
                .setScope(Parameter.ParameterScope.OPTIONAL);
        costParam.setUri(Services.anonymous().augment("parameter", RANDOM.nextLong()));
        Parameter<Double> gammaParam = new Parameter("gamma", new LiteralValue<Double>(gamma))
                .setScope(Parameter.ParameterScope.OPTIONAL);
        gammaParam.setUri(Services.anonymous().augment("parameter", RANDOM.nextLong()));
        // NOTE(review): "espilon" looks like a typo for "epsilon"; fixing it would
        // change the published parameter name that clients may query, so it is
        // only flagged here, not changed.
        Parameter<Double> epsilonParam = new Parameter("espilon", new LiteralValue<Double>(epsilon))
                .setScope(Parameter.ParameterScope.OPTIONAL);
        epsilonParam.setUri(Services.anonymous().augment("parameter", RANDOM.nextLong()));
        Parameter<Integer> degreeParam = new Parameter("degree", new LiteralValue<Integer>(degree))
                .setScope(Parameter.ParameterScope.OPTIONAL);
        degreeParam.setUri(Services.anonymous().augment("parameter", RANDOM.nextLong()));
        Parameter<Double> toleranceParam = new Parameter("tolerance", new LiteralValue<Double>(tolerance))
                .setScope(Parameter.ParameterScope.OPTIONAL);
        toleranceParam.setUri(Services.anonymous().augment("parameter", RANDOM.nextLong()));
        m.getParameters().add(kernelParam);
        m.getParameters().add(costParam);
        m.getParameters().add(gammaParam);
        m.getParameters().add(epsilonParam);
        m.getParameters().add(degreeParam);
        m.getParameters().add(toleranceParam);

        //save the instances being predicted to abstract trainer for calculating DoA
        predictedInstances = orderedTrainingSet;
        excludeAttributesDoA.add(dependentFeature.getUri().toString());

        return m;
    } catch (QSARException ex) {
        logger.debug(null, ex);
        throw new JaqpotException(ex);
    }
}
From source file:org.opentox.qsar.processors.trainers.classification.SVCTrainer.java
License:Open Source License
/**
 * Trains a support vector classification model (Weka SMO): the dataset is
 * written to a temporary ARFF file, the classifier is built and serialized to
 * disk via Weka's command-line-style evaluation, and a QSARModel describing
 * the result is returned.
 */
public QSARModel train(Instances data) throws QSARException {
    // GET A UUID AND DEFINE THE TEMPORARY FILE WHERE THE TRAINING DATA
    // ARE STORED IN ARFF FORMAT PRIOR TO TRAINING.
    final String rand = java.util.UUID.randomUUID().toString();
    final String temporaryFilePath = ServerFolders.temp + "/" + rand + ".arff";
    final File tempFile = new File(temporaryFilePath);

    // SAVE THE DATA IN THE TEMPORARY FILE
    try {
        ArffSaver dataSaver = new ArffSaver();
        dataSaver.setInstances(data);
        dataSaver.setDestination(new FileOutputStream(tempFile));
        dataSaver.writeBatch();
        if (!tempFile.exists()) {
            throw new IOException("Temporary File was not created");
        }
    } catch (final IOException ex) {
        /*
         * The content of the dataset cannot be
         * written to the destination file due to
         * some communication issue.
         */
        tempFile.delete();
        throw new RuntimeException(
                "Unexpected condition while trying to save the " + "dataset in a temporary ARFF file", ex);
    }

    // INITIALIZE THE CLASSIFIER
    SMO classifier = new SMO();
    classifier.setEpsilon(0.1);
    classifier.setToleranceParameter(tolerance);

    // CONSTRUCT A KERNEL ACCORDING TO THE POSTED PARAMETERS
    // SUPPORTED KERNELS ARE {rbf, linear, polynomial}
    // NOTE(review): any other kernel value leaves svc_kernel null — presumably
    // rejected by earlier parameter validation; confirm.
    Kernel svc_kernel = null;
    if (this.kernel.equalsIgnoreCase("rbf")) {
        RBFKernel rbf_kernel = new RBFKernel();
        rbf_kernel.setGamma(gamma);
        rbf_kernel.setCacheSize(cacheSize);
        svc_kernel = rbf_kernel;
    } else if (this.kernel.equalsIgnoreCase("polynomial")) {
        PolyKernel poly_kernel = new PolyKernel();
        poly_kernel.setExponent(degree);
        poly_kernel.setCacheSize(cacheSize);
        poly_kernel.setUseLowerOrder(true);
        svc_kernel = poly_kernel;
    } else if (this.kernel.equalsIgnoreCase("linear")) {
        // A linear kernel is a degree-1 polynomial kernel.
        PolyKernel linear_kernel = new PolyKernel();
        linear_kernel.setExponent((double) 1.0);
        linear_kernel.setCacheSize(cacheSize);
        linear_kernel.setUseLowerOrder(true);
        svc_kernel = linear_kernel;
    }
    classifier.setKernel(svc_kernel);

    String modelFilePath = ServerFolders.models_weka + "/" + uuid.toString();
    String[] generalOptions = { "-c", Integer.toString(data.classIndex() + 1), "-t", temporaryFilePath,
            /// Save the model in the following directory
            "-d", modelFilePath };

    // AFTER ALL, BUILD THE CLASSIFICATION MODEL AND SAVE IT AS A SERIALIZED
    // WEKA FILE IN THE CORRESPONDING DIRECTORY.
    try {
        Evaluation.evaluateModel(classifier, generalOptions);
    } catch (final Exception ex) {
        tempFile.delete();
        throw new QSARException(Cause.XQReg350,
                "Unexpected condition while trying to train "
                        + "a support vector classification model. Possible explanation : {" + ex.getMessage()
                        + "}",
                ex);
    }

    // Collect the independent features: every attribute except the class one.
    ArrayList<Feature> independentFeatures = new ArrayList<Feature>();
    for (int i = 0; i < data.numAttributes(); i++) {
        Feature f = new Feature(data.attribute(i).name());
        if (data.classIndex() != i) {
            independentFeatures.add(f);
        }
    }
    Feature dependentFeature = new Feature(data.classAttribute().name());
    Feature predictedFeature = dependentFeature;

    // Assemble the model descriptor returned to the caller.
    QSARModel model = new QSARModel();
    model.setCode(uuid.toString());
    model.setAlgorithm(YaqpAlgorithms.SVC);
    model.setPredictionFeature(predictedFeature);
    model.setDependentFeature(dependentFeature);
    model.setIndependentFeatures(independentFeatures);
    model.setDataset(datasetUri);
    model.setParams(getParameters());
    model.setModelStatus(ModelStatus.UNDER_DEVELOPMENT);

    tempFile.delete();
    return model;
}
From source file:org.opentox.qsar.processors.trainers.regression.SVMTrainer.java
License:Open Source License
/**
 * Trains an SVM regression model (Weka {@code SVMreg}): writes the dataset to
 * a temporary ARFF file, builds and serializes the regressor through Weka's
 * command-line-style evaluation, and returns a QSARModel describing it.
 *
 * @param data training instances; the class attribute must already be set
 * @return a QSARModel for the trained SVM regressor (status UNDER_DEVELOPMENT)
 * @throws QSARException if Weka fails while building/evaluating the model
 */
public QSARModel train(Instances data) throws QSARException {
    // NOTE: The checks (check if data is null and if the prediction feature is
    // acceptable) are found in WekaRegressor. The method preprocessData(Instances)
    // does this job.

    // GET A UUID AND DEFINE THE TEMPORARY FILE WHERE THE TRAINING DATA
    // ARE STORED IN ARFF FORMAT PRIOR TO TRAINING.
    final String rand = java.util.UUID.randomUUID().toString();
    final String temporaryFilePath = ServerFolders.temp + "/" + rand + ".arff";
    final File tempFile = new File(temporaryFilePath);

    // SAVE THE DATA IN THE TEMPORARY FILE
    try {
        ArffSaver dataSaver = new ArffSaver();
        dataSaver.setInstances(data);
        dataSaver.setDestination(new FileOutputStream(tempFile));
        dataSaver.writeBatch();
    } catch (final IOException ex) {
        tempFile.delete();
        throw new RuntimeException(
                "Unexpected condition while trying to save the " + "dataset in a temporary ARFF file", ex);
    }

    // INITIALIZE THE REGRESSOR
    SVMreg regressor = new SVMreg();
    final String[] regressorOptions = { "-P", Double.toString(epsilon), "-T", Double.toString(tolerance) };

    // Pick the kernel implementation from the posted 'kernel' parameter.
    // NOTE(review): unsupported values leave svm_kernel null — presumably
    // rejected by earlier parameter validation; confirm.
    Kernel svm_kernel = null;
    if (kernel.equalsIgnoreCase("rbf")) {
        RBFKernel rbf_kernel = new RBFKernel();
        rbf_kernel.setGamma(Double.parseDouble(Double.toString(gamma)));
        rbf_kernel.setCacheSize(Integer.parseInt(Integer.toString(cacheSize)));
        svm_kernel = rbf_kernel;
    } else if (kernel.equalsIgnoreCase("polynomial")) {
        PolyKernel poly_kernel = new PolyKernel();
        poly_kernel.setExponent(Double.parseDouble(Integer.toString(degree)));
        poly_kernel.setCacheSize(Integer.parseInt(Integer.toString(cacheSize)));
        poly_kernel.setUseLowerOrder(true);
        svm_kernel = poly_kernel;
    } else if (kernel.equalsIgnoreCase("linear")) {
        // A linear kernel is a degree-1 polynomial kernel.
        PolyKernel poly_kernel = new PolyKernel();
        poly_kernel.setExponent((double) 1.0);
        poly_kernel.setCacheSize(Integer.parseInt(Integer.toString(cacheSize)));
        poly_kernel.setUseLowerOrder(true);
        svm_kernel = poly_kernel;
    }
    regressor.setKernel(svm_kernel);
    try {
        regressor.setOptions(regressorOptions);
    } catch (final Exception ex) {
        tempFile.delete();
        throw new IllegalArgumentException("Bad options in SVM trainer for epsilon = {" + epsilon + "} or "
                + "tolerance = {" + tolerance + "}.", ex);
    }

    // PERFORM THE TRAINING
    String[] generalOptions = { "-c", Integer.toString(data.classIndex() + 1), "-t", temporaryFilePath,
            /// Save the model in the following directory
            "-d", ServerFolders.models_weka + "/" + uuid };
    try {
        Evaluation.evaluateModel(regressor, generalOptions);
    } catch (final Exception ex) {
        tempFile.delete();
        throw new QSARException(Cause.XQReg350, "Unexpected condition while trying to train "
                + "an SVM model. Possible explanation : {" + ex.getMessage() + "}", ex);
    }

    // Assemble the model descriptor returned to the caller.
    QSARModel model = new QSARModel();
    model.setParams(getParameters());
    model.setCode(uuid.toString());
    model.setAlgorithm(YaqpAlgorithms.SVM);
    model.setDataset(datasetUri);
    model.setModelStatus(ModelStatus.UNDER_DEVELOPMENT);

    // Collect the independent features: every attribute except the class one.
    ArrayList<Feature> independentFeatures = new ArrayList<Feature>();
    for (int i = 0; i < data.numAttributes(); i++) {
        Feature f = new Feature(data.attribute(i).name());
        if (data.classIndex() != i) {
            independentFeatures.add(f);
        }
    }
    Feature dependentFeature = new Feature(data.classAttribute().name());
    Feature predictedFeature = dependentFeature;
    model.setDependentFeature(dependentFeature);
    model.setIndependentFeatures(independentFeatures);
    model.setPredictionFeature(predictedFeature);
    tempFile.delete();
    return model;
}
From source file:wekimini.learning.SVMModelBuilder.java
/**
 * Creates an SVM model builder backed by a Weka SMO classifier, defaulting to
 * a polynomial kernel configured from the builder's current settings.
 */
public SVMModelBuilder() {
    classifier = new SMO();
    kernelType = KernelType.POLYNOMIAL;
    PolyKernel k = new PolyKernel();
    k.setExponent(polyExponent);
    // Consistency fix: apply the lower-order-terms flag here too, exactly as
    // setPolyExponent() and setPolyUseLowerOrder() do when they rebuild the
    // kernel, so the initial kernel matches later reconfigurations.
    k.setUseLowerOrder(polyUseLowerOrder);
    ((SMO) classifier).setKernel(k);
}
From source file:wekimini.learning.SVMModelBuilder.java
public void setPolyExponent(double e) { this.polyExponent = e; PolyKernel k = new PolyKernel(); k.setExponent(polyExponent); k.setUseLowerOrder(polyUseLowerOrder); ((SMO) classifier).setKernel(k);//from www .j av a 2 s. c o m }
From source file:wekimini.learning.SVMModelBuilder.java
public void setPolyUseLowerOrder(boolean u) { polyUseLowerOrder = u;// w ww .j a va2 s .c om PolyKernel k = new PolyKernel(); k.setExponent(polyExponent); k.setUseLowerOrder(polyUseLowerOrder); ((SMO) classifier).setKernel(k); }
From source file:wekimini.learning.SVMModelBuilder.java
/**
 * Synchronizes the SMO classifier's kernel and complexity with the builder's
 * current settings, reusing the existing kernel instance in place when its
 * type already matches the selected kernel type.
 */
private void updateClassifier() {
    if (kernelType == KernelType.LINEAR) {
        Kernel k = getClassifier().getKernel();
        if (k instanceof PolyKernel && ((PolyKernel) k).getExponent() == 1.0) {
            //do nothing; already got it
            // NOTE(review): only the exponent is checked — a reused kernel may
            // still carry useLowerOrder=true from an earlier POLYNOMIAL
            // configuration; confirm that is intended.
        } else {
            PolyKernel nk = new PolyKernel();
            nk.setExponent(1.0);
            ((SMO) classifier).setKernel(nk);
        }
    } else if (kernelType == KernelType.POLYNOMIAL) {
        Kernel k = getClassifier().getKernel();
        if (k instanceof PolyKernel) {
            // Reconfigure the existing polynomial kernel in place.
            ((PolyKernel) k).setExponent(polyExponent);
            ((PolyKernel) k).setUseLowerOrder(polyUseLowerOrder);
            //return;
        } else {
            PolyKernel nk = new PolyKernel();
            nk.setExponent(polyExponent);
            nk.setUseLowerOrder(polyUseLowerOrder);
            getClassifier().setKernel(nk);
        }
    } else { //RBF
        Kernel k = getClassifier().getKernel();
        if (k instanceof RBFKernel) {
            // Reconfigure the existing RBF kernel in place.
            ((RBFKernel) k).setGamma(rbfGamma);
            // return;
        } else {
            RBFKernel nk = new RBFKernel();
            nk.setGamma(rbfGamma);
            getClassifier().setKernel(nk);
        }
    }
    // NOTE(review): kernel installation mixes getClassifier().setKernel(...)
    // and ((SMO) classifier).setKernel(...) — presumably both refer to the
    // same SMO instance; verify getClassifier() returns 'classifier'.
    ((SMO) classifier).setC(complexity);
}