List of usage examples for weka.classifiers.functions.supportVector.RBFKernel
public RBFKernel()
From source file:etc.aloe.oilspill2010.TrainingImpl.java
@Override public WekaModel train(ExampleSet examples) { //These settings aren't terrible SMO smo = new SMO(); RBFKernel rbf = new RBFKernel(); rbf.setGamma(0.5);/* www . jav a2 s. co m*/ smo.setKernel(rbf); smo.setC(1.5); //These also work pretty ok Logistic log = new Logistic(); log.setRidge(100); Classifier classifier = log; try { System.out.print("Training on " + examples.size() + " examples... "); classifier.buildClassifier(examples.getInstances()); System.out.println("done."); WekaModel model = new WekaModel(classifier); return model; } catch (Exception ex) { System.err.println("Unable to train classifier."); System.err.println("\t" + ex.getMessage()); return null; } }
From source file:jjj.asap.sas.models1.job.BuildRBFKernelModels.java
License: Open Source License
@Override protected void run() throws Exception { // validate args if (!Bucket.isBucket("datasets", inputBucket)) { throw new FileNotFoundException(inputBucket); }//w ww . j av a 2 s .c o m if (!Bucket.isBucket("models", outputBucket)) { throw new FileNotFoundException(outputBucket); } // init multi-threading Job.startService(); final Queue<Future<Object>> queue = new LinkedList<Future<Object>>(); // get the input from the bucket List<String> names = Bucket.getBucketItems("datasets", this.inputBucket); for (String dsn : names) { SMO smo = new SMO(); smo.setFilterType(new SelectedTag(SMO.FILTER_NONE, SMO.TAGS_FILTER)); smo.setBuildLogisticModels(true); RBFKernel kernel = new RBFKernel(); kernel.setGamma(0.05); smo.setKernel(kernel); AttributeSelectedClassifier asc = new AttributeSelectedClassifier(); asc.setEvaluator(new InfoGainAttributeEval()); Ranker ranker = new Ranker(); ranker.setThreshold(0.01); asc.setSearch(ranker); asc.setClassifier(smo); queue.add(Job.submit(new ModelBuilder(dsn, "InfoGain-SMO-RBFKernel", asc, this.outputBucket))); } // wait on complete Progress progress = new Progress(queue.size(), this.getClass().getSimpleName()); while (!queue.isEmpty()) { try { queue.remove().get(); } catch (Exception e) { Job.log("ERROR", e.toString()); } progress.tick(); } progress.done(); Job.stopService(); }
From source file:machinelearningproject.MachineLearningProject.java
/** * @param args the command line arguments *//* www .j a v a2 s . com*/ public static void main(String[] args) throws Exception { // TODO code application logic here DataSource source = new DataSource("D:\\spambase.arff"); // DataSource source = new DataSource("D:\\weather-nominal.arff"); Instances instances = source.getDataSet(); int numAttr = instances.numAttributes(); instances.setClassIndex(instances.numAttributes() - 1); int runs = 5; int seed = 15; for (int i = 0; i < runs; i++) { //randomize data seed = seed + 1; // the seed for randomizing the data Random rand = new Random(seed); // create seeded number generator Instances randData = new Instances(instances); // create copy of original data Collections.shuffle(randData); Evaluation evalDTree = new Evaluation(randData); Evaluation evalRF = new Evaluation(randData); Evaluation evalSVM = new Evaluation(randData); int folds = 10; for (int n = 0; n < folds; n++) { Instances train = randData.trainCV(folds, n, rand); Instances test = randData.testCV(folds, n); //instantiate classifiers DecisionTree dtree = new DecisionTree(); RandomForest rf = new RandomForest(100); SMO svm = new SMO(); RBFKernel rbfKernel = new RBFKernel(); double gamma = 0.70; rbfKernel.setGamma(gamma); dtree.buildClassifier(train); rf.buildClassifier(train); svm.buildClassifier(train); evalDTree.evaluateModel(dtree, test); evalRF.evaluateModel(rf, test); evalSVM.evaluateModel(svm, test); } System.out.println("=== Decision Tree Evaluation ==="); System.out.println(evalDTree.toSummaryString()); System.out.println(evalDTree.toClassDetailsString()); System.out.println(evalDTree.toMatrixString()); System.out.println("=== Random Forest Evaluation ==="); System.out.println(evalRF.toSummaryString()); System.out.println(evalRF.toClassDetailsString()); System.out.println(evalRF.toMatrixString()); System.out.println("=== SVM Evaluation ==="); System.out.println(evalSVM.toSummaryString()); System.out.println(evalSVM.toClassDetailsString()); 
System.out.println(evalSVM.toMatrixString()); } }
From source file:org.opentox.jaqpot3.qsar.trainer.SvmRegression.java
License: Open Source License
/**
 * Trains an SVM regression model (weka SVMreg) on the given dataset and
 * assembles the full jaqpot Model resource: the serialized regressor, its
 * training statistics, dependent/independent/predicted features, and the
 * tuning parameters used.
 *
 * @param data the training dataset; must contain the numeric target feature
 * @return the assembled Model
 * @throws JaqpotException on validation failure, DB failure, or training failure
 */
@Override
public Model train(Instances data) throws JaqpotException {
    try {
        // Validate that the requested prediction feature exists and is numeric.
        Attribute target = data.attribute(targetUri.toString());
        if (target == null) {
            throw new QSARException("The prediction feature you provided was not found in the dataset");
        } else {
            if (!target.isNumeric()) {
                throw new QSARException("The prediction feature you provided is not numeric.");
            }
        }
        data.setClass(target);
        //data.deleteAttributeAt(0);//remove the first attribute, i.e. 'compound_uri' or 'URI'
        /* Very important: place the target feature at the end! (target = last)*/
        int numAttributes = data.numAttributes();
        int classIndex = data.classIndex();
        Instances orderedTrainingSet = null;
        // Build the desired attribute order: every non-class attribute, then the class.
        List<String> properOrder = new ArrayList<String>(numAttributes);
        for (int j = 0; j < numAttributes; j++) {
            if (j != classIndex) {
                properOrder.add(data.attribute(j).name());
            }
        }
        properOrder.add(data.attribute(classIndex).name());
        try {
            orderedTrainingSet = InstancesUtil.sortByFeatureAttrList(properOrder, data, -1);
        } catch (JaqpotException ex) {
            // NOTE(review): the exception is only logged, leaving orderedTrainingSet
            // null -- the very next line would then throw an NPE. Confirm intent.
            logger.error(null, ex);
        }
        orderedTrainingSet.setClass(orderedTrainingSet.attribute(targetUri.toString()));
        getTask().getMeta()
                .addComment("Dataset successfully retrieved and converted into a weka.core.Instances object");
        // Persist the task-progress update, always closing the updater.
        UpdateTask firstTaskUpdater = new UpdateTask(getTask());
        firstTaskUpdater.setUpdateMeta(true);
        firstTaskUpdater.setUpdateTaskStatus(true);//TODO: Is this necessary?
        try {
            firstTaskUpdater.update();
        } catch (DbException ex) {
            throw new JaqpotException(ex);
        } finally {
            try {
                firstTaskUpdater.close();
            } catch (DbException ex) {
                throw new JaqpotException(ex);
            }
        }
        Model m = new Model(Configuration.getBaseUri().augment("model", getUuid().toString()));
        // INITIALIZE THE REGRESSOR regressor
        SVMreg regressor = new SVMreg();
        final String[] regressorOptions = { "-P", Double.toString(epsilon), "-T", Double.toString(tolerance) };
        // Build the kernel selected by the 'kernel' parameter: rbf, polynomial,
        // or linear (a PolyKernel with exponent 1).
        // NOTE(review): if 'kernel' matches none of these, svm_kernel remains
        // null and is passed to setKernel below -- confirm upstream validation.
        Kernel svm_kernel = null;
        if (kernel.equalsIgnoreCase("rbf")) {
            RBFKernel rbf_kernel = new RBFKernel();
            rbf_kernel.setGamma(Double.parseDouble(Double.toString(gamma)));
            rbf_kernel.setCacheSize(Integer.parseInt(Integer.toString(cacheSize)));
            svm_kernel = rbf_kernel;
        } else if (kernel.equalsIgnoreCase("polynomial")) {
            PolyKernel poly_kernel = new PolyKernel();
            poly_kernel.setExponent(Double.parseDouble(Integer.toString(degree)));
            poly_kernel.setCacheSize(Integer.parseInt(Integer.toString(cacheSize)));
            poly_kernel.setUseLowerOrder(true);
            svm_kernel = poly_kernel;
        } else if (kernel.equalsIgnoreCase("linear")) {
            PolyKernel poly_kernel = new PolyKernel();
            poly_kernel.setExponent((double) 1.0);
            poly_kernel.setCacheSize(Integer.parseInt(Integer.toString(cacheSize)));
            poly_kernel.setUseLowerOrder(true);
            svm_kernel = poly_kernel;
        }
        try {
            regressor.setOptions(regressorOptions);
        } catch (final Exception ex) {
            throw new QSARException("Bad options in SVM trainer for epsilon = {" + epsilon + "} or "
                    + "tolerance = {" + tolerance + "}.", ex);
        }
        regressor.setKernel(svm_kernel);
        // START TRAINING & CREATE MODEL
        try {
            regressor.buildClassifier(orderedTrainingSet);
            // evaluate classifier and print some statistics
            Evaluation eval = new Evaluation(orderedTrainingSet);
            eval.evaluateModel(regressor, orderedTrainingSet);
            String stats = eval.toSummaryString("", false);
            ActualModel am = new ActualModel(regressor);
            am.setStatistics(stats);
            m.setActualModel(am);
            // m.setStatistics(stats);
        } catch (NotSerializableException ex) {
            String message = "Model is not serializable";
            logger.error(message, ex);
            throw new JaqpotException(message, ex);
        } catch (final Exception ex) {
            throw new QSARException("Unexpected condition while trying to train "
                    + "the model. Possible explanation : {" + ex.getMessage() + "}", ex);
        }
        // Attach provenance and feature metadata to the model resource.
        m.setAlgorithm(getAlgorithm());
        m.setCreatedBy(getTask().getCreatedBy());
        m.setDataset(datasetUri);
        m.addDependentFeatures(dependentFeature);
        try {
            dependentFeature.loadFromRemote();
        } catch (ServiceInvocationException ex) {
            java.util.logging.Logger.getLogger(SvmRegression.class.getName()).log(Level.SEVERE, null, ex);
        }
        // NOTE(review): dependentFeature is added a second time here (also added
        // above, before loadFromRemote) -- confirm whether the duplicate is intended.
        m.addDependentFeatures(dependentFeature);
        m.setIndependentFeatures(independentFeatures);
        // Publish the feature that will hold this model's predictions.
        String predictionFeatureUri = null;
        Feature predictedFeature = publishFeature(m, dependentFeature.getUnits(),
                "Feature created as prediction feature for SVM model " + m.getUri(), datasetUri, featureService);
        m.addPredictedFeatures(predictedFeature);
        predictionFeatureUri = predictedFeature.getUri().toString();
        getTask().getMeta().addComment("Prediction feature " + predictionFeatureUri + " was created.");
        /* SET PARAMETERS FOR THE TRAINED MODEL */
        m.setParameters(new HashSet<Parameter>());
        Parameter<String> kernelParam = new Parameter("kernel", new LiteralValue<String>(kernel))
                .setScope(Parameter.ParameterScope.OPTIONAL);
        kernelParam.setUri(Services.anonymous().augment("parameter", RANDOM.nextLong()));
        Parameter<Double> costParam = new Parameter("cost", new LiteralValue<Double>(cost))
                .setScope(Parameter.ParameterScope.OPTIONAL);
        costParam.setUri(Services.anonymous().augment("parameter", RANDOM.nextLong()));
        Parameter<Double> gammaParam = new Parameter("gamma", new LiteralValue<Double>(gamma))
                .setScope(Parameter.ParameterScope.OPTIONAL);
        gammaParam.setUri(Services.anonymous().augment("parameter", RANDOM.nextLong()));
        // NOTE(review): parameter name "espilon" looks like a typo for "epsilon";
        // kept as-is because clients may already depend on the stored name.
        Parameter<Double> epsilonParam = new Parameter("espilon", new LiteralValue<Double>(epsilon))
                .setScope(Parameter.ParameterScope.OPTIONAL);
        epsilonParam.setUri(Services.anonymous().augment("parameter", RANDOM.nextLong()));
        Parameter<Integer> degreeParam = new Parameter("degree", new LiteralValue<Integer>(degree))
                .setScope(Parameter.ParameterScope.OPTIONAL);
        degreeParam.setUri(Services.anonymous().augment("parameter", RANDOM.nextLong()));
        Parameter<Double> toleranceParam = new Parameter("tolerance", new LiteralValue<Double>(tolerance))
                .setScope(Parameter.ParameterScope.OPTIONAL);
        toleranceParam.setUri(Services.anonymous().augment("parameter", RANDOM.nextLong()));
        m.getParameters().add(kernelParam);
        m.getParameters().add(costParam);
        m.getParameters().add(gammaParam);
        m.getParameters().add(epsilonParam);
        m.getParameters().add(degreeParam);
        m.getParameters().add(toleranceParam);
        //save the instances being predicted to abstract trainer for calculating DoA
        predictedInstances = orderedTrainingSet;
        excludeAttributesDoA.add(dependentFeature.getUri().toString());
        return m;
    } catch (QSARException ex) {
        logger.debug(null, ex);
        throw new JaqpotException(ex);
    }
}
From source file:org.opentox.qsar.processors.trainers.classification.SVCTrainer.java
License: Open Source License
/**
 * Trains a support vector classification (SMO) model: the dataset is written
 * to a temporary ARFF file, the classifier is trained and serialized to the
 * models directory via Evaluation.evaluateModel's -d option, and a QSARModel
 * descriptor is returned.
 *
 * @param data the training dataset (class attribute already set by the caller)
 * @return the model descriptor (status UNDER_DEVELOPMENT)
 * @throws QSARException if training fails
 */
public QSARModel train(Instances data) throws QSARException {
    // GET A UUID AND DEFINE THE TEMPORARY FILE WHERE THE TRAINING DATA
    // ARE STORED IN ARFF FORMAT PRIOR TO TRAINING.
    final String rand = java.util.UUID.randomUUID().toString();
    final String temporaryFilePath = ServerFolders.temp + "/" + rand + ".arff";
    final File tempFile = new File(temporaryFilePath);
    // SAVE THE DATA IN THE TEMPORARY FILE
    try {
        ArffSaver dataSaver = new ArffSaver();
        dataSaver.setInstances(data);
        // NOTE(review): the FileOutputStream is handed to the saver and never
        // explicitly closed here -- presumably writeBatch closes it; confirm.
        dataSaver.setDestination(new FileOutputStream(tempFile));
        dataSaver.writeBatch();
        if (!tempFile.exists()) {
            throw new IOException("Temporary File was not created");
        }
    } catch (final IOException ex) {
        /*
         * The content of the dataset cannot be
         * written to the destination file due to
         * some communication issue.
         */
        tempFile.delete();
        throw new RuntimeException(
                "Unexpected condition while trying to save the " + "dataset in a temporary ARFF file", ex);
    }
    // INITIALIZE THE CLASSIFIER
    // NOTE(review): epsilon is hard-coded to 0.1 while tolerance is a field --
    // confirm this asymmetry is intentional.
    SMO classifier = new SMO();
    classifier.setEpsilon(0.1);
    classifier.setToleranceParameter(tolerance);
    // CONSTRUCT A KERNEL ACCORDING TO THE POSTED PARAMETERS
    // SUPPORTED KERNELS ARE {rbf, linear, polynomial}
    // NOTE(review): an unrecognized kernel name leaves svc_kernel null, which
    // is then passed to setKernel -- confirm the name is validated upstream.
    Kernel svc_kernel = null;
    if (this.kernel.equalsIgnoreCase("rbf")) {
        RBFKernel rbf_kernel = new RBFKernel();
        rbf_kernel.setGamma(gamma);
        rbf_kernel.setCacheSize(cacheSize);
        svc_kernel = rbf_kernel;
    } else if (this.kernel.equalsIgnoreCase("polynomial")) {
        PolyKernel poly_kernel = new PolyKernel();
        poly_kernel.setExponent(degree);
        poly_kernel.setCacheSize(cacheSize);
        poly_kernel.setUseLowerOrder(true);
        svc_kernel = poly_kernel;
    } else if (this.kernel.equalsIgnoreCase("linear")) {
        // Linear kernel = polynomial kernel with exponent fixed at 1.
        PolyKernel linear_kernel = new PolyKernel();
        linear_kernel.setExponent((double) 1.0);
        linear_kernel.setCacheSize(cacheSize);
        linear_kernel.setUseLowerOrder(true);
        svc_kernel = linear_kernel;
    }
    classifier.setKernel(svc_kernel);
    String modelFilePath = ServerFolders.models_weka + "/" + uuid.toString();
    String[] generalOptions = { "-c", Integer.toString(data.classIndex() + 1), "-t", temporaryFilePath,
            /// Save the model in the following directory
            "-d", modelFilePath };
    // AFTER ALL, BUILD THE CLASSIFICATION MODEL AND SAVE IT AS A SERIALIZED
    // WEKA FILE IN THE CORRESPONDING DIRECTORY.
    try {
        Evaluation.evaluateModel(classifier, generalOptions);
    } catch (final Exception ex) {
        tempFile.delete();
        throw new QSARException(Cause.XQReg350,
                "Unexpected condition while trying to train "
                        + "a support vector classification model. Possible explanation : {" + ex.getMessage()
                        + "}",
                ex);
    }
    // Collect every non-class attribute as an independent feature.
    ArrayList<Feature> independentFeatures = new ArrayList<Feature>();
    for (int i = 0; i < data.numAttributes(); i++) {
        Feature f = new Feature(data.attribute(i).name());
        if (data.classIndex() != i) {
            independentFeatures.add(f);
        }
    }
    Feature dependentFeature = new Feature(data.classAttribute().name());
    Feature predictedFeature = dependentFeature;
    // Assemble the model descriptor returned to the caller.
    QSARModel model = new QSARModel();
    model.setCode(uuid.toString());
    model.setAlgorithm(YaqpAlgorithms.SVC);
    model.setPredictionFeature(predictedFeature);
    model.setDependentFeature(dependentFeature);
    model.setIndependentFeatures(independentFeatures);
    model.setDataset(datasetUri);
    model.setParams(getParameters());
    model.setModelStatus(ModelStatus.UNDER_DEVELOPMENT);
    tempFile.delete();
    return model;
}
From source file:org.opentox.qsar.processors.trainers.regression.SVMTrainer.java
License: Open Source License
/**
 * Trains an SVM regression (SVMreg) model: the dataset is written to a
 * temporary ARFF file, the regressor is trained and serialized to the models
 * directory via Evaluation.evaluateModel's -d option, and a QSARModel
 * descriptor is returned.
 *
 * @param data the training dataset (class attribute already set by the caller)
 * @return the model descriptor (status UNDER_DEVELOPMENT)
 * @throws QSARException if training fails
 */
public QSARModel train(Instances data) throws QSARException {
    // NOTE: The checks (check if data is null and if the prediction feature is
    // acceptable are found in WekaRegressor. The method preprocessData(Instances)
    // does this job.

    // GET A UUID AND DEFINE THE TEMPORARY FILE WHERE THE TRAINING DATA
    // ARE STORED IN ARFF FORMAT PRIOR TO TRAINING.
    final String rand = java.util.UUID.randomUUID().toString();
    final String temporaryFilePath = ServerFolders.temp + "/" + rand + ".arff";
    final File tempFile = new File(temporaryFilePath);
    // SAVE THE DATA IN THE TEMPORARY FILE
    try {
        ArffSaver dataSaver = new ArffSaver();
        dataSaver.setInstances(data);
        // NOTE(review): the FileOutputStream is handed to the saver and never
        // explicitly closed here -- presumably writeBatch closes it; confirm.
        dataSaver.setDestination(new FileOutputStream(tempFile));
        dataSaver.writeBatch();
    } catch (final IOException ex) {
        tempFile.delete();
        throw new RuntimeException(
                "Unexpected condition while trying to save the " + "dataset in a temporary ARFF file", ex);
    }
    // INITIALIZE THE REGRESSOR
    SVMreg regressor = new SVMreg();
    final String[] regressorOptions = { "-P", Double.toString(epsilon), "-T", Double.toString(tolerance) };
    // Build the kernel selected by the 'kernel' field: rbf, polynomial,
    // or linear (a PolyKernel with exponent 1).
    // NOTE(review): an unrecognized kernel name leaves svm_kernel null, which
    // is then passed to setKernel -- confirm the name is validated upstream.
    Kernel svm_kernel = null;
    if (kernel.equalsIgnoreCase("rbf")) {
        RBFKernel rbf_kernel = new RBFKernel();
        rbf_kernel.setGamma(Double.parseDouble(Double.toString(gamma)));
        rbf_kernel.setCacheSize(Integer.parseInt(Integer.toString(cacheSize)));
        svm_kernel = rbf_kernel;
    } else if (kernel.equalsIgnoreCase("polynomial")) {
        PolyKernel poly_kernel = new PolyKernel();
        poly_kernel.setExponent(Double.parseDouble(Integer.toString(degree)));
        poly_kernel.setCacheSize(Integer.parseInt(Integer.toString(cacheSize)));
        poly_kernel.setUseLowerOrder(true);
        svm_kernel = poly_kernel;
    } else if (kernel.equalsIgnoreCase("linear")) {
        PolyKernel poly_kernel = new PolyKernel();
        poly_kernel.setExponent((double) 1.0);
        poly_kernel.setCacheSize(Integer.parseInt(Integer.toString(cacheSize)));
        poly_kernel.setUseLowerOrder(true);
        svm_kernel = poly_kernel;
    }
    regressor.setKernel(svm_kernel);
    try {
        regressor.setOptions(regressorOptions);
    } catch (final Exception ex) {
        tempFile.delete();
        throw new IllegalArgumentException("Bad options in SVM trainer for epsilon = {" + epsilon + "} or "
                + "tolerance = {" + tolerance + "}.", ex);
    }
    // PERFORM THE TRAINING
    String[] generalOptions = { "-c", Integer.toString(data.classIndex() + 1), "-t", temporaryFilePath,
            /// Save the model in the following directory
            "-d", ServerFolders.models_weka + "/" + uuid };
    try {
        Evaluation.evaluateModel(regressor, generalOptions);
    } catch (final Exception ex) {
        tempFile.delete();
        throw new QSARException(Cause.XQReg350, "Unexpected condition while trying to train "
                + "an SVM model. Possible explanation : {" + ex.getMessage() + "}", ex);
    }
    // Assemble the model descriptor returned to the caller.
    QSARModel model = new QSARModel();
    model.setParams(getParameters());
    model.setCode(uuid.toString());
    model.setAlgorithm(YaqpAlgorithms.SVM);
    model.setDataset(datasetUri);
    model.setModelStatus(ModelStatus.UNDER_DEVELOPMENT);
    // Collect every non-class attribute as an independent feature.
    ArrayList<Feature> independentFeatures = new ArrayList<Feature>();
    for (int i = 0; i < data.numAttributes(); i++) {
        Feature f = new Feature(data.attribute(i).name());
        if (data.classIndex() != i) {
            independentFeatures.add(f);
        }
    }
    Feature dependentFeature = new Feature(data.classAttribute().name());
    Feature predictedFeature = dependentFeature;
    model.setDependentFeature(dependentFeature);
    model.setIndependentFeatures(independentFeatures);
    model.setPredictionFeature(predictedFeature);
    tempFile.delete();
    return model;
}
From source file:wekimini.learning.SVMModelBuilder.java
private void updateClassifier() { if (kernelType == KernelType.LINEAR) { Kernel k = getClassifier().getKernel(); if (k instanceof PolyKernel && ((PolyKernel) k).getExponent() == 1.0) { //do nothing; already got it } else {// ww w . j av a2s . c om PolyKernel nk = new PolyKernel(); nk.setExponent(1.0); ((SMO) classifier).setKernel(nk); } } else if (kernelType == KernelType.POLYNOMIAL) { Kernel k = getClassifier().getKernel(); if (k instanceof PolyKernel) { ((PolyKernel) k).setExponent(polyExponent); ((PolyKernel) k).setUseLowerOrder(polyUseLowerOrder); //return; } else { PolyKernel nk = new PolyKernel(); nk.setExponent(polyExponent); nk.setUseLowerOrder(polyUseLowerOrder); getClassifier().setKernel(nk); } } else { //RBF Kernel k = getClassifier().getKernel(); if (k instanceof RBFKernel) { ((RBFKernel) k).setGamma(rbfGamma); // return; } else { RBFKernel nk = new RBFKernel(); nk.setGamma(rbfGamma); getClassifier().setKernel(nk); } } ((SMO) classifier).setC(complexity); }