Example usage for weka.classifiers.functions LibSVM TAGS_KERNELTYPE

List of usage examples for weka.classifiers.functions LibSVM TAGS_KERNELTYPE

Introduction

In this page you can find the example usage for weka.classifiers.functions LibSVM TAGS_KERNELTYPE.

Prototype

Tag[] TAGS_KERNELTYPE

Click the Source Link below to view the source code for weka.classifiers.functions LibSVM TAGS_KERNELTYPE.

Click Source Link

Document

The different kernel types supported by LibSVM.

Usage

From source file:com.guidefreitas.locator.services.PredictionService.java

/**
 * Trains a LibSVM classifier on the generated ARFF data.
 *
 * <p>Runs a 10-fold cross-validation first, then fits the final model on the
 * full dataset. On any failure the error is logged and {@code null} is returned.
 *
 * @return the cross-validation {@link Evaluation}, or {@code null} on error
 */
public Evaluation train() {
    try {
        // Build a Weka dataset from the in-memory ARFF text.
        String arff = this.generateTrainData();
        InputStream arffStream = new ByteArrayInputStream(arff.getBytes(StandardCharsets.UTF_8));
        DataSource dataSource = new DataSource(arffStream);
        Instances instances = dataSource.getDataSet();
        // The last attribute holds the class label.
        instances.setClassIndex(instances.numAttributes() - 1);

        // Configure a C-SVC classifier with a polynomial kernel.
        this.classifier = new LibSVM();
        this.classifier.setKernelType(new SelectedTag(LibSVM.KERNELTYPE_POLYNOMIAL, LibSVM.TAGS_KERNELTYPE));
        this.classifier.setSVMType(new SelectedTag(LibSVM.SVMTYPE_C_SVC, LibSVM.TAGS_SVMTYPE));

        // Fixed seed keeps the 10-fold cross-validation reproducible.
        Evaluation evaluation = new Evaluation(instances);
        evaluation.crossValidateModel(this.classifier, instances, 10, new Random(1));

        // Fit the final model on all available data.
        this.classifier.buildClassifier(instances);
        return evaluation;
    } catch (Exception ex) {
        Logger.getLogger(PredictionService.class.getName()).log(Level.SEVERE, null, ex);
    }

    // Training failed; the caller receives null.
    return null;
}

From source file:de.upb.timok.oneclassclassifier.WekaSvmClassifier.java

License:Open Source License

/**
 * Builds a one-class LibSVM classifier with the supplied hyper-parameters.
 *
 * @param useProbability probability estimates are enabled iff this is positive
 * @param gamma          kernel gamma
 * @param nu             nu parameter
 * @param costs          cost parameter C
 * @param kernelType     LibSVM kernel tag (0=linear, 1=polynomial, 2=RBF, 3=sigmoid)
 * @param eps            stopping tolerance
 * @param degree         polynomial kernel degree
 * @param scalingMethod  optional input scaling; anything other than NORMALIZE or
 *                       STANDARDIZE leaves the filter unset
 */
public WekaSvmClassifier(int useProbability, double gamma, double nu, double costs, int kernelType, double eps,
        int degree, ScalingMethod scalingMethod) {
    wekaSvm = new LibSVM();

    // Forward the numeric hyper-parameters straight to the wrapped SVM.
    wekaSvm.setCost(costs);
    wekaSvm.setGamma(gamma);
    wekaSvm.setNu(nu);
    wekaSvm.setEps(eps);
    wekaSvm.setDegree(degree);

    // Choose the scaling filter: [0,1] normalization or zero-mean/unit-variance
    // standardization; any other value leaves the filter unconfigured.
    if (scalingMethod == ScalingMethod.NORMALIZE) {
        filter = new Normalize();
    } else if (scalingMethod == ScalingMethod.STANDARDIZE) {
        filter = new Standardize();
    }

    wekaSvm.setProbabilityEstimates(useProbability > 0);

    // One-class SVM with the caller-selected kernel type.
    wekaSvm.setSVMType(new SelectedTag(LibSVM.SVMTYPE_ONE_CLASS_SVM, LibSVM.TAGS_SVMTYPE));
    wekaSvm.setKernelType(new SelectedTag(kernelType, LibSVM.TAGS_KERNELTYPE));
}

From source file:org.jaqpot.algorithm.resource.WekaSVM.java

License:Open Source License

@POST
@Path("training")
/**
 * Trains a Weka LibSVM model from the posted dataset and parameters.
 *
 * <p>Fixes over the previous version: unknown {@code kernel}/{@code type} names
 * now yield a 400 response instead of an unboxing NullPointerException (which
 * surfaced as a 500); the {@code ObjectOutputStream} is closed before the buffer
 * is serialized; the deprecated {@code new Double(String)} constructor is gone.
 *
 * @param request training request carrying the dataset, prediction feature and parameters
 * @return 200 with the serialized model, 400 on bad input, 500 on unexpected failure
 */
public Response training(TrainingRequest request) {
    try {
        // Reject empty datasets up front.
        if (request.getDataset().getDataEntry().isEmpty()
                || request.getDataset().getDataEntry().get(0).getValues().isEmpty()) {
            return Response.status(Response.Status.BAD_REQUEST).entity(
                    ErrorReportFactory.badRequest("Dataset is empty", "Cannot train model on empty dataset"))
                    .build();
        }
        List<String> features = request.getDataset().getDataEntry().stream().findFirst().get().getValues()
                .keySet().stream().collect(Collectors.toList());

        Instances data = InstanceUtils.createFromDataset(request.getDataset(), request.getPredictionFeature());
        Map<String, Object> parameters = request.getParameters() != null ? request.getParameters()
                : new HashMap<>();

        // Parse hyper-parameters, falling back to the class-level defaults.
        LibSVM regressor = new LibSVM();
        Double epsilon = Double.parseDouble(parameters.getOrDefault("epsilon", _epsilon).toString());
        Double cacheSize = Double.parseDouble(parameters.getOrDefault("cacheSize", _cacheSize).toString());
        Double gamma = Double.parseDouble(parameters.getOrDefault("gamma", _gamma).toString());
        Double coeff0 = Double.parseDouble(parameters.getOrDefault("coeff0", _coeff0).toString());
        Double cost = Double.parseDouble(parameters.getOrDefault("cost", _cost).toString());
        Double nu = Double.parseDouble(parameters.getOrDefault("nu", _nu).toString());
        Double loss = Double.parseDouble(parameters.getOrDefault("loss", _loss).toString());
        Integer degree = Integer.parseInt(parameters.getOrDefault("degree", _degree).toString());

        regressor.setEps(epsilon);
        regressor.setCacheSize(cacheSize);
        regressor.setDegree(degree);
        regressor.setCost(cost);
        regressor.setGamma(gamma);
        regressor.setCoef0(coeff0);
        regressor.setNu(nu);
        regressor.setLoss(loss);

        // Map the kernel name to its LibSVM tag; an unrecognized name previously
        // left svm_kernel null and crashed on unboxing in SelectedTag.
        Integer svm_kernel = null;
        String kernel = parameters.getOrDefault("kernel", _kernel).toString();
        if (kernel.equalsIgnoreCase("rbf")) {
            svm_kernel = LibSVM.KERNELTYPE_RBF;
        } else if (kernel.equalsIgnoreCase("polynomial")) {
            svm_kernel = LibSVM.KERNELTYPE_POLYNOMIAL;
        } else if (kernel.equalsIgnoreCase("linear")) {
            svm_kernel = LibSVM.KERNELTYPE_LINEAR;
        } else if (kernel.equalsIgnoreCase("sigmoid")) {
            svm_kernel = LibSVM.KERNELTYPE_SIGMOID;
        }
        if (svm_kernel == null) {
            return Response.status(Response.Status.BAD_REQUEST).entity(
                    ErrorReportFactory.badRequest("Unknown kernel: " + kernel,
                            "Supported kernels are rbf, polynomial, linear and sigmoid"))
                    .build();
        }
        regressor.setKernelType(new SelectedTag(svm_kernel, LibSVM.TAGS_KERNELTYPE));

        // Same validation for the SVM type.
        Integer svm_type = null;
        String type = parameters.getOrDefault("type", _type).toString();
        if (type.equalsIgnoreCase("NU_SVR")) {
            svm_type = LibSVM.SVMTYPE_NU_SVR;
        } else if (type.equalsIgnoreCase("NU_SVC")) {
            svm_type = LibSVM.SVMTYPE_NU_SVC;
        } else if (type.equalsIgnoreCase("C_SVC")) {
            svm_type = LibSVM.SVMTYPE_C_SVC;
        } else if (type.equalsIgnoreCase("EPSILON_SVR")) {
            svm_type = LibSVM.SVMTYPE_EPSILON_SVR;
        } else if (type.equalsIgnoreCase("ONE_CLASS_SVM")) {
            svm_type = LibSVM.SVMTYPE_ONE_CLASS_SVM;
        }
        if (svm_type == null) {
            return Response.status(Response.Status.BAD_REQUEST).entity(
                    ErrorReportFactory.badRequest("Unknown SVM type: " + type,
                            "Supported types are NU_SVR, NU_SVC, C_SVC, EPSILON_SVR and ONE_CLASS_SVM"))
                    .build();
        }
        regressor.setSVMType(new SelectedTag(svm_type, LibSVM.TAGS_SVMTYPE));

        regressor.buildClassifier(data);

        WekaModel model = new WekaModel();
        model.setClassifier(regressor);

        Map<String, Double> options = new HashMap<>();
        options.put("gamma", gamma);
        options.put("coeff0", coeff0);
        // degree.doubleValue() replaces the deprecated new Double(String) round-trip.
        options.put("degree", degree.doubleValue());

        // LibSVM does not expose its native model, so read the private field
        // m_Model reflectively to extract support vectors and coefficients.
        Field modelField = LibSVM.class.getDeclaredField("m_Model");
        modelField.setAccessible(true);
        svm_model svmModel = (svm_model) modelField.get(regressor);
        double[][] coefs = svmModel.sv_coef;
        List<Double> coefsList = IntStream.range(0, coefs[0].length).mapToObj(i -> coefs[0][i])
                .collect(Collectors.toList());

        svm_node[][] nodes = svmModel.SV;

        // Each support vector becomes a sparse index->value map (TreeMap keeps indices ordered).
        List<Map<Integer, Double>> vectors = IntStream.range(0, nodes.length).mapToObj(i -> {
            Map<Integer, Double> node = new TreeMap<>();
            Arrays.stream(nodes[i]).forEach(n -> node.put(n.index, n.value));
            return node;
        }).collect(Collectors.toList());

        String pmml = PmmlUtils.createSVMModel(features, request.getPredictionFeature(), "SVM", kernel,
                svm_type, options, coefsList, vectors);
        TrainingResponse response = new TrainingResponse();

        // Serialize the model; closing the stream flushes any buffered bytes
        // before the buffer is Base64-encoded (previously it was never closed).
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        try (ObjectOutput out = new ObjectOutputStream(baos)) {
            out.writeObject(model);
        }
        String base64Model = Base64.getEncoder().encodeToString(baos.toByteArray());
        response.setRawModel(base64Model);
        List<String> independentFeatures = features.stream()
                .filter(feature -> !feature.equals(request.getPredictionFeature()))
                .collect(Collectors.toList());
        response.setIndependentFeatures(independentFeatures);
        response.setPmmlModel(pmml);
        response.setAdditionalInfo(request.getPredictionFeature());
        response.setPredictedFeatures(
                Arrays.asList("Weka SVM prediction of " + request.getPredictionFeature()));

        return Response.ok(response).build();
    } catch (Exception ex) {
        Logger.getLogger(WekaSVM.class.getName()).log(Level.SEVERE, null, ex);
        return Response.status(Response.Status.INTERNAL_SERVER_ERROR).entity(ex.getMessage()).build();
    }
}

From source file:org.jaqpot.algorithms.resource.WekaSVM.java

License:Open Source License

@POST
@Path("training")
/**
 * Trains a Weka LibSVM model from the posted dataset and parameters.
 *
 * <p>Fixes over the previous version: unknown {@code kernel}/{@code type} names
 * now yield a 400 response instead of an unboxing NullPointerException (which
 * surfaced as a 500); the {@code ObjectOutputStream} is closed before the buffer
 * is serialized; the deprecated {@code new Double(String)} constructor is gone.
 *
 * @param request training request carrying the dataset, prediction feature and parameters
 * @return 200 with the serialized model, 400 on bad input, 500 on unexpected failure
 */
public Response training(TrainingRequest request) {
    try {
        // Reject empty datasets up front.
        if (request.getDataset().getDataEntry().isEmpty()
                || request.getDataset().getDataEntry().get(0).getValues().isEmpty()) {
            return Response.status(Response.Status.BAD_REQUEST)
                    .entity("Dataset is empty. Cannot train model on empty dataset.").build();
        }
        List<String> features = request.getDataset().getDataEntry().stream().findFirst().get().getValues()
                .keySet().stream().collect(Collectors.toList());

        Instances data = InstanceUtils.createFromDataset(request.getDataset(), request.getPredictionFeature());
        Map<String, Object> parameters = request.getParameters() != null ? request.getParameters()
                : new HashMap<>();

        // Parse hyper-parameters, falling back to the class-level defaults.
        LibSVM regressor = new LibSVM();
        Double epsilon = Double.parseDouble(parameters.getOrDefault("epsilon", _epsilon).toString());
        Double cacheSize = Double.parseDouble(parameters.getOrDefault("cacheSize", _cacheSize).toString());
        Double gamma = Double.parseDouble(parameters.getOrDefault("gamma", _gamma).toString());
        Double coeff0 = Double.parseDouble(parameters.getOrDefault("coeff0", _coeff0).toString());
        Double cost = Double.parseDouble(parameters.getOrDefault("cost", _cost).toString());
        Double nu = Double.parseDouble(parameters.getOrDefault("nu", _nu).toString());
        Double loss = Double.parseDouble(parameters.getOrDefault("loss", _loss).toString());
        Integer degree = Integer.parseInt(parameters.getOrDefault("degree", _degree).toString());

        regressor.setEps(epsilon);
        regressor.setCacheSize(cacheSize);
        regressor.setDegree(degree);
        regressor.setCost(cost);
        regressor.setGamma(gamma);
        regressor.setCoef0(coeff0);
        regressor.setNu(nu);
        regressor.setLoss(loss);

        // Map the kernel name to its LibSVM tag; an unrecognized name previously
        // left svm_kernel null and crashed on unboxing in SelectedTag.
        Integer svm_kernel = null;
        String kernel = parameters.getOrDefault("kernel", _kernel).toString();
        if (kernel.equalsIgnoreCase("rbf")) {
            svm_kernel = LibSVM.KERNELTYPE_RBF;
        } else if (kernel.equalsIgnoreCase("polynomial")) {
            svm_kernel = LibSVM.KERNELTYPE_POLYNOMIAL;
        } else if (kernel.equalsIgnoreCase("linear")) {
            svm_kernel = LibSVM.KERNELTYPE_LINEAR;
        } else if (kernel.equalsIgnoreCase("sigmoid")) {
            svm_kernel = LibSVM.KERNELTYPE_SIGMOID;
        }
        if (svm_kernel == null) {
            return Response.status(Response.Status.BAD_REQUEST)
                    .entity("Unknown kernel: " + kernel
                            + ". Supported kernels are rbf, polynomial, linear and sigmoid.")
                    .build();
        }
        regressor.setKernelType(new SelectedTag(svm_kernel, LibSVM.TAGS_KERNELTYPE));

        // Same validation for the SVM type.
        Integer svm_type = null;
        String type = parameters.getOrDefault("type", _type).toString();
        if (type.equalsIgnoreCase("NU_SVR")) {
            svm_type = LibSVM.SVMTYPE_NU_SVR;
        } else if (type.equalsIgnoreCase("NU_SVC")) {
            svm_type = LibSVM.SVMTYPE_NU_SVC;
        } else if (type.equalsIgnoreCase("C_SVC")) {
            svm_type = LibSVM.SVMTYPE_C_SVC;
        } else if (type.equalsIgnoreCase("EPSILON_SVR")) {
            svm_type = LibSVM.SVMTYPE_EPSILON_SVR;
        } else if (type.equalsIgnoreCase("ONE_CLASS_SVM")) {
            svm_type = LibSVM.SVMTYPE_ONE_CLASS_SVM;
        }
        if (svm_type == null) {
            return Response.status(Response.Status.BAD_REQUEST)
                    .entity("Unknown SVM type: " + type
                            + ". Supported types are NU_SVR, NU_SVC, C_SVC, EPSILON_SVR and ONE_CLASS_SVM.")
                    .build();
        }
        regressor.setSVMType(new SelectedTag(svm_type, LibSVM.TAGS_SVMTYPE));

        regressor.buildClassifier(data);

        WekaModel model = new WekaModel();
        model.setClassifier(regressor);

        Map<String, Double> options = new HashMap<>();
        options.put("gamma", gamma);
        options.put("coeff0", coeff0);
        // degree.doubleValue() replaces the deprecated new Double(String) round-trip.
        options.put("degree", degree.doubleValue());

        // LibSVM does not expose its native model, so read the private field
        // m_Model reflectively to extract support vectors and coefficients.
        Field modelField = LibSVM.class.getDeclaredField("m_Model");
        modelField.setAccessible(true);
        svm_model svmModel = (svm_model) modelField.get(regressor);
        double[][] coefs = svmModel.sv_coef;
        List<Double> coefsList = IntStream.range(0, coefs[0].length).mapToObj(i -> coefs[0][i])
                .collect(Collectors.toList());

        svm_node[][] nodes = svmModel.SV;

        // Each support vector becomes a sparse index->value map (TreeMap keeps indices ordered).
        List<Map<Integer, Double>> vectors = IntStream.range(0, nodes.length).mapToObj(i -> {
            Map<Integer, Double> node = new TreeMap<>();
            Arrays.stream(nodes[i]).forEach(n -> node.put(n.index, n.value));
            return node;
        }).collect(Collectors.toList());

        String pmml = PmmlUtils.createSVMModel(features, request.getPredictionFeature(), "SVM", kernel,
                svm_type, options, coefsList, vectors);
        TrainingResponse response = new TrainingResponse();

        // Serialize the model; closing the stream flushes any buffered bytes
        // before the buffer is Base64-encoded (previously it was never closed).
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        try (ObjectOutput out = new ObjectOutputStream(baos)) {
            out.writeObject(model);
        }
        String base64Model = Base64.getEncoder().encodeToString(baos.toByteArray());
        response.setRawModel(base64Model);
        List<String> independentFeatures = features.stream()
                .filter(feature -> !feature.equals(request.getPredictionFeature()))
                .collect(Collectors.toList());
        response.setIndependentFeatures(independentFeatures);
        response.setPmmlModel(pmml);
        response.setAdditionalInfo(request.getPredictionFeature());
        response.setPredictedFeatures(
                Arrays.asList("Weka SVM prediction of " + request.getPredictionFeature()));

        return Response.ok(response).build();
    } catch (Exception ex) {
        Logger.getLogger(WekaSVM.class.getName()).log(Level.SEVERE, null, ex);
        return Response.status(Response.Status.INTERNAL_SERVER_ERROR).entity(ex.getMessage()).build();
    }
}

From source file:Tubes.Classification.java

/**
 * Trains and evaluates Naive Bayes and LibSVM classifiers on text data.
 *
 * <p>Fixes over the previous version: the {@code StringToWordVector} filter is
 * now applied to the test set as well (batch filtering with one filter instance,
 * so both sets share the same attribute space — previously the unfiltered test
 * set was incompatible with the trained models), and both file readers are
 * closed via try-with-resources instead of being leaked.
 */
public static void main(String[] args) throws FileNotFoundException, IOException, Exception {

    // Converts string attributes into word-vector attributes.
    StringToWordVector filter = new StringToWordVector();

    File training = new File(classTrain);
    File testing = new File(classTest);

    Instances dataTrain;
    Instances dataTest;
    // Close both readers even if parsing fails.
    try (BufferedReader readTrain = new BufferedReader(new FileReader(training));
            BufferedReader readTest = new BufferedReader(new FileReader(testing))) {
        dataTrain = new Instances(readTrain);
        dataTest = new Instances(readTest);
    }

    // Batch filtering: initialize the filter on the training data, then apply
    // the SAME filter instance to both sets so they end up with identical
    // attributes. Filtering only the training set (as before) breaks evaluation.
    filter.setInputFormat(dataTrain);
    dataTrain = Filter.useFilter(dataTrain, filter);
    dataTest = Filter.useFilter(dataTest, filter);

    dataTrain.setClassIndex(dataTrain.numAttributes() - 1);
    dataTest.setClassIndex(dataTest.numAttributes() - 1);

    Classification classify = new Classification();
    NaiveBayes bayes = new NaiveBayes();
    LibSVM libSVM = new LibSVM();

    System.out.println("==========================Naive Bayes Evaluation===========================");
    Evaluation eval = classify.runClassifier(bayes, dataTrain, dataTest);
    System.out.println(eval.toSummaryString() + "\n");
    System.out.println(eval.toClassDetailsString() + "\n");
    System.out.println(eval.toMatrixString() + "\n");
    System.out.println("===========================================================================");

    System.out.println("==============================LibSVM================================");
    libSVM.setCacheSize(512); // MB
    libSVM.setNormalize(true);
    libSVM.setShrinking(true);
    // Linear-kernel C-SVC; degree is irrelevant for a linear kernel but kept
    // for parity with the original configuration.
    libSVM.setKernelType(new SelectedTag(LibSVM.KERNELTYPE_LINEAR, LibSVM.TAGS_KERNELTYPE));
    libSVM.setDegree(3);
    libSVM.setSVMType(new SelectedTag(LibSVM.SVMTYPE_C_SVC, LibSVM.TAGS_SVMTYPE));
    Evaluation eval4 = classify.runClassifier(libSVM, dataTrain, dataTest);
    System.out.println(eval4.toSummaryString() + "\n");
    System.out.println(eval4.toClassDetailsString() + "\n");
    System.out.println(eval4.toMatrixString() + "\n");
    System.out.println("===========================================================================");
}