List of usage examples for the Weka class weka.filters.supervised.attribute.NominalToBinary
From source file: ann.ANNOptions.java
public ANNOptions() { normalize = new Normalize(); ntb = new NominalToBinary(); output = new ArrayList<Neuron>(); layer = new ArrayList<List<Neuron>>(); layerNeuron = new ArrayList<Integer>(); weightOpt = 2;//from ww w. j a v a 2s . c om topologyOpt = 4; activationFunctionOpt = 1; hiddenLayer = 1; layerNeuron.add(3); maxIteration = 1000; momentum = 0.2; learningRate = 0.3; threshold = 0.01; }
From source file: ann.SingleLayerPerceptron.java
@Override public void buildClassifier(Instances data) throws Exception { // can classifier handle the data? getCapabilities().testWithFail(data); annOptions = new ANNOptions(); annOptions = annOptions.loadConfiguration(); output = new ArrayList<Neuron>(); normalize = new Normalize(); ntb = new NominalToBinary(); output = annOptions.output;//www. j ava2 s.co m // remove instances with missing class data = new Instances(data); data.deleteWithMissingClass(); //nominal to binary filter ntb.setInputFormat(data); data = new Instances(Filter.useFilter(data, ntb)); //normalize filter normalize.setInputFormat(data); data = new Instances(Filter.useFilter(data, normalize)); // do main function doPerceptron(data); }
From source file: CGLSMethod.LinearRegression.java
License: Open Source License
/** * Builds a regression model for the given data. * * @param data the training data to be used for generating the * linear regression function//from w ww . ja v a 2 s . c o m * @throws Exception if the classifier could not be built successfully */ public void buildClassifier(Instances data) throws Exception { // Preprocess instances if (!m_checksTurnedOff) { m_TransformFilter = new NominalToBinary(); m_TransformFilter.setInputFormat(data); data = Filter.useFilter(data, m_TransformFilter); m_MissingFilter = new ReplaceMissingValues(); m_MissingFilter.setInputFormat(data); data = Filter.useFilter(data, m_MissingFilter); data.deleteWithMissingClass(); } else { m_TransformFilter = null; m_MissingFilter = null; } m_ClassIndex = data.classIndex(); m_TransformedData = data; // Turn all attributes on for a start m_SelectedAttributes = new boolean[data.numAttributes()]; for (int i = 0; i < data.numAttributes(); i++) { if (i != m_ClassIndex) { m_SelectedAttributes[i] = true; } } m_Coefficients = null; // Compute means and standard deviations m_Means = new double[data.numAttributes()]; m_StdDevs = new double[data.numAttributes()]; for (int j = 0; j < data.numAttributes(); j++) { if (j != data.classIndex()) { m_Means[j] = data.meanOrMode(j); m_StdDevs[j] = Math.sqrt(data.variance(j)); if (m_StdDevs[j] == 0) { m_SelectedAttributes[j] = false; } } } m_ClassStdDev = Math.sqrt(data.variance(m_TransformedData.classIndex())); m_ClassMean = data.meanOrMode(m_TransformedData.classIndex()); // Perform the regression findBestModel(); // Save memory m_TransformedData = new Instances(data, 0); }
From source file: Classifier.supervised.LinearRegression.java
License: Open Source License
/** * Builds a regression model for the given data. * * @param data the training data to be used for generating the * linear regression function//from ww w . j a v a2s . c om * @throws Exception if the classifier could not be built successfully */ public void buildClassifier(Instances data) throws Exception { m_ModelBuilt = false; if (!m_checksTurnedOff) { // can classifier handle the data? getCapabilities().testWithFail(data); // remove instances with missing class data = new Instances(data); data.deleteWithMissingClass(); } // Preprocess instances if (!m_checksTurnedOff) { m_TransformFilter = new NominalToBinary(); m_TransformFilter.setInputFormat(data); data = Filter.useFilter(data, m_TransformFilter); m_MissingFilter = new ReplaceMissingValues(); m_MissingFilter.setInputFormat(data); data = Filter.useFilter(data, m_MissingFilter); data.deleteWithMissingClass(); } else { m_TransformFilter = null; m_MissingFilter = null; } m_ClassIndex = data.classIndex(); m_TransformedData = data; // Turn all attributes on for a start m_SelectedAttributes = new boolean[data.numAttributes()]; for (int i = 0; i < data.numAttributes(); i++) { if (i != m_ClassIndex) { m_SelectedAttributes[i] = true; } } m_Coefficients = null; // Compute means and standard deviations m_Means = new double[data.numAttributes()]; m_StdDevs = new double[data.numAttributes()]; for (int j = 0; j < data.numAttributes(); j++) { if (j != data.classIndex()) { m_Means[j] = data.meanOrMode(j); m_StdDevs[j] = Math.sqrt(data.variance(j)); if (m_StdDevs[j] == 0) { m_SelectedAttributes[j] = false; } } } m_ClassStdDev = Math.sqrt(data.variance(m_TransformedData.classIndex())); m_ClassMean = data.meanOrMode(m_TransformedData.classIndex()); // Perform the regression findBestModel(); // Save memory if (m_Minimal) { m_TransformedData = null; m_Means = null; m_StdDevs = null; } else { m_TransformedData = new Instances(data, 0); } m_ModelBuilt = true; }
From source file: com.mycompany.tubesann.MyANN.java
public static Instances setNominalToBinary(Instances instances) { NominalToBinary ntb = new NominalToBinary(); Instances newInstances = null;/*from w w w . j ava2 s.c o m*/ try { ntb.setInputFormat(instances); newInstances = new Instances(Filter.useFilter(instances, ntb)); } catch (Exception e) { e.printStackTrace(); } return newInstances; }
From source file: ml.ann.MainPTR.java
/**
 * Interactive driver for the perceptron-training-rule classifiers.
 *
 * <p>Prompts (in Indonesian) for an ARFF file, an algorithm choice
 * (Perceptron Training Rule / Delta Rule incremental / Delta Rule batch),
 * optional hyper-parameters and an optional fixed initial weight; then
 * binarizes and normalizes the data, trains a {@code MultiClassPTR}, and
 * prints Weka evaluation summaries on the training set.
 *
 * @param args unused
 * @throws Exception if the file cannot be read or training/evaluation fails
 */
public static void main(String[] args) throws FileNotFoundException, IOException, Exception {
    boolean randomWeight;
    double weightawal = 0.0;
    double learningRate = 0.0001;
    double threshold = 0.00;
    double momentum = 0.00;
    int maxEpoch = 100000;
    int nCrossValidate = 2;

    m_nominalToBinaryFilter = new NominalToBinary();
    m_normalize = new Normalize();

    Scanner in = new Scanner(System.in);
    System.out.println("Lokasi file: ");
    String filepath = in.nextLine();
    // NOTE(review): debug override left in the original — it discards the
    // path the user just typed. Remove it to honor user input.
    filepath = "test-arffs/iris.arff";

    System.out.println("--- Algoritma ---");
    System.out.println("1. Perceptron Training Rule");
    System.out.println("2. Delta Rule Incremental");
    System.out.println("3. Delta Rule Batch");
    System.out.println("Pilihan Algoritma (1/2/3) : ");
    int choice = in.nextInt();
    String temp = in.nextLine(); // consume the newline left behind by nextInt()

    System.out.println("Apakah Anda ingin memasukkan nilai weight awal? (YES/NO)");
    String isRandom = in.nextLine();
    System.out.println("Apakah Anda ingin memasukkan konfigurasi? (YES/NO)");
    String config = in.nextLine();
    if (config.equalsIgnoreCase("yes")) {
        System.out.print("Masukkan nilai learning rate: ");
        learningRate = in.nextDouble();
        System.out.print("Masukkan nilai threshold: ");
        threshold = in.nextDouble();
        System.out.print("Masukkan nilai momentum: ");
        momentum = in.nextDouble();
        System.out.print("Masukkan jumlah epoch: ");
        // BUGFIX: the original read this into "threshold", clobbering the
        // threshold just entered and leaving maxEpoch at its default.
        maxEpoch = in.nextInt();
        System.out.print("Masukkan jumlah folds untuk crossvalidate: ");
        nCrossValidate = in.nextInt();
    }
    randomWeight = isRandom.equalsIgnoreCase("yes");
    if (randomWeight) {
        System.out.print("Masukkan nilai weight awal: ");
        // NOTE(review): if the config branch ran, nextLine() here picks up the
        // leftover newline from nextInt() — verify against actual console use.
        weightawal = Double.valueOf(in.nextLine());
    }

    // Print the effective configuration.
    if (isRandom.equalsIgnoreCase("yes")) {
        System.out.print("isRandom | ");
    } else {
        System.out.print("Weight " + weightawal + " | ");
    }
    System.out.print("L.rate " + learningRate + " | ");
    System.out.print("Max Epoch " + maxEpoch + " | ");
    System.out.print("Threshold " + threshold + " | ");
    System.out.print("Momentum " + momentum + " | ");
    System.out.print("Folds " + nCrossValidate + " | ");
    System.out.println();

    // Load the ARFF training set; the class attribute is the last one.
    FileReader trainreader = new FileReader(filepath);
    Instances train = new Instances(trainreader);
    train.setClassIndex(train.numAttributes() - 1);

    // Binarize nominal attributes, then normalize numeric ranges.
    m_nominalToBinaryFilter.setInputFormat(train);
    train = new Instances(Filter.useFilter(train, m_nominalToBinaryFilter));
    m_normalize.setInputFormat(train);
    train = new Instances(Filter.useFilter(train, m_normalize));

    // Train and evaluate on the training set itself.
    MultiClassPTR tempMulti = new MultiClassPTR(choice, randomWeight, learningRate, maxEpoch, threshold);
    tempMulti.buildClassifier(train);
    Evaluation eval = new Evaluation(new Instances(train));
    eval.evaluateModel(tempMulti, train);
    System.out.println(eval.toSummaryString());
    System.out.println(eval.toClassDetailsString());
    System.out.println(eval.toMatrixString());
}
From source file: ml.ann.SinglePTR.java
@Override public double classifyInstance(Instance instnc) throws Exception { m_nominalToBinaryFilter = new NominalToBinary();//To change body of generated methods, choose Tools | Templates. return 0;//from w ww .jav a2s .c om }