List of usage examples for weka.core.Instances.numClasses()
public int numClasses()
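Before the collected examples, a minimal sketch of the call itself. This snippet is illustrative and not taken from any of the projects below; the file path "iris.arff" is a placeholder. numClasses() returns the number of labels of the class attribute, or 1 if the class is numeric, so the class index must be set first.

import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class NumClassesDemo {
    public static void main(String[] args) throws Exception {
        // Load a dataset; "iris.arff" is a placeholder path.
        Instances data = DataSource.read("iris.arff");
        // numClasses() requires the class attribute to be set.
        data.setClassIndex(data.numAttributes() - 1);
        // Number of class labels (1 if the class attribute is numeric).
        System.out.println("Number of classes: " + data.numClasses());
    }
}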
From source file:algoritmogeneticocluster.Cromossomo.java
private double getMicroAverage(Evaluation eval, Instances data) {
    double TP = 0;
    double TP_plus_FP = 0;
    double TP_plus_FN = 0;
    double microPrecision;
    double microRecall;
    double microMeasure;
    // Aggregate the per-class rates reported by Evaluation across all classes.
    for (int i = 0; i < data.numClasses(); i++) {
        TP += eval.truePositiveRate(i);
        TP_plus_FP += eval.truePositiveRate(i) + eval.falsePositiveRate(i);
        TP_plus_FN += eval.truePositiveRate(i) + eval.falseNegativeRate(i);
    }
    microPrecision = TP / TP_plus_FP;
    microRecall = TP / TP_plus_FN;
    // Micro-averaged F-measure: harmonic mean of the aggregated precision and recall.
    microMeasure = (microPrecision * microRecall * 2) / (microPrecision + microRecall);
    return microMeasure;
}
From source file:AnDE.wdAnDEonline.java
License: Open Source License
@Override
public void buildClassifier(Instances instances) throws Exception {
    // can classifier handle the data?
    getCapabilities().testWithFail(instances);

    // remove instances with missing class
    instances.deleteWithMissingClass();

    nInstances = instances.numInstances();
    nAttributes = instances.numAttributes() - 1;
    nc = instances.numClasses();

    probs = new double[nc];

    paramsPerAtt = new int[nAttributes];
    for (int u = 0; u < nAttributes; u++) {
        paramsPerAtt[u] = instances.attribute(u).numValues();
    }

    // Initialize structure array based on m_S (A0DE, A1DE or A2DE).
    if (m_S.equalsIgnoreCase("A0DE")) {
        numTuples = 0;
    } else if (m_S.equalsIgnoreCase("A1DE")) {
        numTuples = 1;
    } else if (m_S.equalsIgnoreCase("A2DE")) {
        numTuples = 2;
    }

    // Start the parameter-learning process: initialize the data structures.
    int scheme = plTechniques.MAP;

    logDComputer = LogDistributionComputerAnDE.getDistributionComputer(numTuples, scheme);

    dParameters_ = new wdAnDEParametersFlat(nAttributes, nc, nInstances, paramsPerAtt, scheme, numTuples,
            m_MVerb);

    if (m_MVerb)
        System.out.println("All data structures are initialized. Starting to estimate parameters.");

    if (nInstances > 0) {
        for (int i = 0; i < nInstances; i++) {
            Instance instance = instances.instance(i);
            dParameters_.updateFirstPass(instance);
        }
    }
}
From source file:ann.ANNOptions.java
public void initWeightsSLP(Instances data) throws Exception {
    ntb.setInputFormat(data);
    data = new Instances(Filter.useFilter(data, ntb));

    // normalize filter
    normalize.setInputFormat(data);
    data = new Instances(Filter.useFilter(data, normalize));

    int nAttr = data.numAttributes();
    Scanner sc = new Scanner(System.in);
    Random random = new Random();
    int nOutput;
    if (data.numClasses() <= 2 && topologyOpt == 1) {
        nOutput = 1; // a single output node is enough for a binary class
    } else {
        nOutput = data.numClasses();
    }
    for (int j = 0; j < nOutput; j++) {
        Neuron temp = new Neuron();
        if (weightOpt == 1) { // random initial weights
            for (int i = 0; i < nAttr; i++) {
                temp.weights.add(random.nextDouble());
            }
        } else { // weights given by the user
            System.out.println("Output-" + j);
            for (int i = 0; i < nAttr - 1; i++) {
                System.out.print("Weight-" + (i + 1) + ": ");
                temp.weights.add(sc.nextDouble());
            }
            System.out.print("Bias weight: ");
            temp.weights.add(sc.nextDouble());
        }
        output.add(temp);
    }
}
From source file:ann.ANNOptions.java
public void initWeightsMLP(Instances data) throws Exception {
    ntb.setInputFormat(data);
    data = new Instances(Filter.useFilter(data, ntb));

    // normalize filter
    normalize.setInputFormat(data);
    data = new Instances(Filter.useFilter(data, normalize));

    int nAttr = data.numAttributes();
    Scanner sc = new Scanner(System.in);
    Random random = new Random();
    int nOutput = data.numClasses();
    for (int i = 0; i < hiddenLayer; i++) {
        if (weightOpt == 2) {
            System.out.println("Layer-" + (i + 1));
        }
        List<Neuron> neuronLayer = new ArrayList<Neuron>();
        for (int j = 0; j < layerNeuron.get(i) + 1; j++) {
            if (weightOpt == 2) {
                System.out.println("Neuron-" + (j + 1));
            }
            Neuron neuron = new Neuron();
            if (i == 0) { // weights from the input layer
                for (int k = 0; k < nAttr; k++) {
                    if (weightOpt == 1) { // random
                        neuron.weights.add(random.nextDouble());
                    } else { // given
                        if (k < nAttr - 1) {
                            System.out.print("Weight input-" + (k + 1) + ": ");
                        } else {
                            System.out.print("Weight bias: ");
                        }
                        neuron.weights.add(sc.nextDouble());
                    }
                }
                neuronLayer.add(neuron);
            } else if (j < layerNeuron.get(i)) { // weights from the previous hidden layer
                for (int k = 0; k < layerNeuron.get(i - 1) + 1; k++) { // + 1 for the bias
                    if (weightOpt == 1) { // random
                        neuron.weights.add(random.nextDouble());
                    } else { // given
                        if (k < layerNeuron.get(i - 1)) {
                            System.out.print("Weight neuron-" + (k + 1) + ": ");
                        } else {
                            System.out.print("Weight bias: ");
                        }
                        neuron.weights.add(sc.nextDouble());
                    }
                }
                neuronLayer.add(neuron);
            }
        }
        if (i != 0) {
            neuronLayer.add(new Neuron()); // bias node
        }
        layer.add(neuronLayer);
    }

    // last hidden layer to the output layer
    List<Neuron> neuronLayer = new ArrayList<Neuron>();
    for (int i = 0; i < nOutput; i++) {
        Neuron neuron = new Neuron();
        for (int j = 0; j < layerNeuron.get(layerNeuron.size() - 1) + 1; j++) {
            if (weightOpt == 1) { // random
                neuron.weights.add(random.nextDouble());
            } else { // given
                if (j < layerNeuron.get(layerNeuron.size() - 1)) {
                    System.out.print("Weight neuron-" + (j + 1) + ": ");
                } else {
                    System.out.print("Bias: ");
                }
                neuron.weights.add(sc.nextDouble());
            }
        }
        neuronLayer.add(neuron);
    }
    layer.add(neuronLayer);
}
From source file:ANN.MultilayerPerceptron.java
public MultilayerPerceptron(Instances i, double rate, int itter, int numHidden) {
    learningRate = rate;
    listHidden = new ArrayList<>();
    for (int num = 0; num < numHidden + 1; num++) { // + 1 for the bias node
        listHidden.add(new Node(i.numAttributes()));
    }
    listOutput = new ArrayList<>();
    for (int num = 0; num < i.numClasses(); num++) { // one output node per class
        listOutput.add(new Node(listHidden.size()));
    }
    itteration = itter;
    // store the class value of every instance
    listDoubleinstance = new double[i.numInstances()];
    for (int numIns = 0; numIns < i.numInstances(); numIns++) {
        listDoubleinstance[numIns] = i.instance(numIns).toDoubleArray()[i.classIndex()];
    }
}
From source file:ANN.MultiplePerceptron.java
public MultiplePerceptron(Instances i, int numNode, double rate) {
    listNodeHidden = new ArrayList<>();
    for (int num = 0; num < numNode + 1; num++) { // + 1 for the bias node
        listNodeHidden.add(new Node(i.numAttributes()));
    }
    listNodeOutput = new ArrayList<>();
    for (int num = 0; num < i.numClasses(); num++) { // one output node per class
        listNodeOutput.add(new Node(listNodeHidden.size()));
    }
    // store the class value of every instance
    listDoubleinstance = new double[i.numInstances()];
    for (int numIns = 0; numIns < i.numInstances(); numIns++) {
        listDoubleinstance[numIns] = i.instance(numIns).toDoubleArray()[i.classIndex()];
    }
    learningRate = rate;
}
From source file:ANN_Single.SinglelayerPerceptron.java
@Override
public void buildClassifier(Instances i) {
    listOutput = new ArrayList<>();
    for (int num = 0; num < i.numClasses(); num++) {
        listOutput.add(new Node(i.numAttributes()));
    }
    while (true) { // repeat until the total squared error reaches zero
        for (int idxInstance = 0; idxInstance < i.numInstances(); idxInstance++) {
            // build the input list, with a leading 1.0 for the bias
            ArrayList<Double> listInput = new ArrayList<>();
            listInput.add(1.0);
            for (int idx = 0; idx < i.numAttributes() - 1; idx++) {
                listInput.add(i.get(idxInstance).value(idx));
            }
            // compute the output layer
            for (int idxOutput = 0; idxOutput < listOutput.size(); idxOutput++) {
                output(listInput, idxOutput);
            }
            // compute the error, then update the weights
            calculateError(idxInstance);
            updateWeight(listInput);
        }
        double error = 0;
        for (int idxErr = 0; idxErr < i.numInstances(); idxErr++) {
            for (int idx = 0; idx < listOutput.size(); idx++) {
                error += Math.pow(listOutput.get(idx).getError(), 2) / 2;
            }
        }
        System.out.println(error);
        System.out.println();
        if (error <= 0) break;
    }
    fold++;
    System.out.println("Fold " + fold);
    double error = 0;
    for (int idxErr = 0; idxErr < i.numInstances(); idxErr++) {
        for (Node listOutput1 : listOutput) {
            error += Math.pow(listOutput1.getError(), 2) / 2;
        }
    }
    System.out.println("error " + error);
    for (int idx = 0; idx < listOutput.size(); idx++) {
        System.out.println("Output value " + listOutput.get(idx).getValue());
        System.out.println("Output error " + listOutput.get(idx).getError());
        for (int idx2 = 0; idx2 < listOutput.get(idx).getWeightSize(); idx2++)
            System.out.println("Output weight " + listOutput.get(idx).getWeightFromList(idx2));
    }
}
From source file:ANN_single2.MultilayerPerceptron.java
public MultilayerPerceptron(Instances i, int numHide, double rate, double thres) {
    learningRate = rate;
    threshold = thres;
    numHiden = numHide;
    // initialize the hidden-layer nodes
    listHidden = new ArrayList<>();
    for (int idx = 0; idx < numHiden; idx++) {
        listHidden.add(new Node(i.numAttributes())); // 1 for the bias
    }
    // initialize the output-layer nodes
    listOutput = new ArrayList<>();
    for (int idx = 0; idx < i.numClasses(); idx++) {
        listOutput.add(new Node(listHidden.size()));
    }
}
From source file:ANN_single2.SinglelayerPerceptron.java
@Override
public void buildClassifier(Instances i) {
    listOutput = new ArrayList<>();
    for (int idx = 0; idx < i.numClasses(); idx++) {
        listOutput.add(new Node(i.numAttributes()));
    }
    // convert the class values to numeric (their indices)
    listDoubleinstance = new double[i.numInstances()];
    for (int numIns = 0; numIns < i.numInstances(); numIns++) {
        listDoubleinstance[numIns] = i.instance(numIns).toDoubleArray()[i.classIndex()];
    }
    for (int iter = 0; iter < itteration; iter++) {
        double errorThres = 0;
        for (int idxInstance = 0; idxInstance < i.numInstances(); idxInstance++) {
            // build the input list, with a leading 1.0 for the bias
            ArrayList<Double> listInput = new ArrayList<>();
            listInput.add(1.0);
            for (int idx = 0; idx < i.numAttributes() - 1; idx++) {
                listInput.add(i.get(idxInstance).value(idx));
            }
            // compute each output: sigmoid of the weighted sum
            for (int idxOut = 0; idxOut < listOutput.size(); idxOut++) {
                output(listInput, idxOut);
            }
            // compute the error, then update the weights
            calculateError(idxInstance);
            updateBobot(listInput);
        }
        for (int idxOut = 0; idxOut < listOutput.size(); idxOut++) {
            errorThres += Math.pow(listOutput.get(idxOut).getError(), 2) / 2;
        }
        if (errorThres <= threshold) break;
    }
}
From source file:bme.mace.logicdomain.Evaluation.java
License: Open Source License
/**
 * Initializes all the counters for the evaluation and also takes a cost
 * matrix as parameter. Use <code>useNoPriors()</code> if the dataset is the
 * test set and you can't initialize with the priors from the training set
 * via <code>setPriors(Instances)</code>.
 *
 * @param data set of training instances, to get some header information and
 *          prior class distribution information
 * @param costMatrix the cost matrix; if null, default costs will be used
 * @throws Exception if the cost matrix is not compatible with the data, the
 *           class is not defined or the class is numeric
 * @see #useNoPriors()
 * @see #setPriors(Instances)
 */
public Evaluation(Instances data, CostMatrix costMatrix) throws Exception {
    m_NumClasses = data.numClasses();
    m_NumFolds = 1;
    m_ClassIsNominal = data.classAttribute().isNominal();

    if (m_ClassIsNominal) {
        m_ConfusionMatrix = new double[m_NumClasses][m_NumClasses];
        m_ClassNames = new String[m_NumClasses];
        for (int i = 0; i < m_NumClasses; i++) {
            m_ClassNames[i] = data.classAttribute().value(i);
        }
    }
    m_CostMatrix = costMatrix;
    if (m_CostMatrix != null) {
        if (!m_ClassIsNominal) {
            throw new Exception("Class has to be nominal if a cost matrix is given!");
        }
        if (m_CostMatrix.size() != m_NumClasses) {
            throw new Exception("Cost matrix not compatible with data!");
        }
    }
    m_ClassPriors = new double[m_NumClasses];
    setPriors(data);
    m_MarginCounts = new double[k_MarginResolution + 1];
}
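A hedged usage sketch for this constructor follows. It uses Weka's standard weka.classifiers.Evaluation and CostMatrix classes, which the bme.mace.logicdomain copy above mirrors; the file path "train.arff" and the chosen cost value are placeholders.

import weka.classifiers.CostMatrix;
import weka.classifiers.Evaluation;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class CostSensitiveEvaluationDemo {
    public static void main(String[] args) throws Exception {
        // Placeholder training file; the class attribute must be nominal.
        Instances train = DataSource.read("train.arff");
        train.setClassIndex(train.numAttributes() - 1);
        // Square cost matrix with one row and one column per class.
        CostMatrix cm = new CostMatrix(train.numClasses());
        cm.setElement(0, 1, 5.0); // misclassifying class 0 as class 1 costs 5
        // The constructor checks that cm.size() matches train.numClasses().
        Evaluation eval = new Evaluation(train, cm);
        System.out.println(eval.numInstances()); // 0 until predictions are evaluated
    }
}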