List of usage examples for weka.core Instance classAttribute
public Attribute classAttribute();
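Before the collected examples, here is a minimal, self-contained sketch of what classAttribute() gives you. It assumes the Weka 3.7+ API (where DenseInstance implements the Instance interface); the dataset name, attribute names and class values ("demo", "temperature", "play", "yes"/"no") are made up purely for illustration.

import java.util.ArrayList;

import weka.core.Attribute;
import weka.core.DenseInstance;
import weka.core.Instance;
import weka.core.Instances;

public class ClassAttributeDemo {
    public static void main(String[] args) {
        // Hypothetical dataset: one numeric attribute plus a nominal class {yes, no}.
        ArrayList<String> classValues = new ArrayList<String>();
        classValues.add("yes");
        classValues.add("no");

        ArrayList<Attribute> attributes = new ArrayList<Attribute>();
        attributes.add(new Attribute("temperature"));        // numeric attribute
        attributes.add(new Attribute("play", classValues));  // nominal class attribute

        Instances dataset = new Instances("demo", attributes, 0);
        dataset.setClassIndex(dataset.numAttributes() - 1);  // last attribute is the class

        Instance inst = new DenseInstance(2);
        inst.setDataset(dataset);                            // the instance must know its header
        inst.setValue(attributes.get(0), 21.5);
        inst.setValue(attributes.get(1), "yes");

        // classAttribute() returns the Attribute object that the class index points to.
        Attribute classAttr = inst.classAttribute();
        System.out.println(classAttr.name());                          // "play"
        System.out.println(classAttr.numValues());                     // 2
        System.out.println(classAttr.value((int) inst.classValue()));  // "yes"
    }
}

Note that the instance has to be attached to an Instances header with a class index set; without that, classAttribute() has no dataset to consult and Weka reports an unassigned dataset/class error. The examples below all rely on this, typically to look up the number of class values or to map a predicted index back to a class label.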
From source file:NaiveBayes.NaiveBayes.java
@Override
public double classifyInstance(Instance last) {
    double prob[] = new double[last.classAttribute().numValues()];
    for (int classIndex = 0; classIndex < last.attribute(last.classIndex()).numValues(); classIndex++) {
        // classification
        double temp = 1;
        int i = 0;
        for (Atribut attr : getList()) {
            if (i == last.classIndex())
                i++;
            //System.out.println(attr.getName() + "=" + last.attribute(i).name());
            temp *= attr.getFrekuensiNilai(last.attribute(last.classIndex()).value(classIndex),
                    last.toString(i), last.value(i), last.attribute(i).isNumeric())
                    / numEachClass[classIndex];
            i++;
        }
        double res;
        res = numEachClass[classIndex] / last.numAttributes() * temp;
        prob[classIndex] = res;
    }
    return maxIndex(prob);
}
From source file:naivebayes.NBTubesAI.java
@Override
public double classifyInstance(Instance instance) throws Exception {
    int jumlahKelas = instance.classAttribute().numValues();
    double[] classifyResult = new double[jumlahKelas];
    // iterate to compute the probability for every class
    for (int i = 0; i < jumlahKelas; i++) {
        // Naive Bayes probability formula
        classifyResult[i] = (double) classCount.get(i + 0.0) / numInstance;
        Enumeration<Attribute> enumAttr = instance.enumerateAttributes();
        while (enumAttr.hasMoreElements()) {
            Attribute temp = enumAttr.nextElement();
            if (!instance.isMissing(temp)) {
                try {
                    classifyResult[i] = classifyResult[i]
                            * distribution.get(temp.name()).get(instance.stringValue(temp)).get(i + 0.0);
                } catch (NullPointerException e) {
                    classifyResult[i] = 0;
                }
            }
        }
    }
    double maxValue = 0;
    int currentIndex = 0;
    for (int i = 0; i < jumlahKelas; i++) {
        if (maxValue < classifyResult[i]) {
            currentIndex = i;
            maxValue = classifyResult[i];
        }
    }
    return currentIndex;
}
From source file:naivebayes.NBTubesAI.java
@Override
public double[] distributionForInstance(Instance instance) throws Exception {
    int jumlahKelas = instance.classAttribute().numValues();
    double[] classifyResult = new double[jumlahKelas];
    // iterate to compute the probability for every class
    for (int i = 0; i < jumlahKelas; i++) {
        // Naive Bayes probability formula
        classifyResult[i] = (double) classCount.get(i + 0.0) / numInstance;
        Enumeration<Attribute> enumAttr = instance.enumerateAttributes();
        while (enumAttr.hasMoreElements()) {
            Attribute temp = enumAttr.nextElement();
            if (!instance.isMissing(temp)) {
                try {
                    classifyResult[i] = classifyResult[i]
                            * distribution.get(temp.name()).get(instance.stringValue(temp)).get(i + 0.0);
                } catch (NullPointerException e) {
                    // value not seen during training: leave the running product unchanged
                }
            }
        }
    }
    return classifyResult;
}
From source file:net.paudan.evosvm.LibLINEAR.java
License:Open Source License
/**
 * Computes the distribution for a given instance.
 *
 * @param instance the instance for which distribution is computed
 * @return the distribution
 * @throws Exception if the distribution can't be computed successfully
 */
public double[] distributionForInstance(Instance instance) throws Exception {
    if (!getDoNotReplaceMissingValues()) {
        m_ReplaceMissingValues.input(instance);
        m_ReplaceMissingValues.batchFinished();
        instance = m_ReplaceMissingValues.output();
    }
    if (getConvertNominalToBinary() && m_NominalToBinary != null) {
        m_NominalToBinary.input(instance);
        m_NominalToBinary.batchFinished();
        instance = m_NominalToBinary.output();
    }
    if (m_Filter != null) {
        m_Filter.input(instance);
        m_Filter.batchFinished();
        instance = m_Filter.output();
    }

    FeatureNode[] x = instanceToArray(instance);
    double[] result = new double[instance.numClasses()];
    if (m_ProbabilityEstimates) {
        if (m_SolverType != SolverType.L2R_LR && m_SolverType != SolverType.L2R_LR_DUAL
                && m_SolverType != SolverType.L1R_LR) {
            throw new WekaException("probability estimation is currently only "
                    + "supported for L2-regularized logistic regression");
        }

        int[] labels = m_Model.getLabels();
        double[] prob_estimates = new double[instance.numClasses()];
        Linear.predictProbability(m_Model, x, prob_estimates);

        // Return order of probabilities to canonical weka attribute order
        for (int k = 0; k < labels.length; k++) {
            result[labels[k]] = prob_estimates[k];
        }
    } else {
        int prediction = (int) Linear.predict(m_Model, x);
        assert (instance.classAttribute().isNominal());
        result[prediction] = 1;
    }

    return result;
}
From source file:net.sf.bddbddb.order.WekaInterface.java
License:LGPL
public static double cvError(int numFolds, Instances data0, String cClassName) {
    if (data0.numInstances() < numFolds)
        return Double.NaN; // more folds than elements
    if (numFolds == 0)
        return Double.NaN; // no folds
    if (data0.numInstances() == 0)
        return 0; // no instances

    Instances data = new Instances(data0);
    //data.randomize(new Random(System.currentTimeMillis()));
    data.stratify(numFolds);

    Assert._assert(data.classAttribute() != null);
    double[] estimates = new double[numFolds];
    for (int i = 0; i < numFolds; ++i) {
        Instances trainData = data.trainCV(numFolds, i);
        Assert._assert(trainData.classAttribute() != null);
        Assert._assert(trainData.numInstances() != 0, "Cannot train classifier on 0 instances.");
        Instances testData = data.testCV(numFolds, i);
        Assert._assert(testData.classAttribute() != null);
        Assert._assert(testData.numInstances() != 0, "Cannot test classifier on 0 instances.");

        int temp = FindBestDomainOrder.TRACE;
        FindBestDomainOrder.TRACE = 0;
        Classifier classifier = buildClassifier(cClassName, trainData);
        FindBestDomainOrder.TRACE = temp;

        int count = testData.numInstances();
        double loss = 0;
        double sum = 0;
        for (Enumeration e = testData.enumerateInstances(); e.hasMoreElements();) {
            Instance instance = (Instance) e.nextElement();
            Assert._assert(instance != null);
            Assert._assert(instance.classAttribute() != null
                    && instance.classAttribute() == trainData.classAttribute());
            try {
                double testClass = classifier.classifyInstance(instance);
                double weight = instance.weight();
                if (testClass != instance.classValue())
                    loss += weight;
                sum += weight;
            } catch (Exception ex) {
                FindBestDomainOrder.out.println("Exception while classifying: " + instance + "\n" + ex);
            }
        }
        estimates[i] = 1 - loss / sum;
    }
    double average = 0;
    for (int i = 0; i < numFolds; ++i)
        average += estimates[i];
    return average / numFolds;
}
From source file:org.conqat.engine.commons.machine_learning.BaseWekaClassifier.java
License:Apache License
/**
 * Returns the classification as a String for the given classification
 * object. The method buildClassifier needs to be called before.
 */
public String getClassification(T classificationObject) throws ConQATException {
    weka.core.Instance instance = wekaDataSetCreator.createWekaUnlabeledInstance(classificationObject);
    try {
        CCSMAssert.isNotNull(wekaClassifier,
                "Weka Classifier called to classify instance although it was null.");
        double classification = wekaClassifier.classifyInstance(instance);
        return instance.classAttribute().value((int) classification);
    } catch (Exception e) {
        throw new ConQATException(e);
    }
}
From source file:org.esa.nest.gpf.SGD.java
/**
 * Updates the classifier with the given instance.
 *
 * @param instance the new training instance to include in the model
 * @exception Exception if the instance could not be incorporated in the model.
 */
@Override
public void updateClassifier(Instance instance) throws Exception {
    if (!instance.classIsMissing()) {
        double wx = dotProd(instance, m_weights, instance.classIndex());
        double y;
        double z;
        if (instance.classAttribute().isNominal()) {
            y = (instance.classValue() == 0) ? -1 : 1;
            z = y * (wx + m_weights[m_weights.length - 1]);
        } else {
            y = instance.classValue();
            z = y - (wx + m_weights[m_weights.length - 1]);
            y = 1;
        }

        // Compute multiplier for weight decay
        double multiplier = 1.0;
        if (m_numInstances == 0) {
            multiplier = 1.0 - (m_learningRate * m_lambda) / m_t;
        } else {
            multiplier = 1.0 - (m_learningRate * m_lambda) / m_numInstances;
        }

        for (int i = 0; i < m_weights.length - 1; i++) {
            m_weights[i] *= multiplier;
        }

        // Only need to do the following if the loss is non-zero
        if (m_loss != HINGE || (z < 1)) {

            // Compute Factor for updates
            double factor = m_learningRate * y * dloss(z);

            // Update coefficients for attributes
            int n1 = instance.numValues();
            for (int p1 = 0; p1 < n1; p1++) {
                int indS = instance.index(p1);
                if (indS != instance.classIndex() && !instance.isMissingSparse(p1)) {
                    m_weights[indS] += factor * instance.valueSparse(p1);
                }
            }

            // update the bias
            m_weights[m_weights.length - 1] += factor;
        }
        m_t++;
    }
}
From source file:org.esa.nest.gpf.SGD.java
/**
 * Computes the distribution for a given instance.
 *
 * @param inst the instance for which distribution is computed
 * @return the distribution
 * @throws Exception if the distribution can't be computed successfully
 */
@Override
public double[] distributionForInstance(Instance inst) throws Exception {
    double[] result = (inst.classAttribute().isNominal()) ? new double[2] : new double[1];

    if (m_replaceMissing != null) {
        m_replaceMissing.input(inst);
        inst = m_replaceMissing.output();
    }
    if (m_nominalToBinary != null) {
        m_nominalToBinary.input(inst);
        inst = m_nominalToBinary.output();
    }
    if (m_normalize != null) {
        m_normalize.input(inst);
        inst = m_normalize.output();
    }

    double wx = dotProd(inst, m_weights, inst.classIndex()); // * m_wScale;
    double z = (wx + m_weights[m_weights.length - 1]);

    if (inst.classAttribute().isNumeric()) {
        result[0] = z;
        return result;
    }

    if (z <= 0) {
        // z = 0;
        if (m_loss == LOGLOSS) {
            result[0] = 1.0 / (1.0 + Math.exp(z));
            result[1] = 1.0 - result[0];
        } else {
            result[0] = 1;
        }
    } else {
        if (m_loss == LOGLOSS) {
            result[1] = 1.0 / (1.0 + Math.exp(-z));
            result[0] = 1.0 - result[1];
        } else {
            result[1] = 1;
        }
    }
    return result;
}
From source file:org.scripps.branch.classifier.ManualTree.java
License:Open Source License
/**
 * Tries to generate the distribution of classes.
 *
 * @param instances the instances to read from
 * @param attIndex the index of the attribute to get the distribution of
 * @param distMap the HashMap to put the data into
 *
 * @return HashMap of class distribution data
 */
protected HashMap addDistributionData(Instances instances, int attIndex, HashMap distMap) throws Exception {
    Map<String, Comparable> temp = new HashMap<String, Comparable>();
    ArrayList<Object> distData = new ArrayList();
    // GenerateCSV csv = new GenerateCSV();
    // String data = "";
    boolean isNominal = false;
    instances.sort(attIndex);
    for (int i = 0; i < instances.numInstances(); i++) {
        Instance inst = instances.instance(i);
        if (!Double.isNaN(inst.value(attIndex))) {
            temp = new HashMap<String, Comparable>();
            if (inst.attribute(attIndex).isNominal()) {
                temp.put("value", inst.attribute(attIndex).value((int) inst.value(attIndex)));
                isNominal = true;
                // data += inst.attribute(m_Attribute).value((int) inst.value(m_Attribute)) + ",";
            } else {
                temp.put("value", inst.value(attIndex));
                // data += inst.value(att) + ",";
            }
            temp.put("classprob", inst.classAttribute().value((int) inst.classValue()));
            // data += inst.classAttribute().value((int) inst.classValue()) + "\n";
            distData.add(temp);
        }
    }
    if (!distData.isEmpty()) {
        distMap.put("dataArray", distData);
        distMap.put("isNominal", isNominal);
        setDistributionData(distMap);
    }
    return distMap;
    // To check whether the data is being generated correctly:
    // csv.generateCsvFile("/home/karthik/Documents/distribution.csv", data);
}
From source file:sg.edu.nus.comp.nlp.ims.classifiers.CMultiClassesSVM.java
License:Open Source License
@Override
public double[] distributionForInstance(Instance p_Instance) throws Exception {
    double[] probs = new double[p_Instance.numClasses()];
    Instance newInst = this.filterInstance(p_Instance);
    newInst.setDataset(this.m_OutputFormat);
    newInst.setMissing(newInst.classAttribute());
    if (this.m_Classifiers == null) {
        return new double[] { 1 };
    }
    if (this.m_Classifiers.length == 1) {
        return this.m_Classifiers[0].distributionForInstance(newInst);
    }
    for (int i = 0; i < this.m_Classifiers.length; i++) {
        if (this.m_Classifiers[i] != null) {
            double[] current = this.m_Classifiers[i].distributionForInstance(newInst);
            for (int j = 0; j < this.m_ClassAttribute.numValues(); j++) {
                if (j == i) {
                    probs[j] += current[1];
                } else {
                    probs[j] += current[0];
                }
            }
        }
    }
    if (Utils.gr(Utils.sum(probs), 0)) {
        Utils.normalize(probs);
        return probs;
    } else {
        return m_ZeroR.distributionForInstance(newInst);
    }
}